Okay, so I've been trying to solve this problem for the past few days. The script decides to stop working at random times... it might be after 5 minutes, or after 3 hours of looping. We don't know what the problem is; we've tried different Python versions on 4 different machines, and it keeps happening. We get no error at all.
Here is what happened right before it stopped working, while I was running it with "python3 -m trace --trace":
SocksChecker.py(108): log_msg += "%s " % fail_msg
SocksChecker.py(109): print(Fore.LIGHTRED_EX + log_msg + Style.RESET_ALL)
Trying Socks proxy 139.224.130.86:1080 Dead
SocksChecker.py(110): dead += 1
SocksChecker.py(111): tested += 1
SocksChecker.py(112): return None
SocksChecker.py(81): if result is not None:
SocksChecker.py(85): self.task_queue.task_done()
--- modulename: queue, funcname: task_done
queue.py(64): with self.all_tasks_done:
--- modulename: threading, funcname: __enter__
threading.py(240): return self._lock.__enter__()
queue.py(65): unfinished = self.unfinished_tasks - 1
queue.py(66): if unfinished <= 0:
queue.py(70): self.unfinished_tasks = unfinished
--- modulename: threading, funcname: __exit__
threading.py(243): return self._lock.__exit__(*args)
SocksChecker.py(78): task = self.task_queue.get()
--- modulename: queue, funcname: get
queue.py(158): with self.not_empty:
--- modulename: threading, funcname: __enter__
threading.py(240): return self._lock.__enter__()
queue.py(159): if not block:
queue.py(162): elif timeout is None:
queue.py(163): while not self._qsize():
--- modulename: queue, funcname: _qsize
queue.py(203): return len(self.queue)
queue.py(164): self.not_empty.wait()
--- modulename: threading, funcname: wait
threading.py(286): if not self._is_owned():
--- modulename: threading, funcname: _is_owned
threading.py(257): if self._lock.acquire(0):
threading.py(261): return True
threading.py(288): waiter = _allocate_lock()
threading.py(289): waiter.acquire()
threading.py(290): self._waiters.append(waiter)
threading.py(291): saved_state = self._release_save()
--- modulename: threading, funcname: _release_save
threading.py(249): self._lock.release() # No state to save
threading.py(292): gotit = False
threading.py(293): try: # restore state no matter what (e.g., KeyboardInterrupt)
threading.py(294): if timeout is None:
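The trace just ends with a worker blocked inside queue.get(), which is what an idle worker normally looks like, and there is no exception, so it might help to dump the stack of every thread at the exact moment the script is frozen. This is only a minimal sketch using the standard library's faulthandler module; the SIGUSR1 trigger and the 300-second interval are my own choices (the signal-based variant is Unix-only), not something the script already does:

import faulthandler
import signal
import sys

# Dump every thread's stack to stderr when the process receives SIGUSR1.
# Start the checker, wait for the freeze, then run: kill -USR1 <pid>
faulthandler.register(signal.SIGUSR1, all_threads=True)

# Or dump all thread stacks every 300 seconds, so the last dump before the
# freeze is captured even without sending a signal.
faulthandler.dump_traceback_later(300, repeat=True, file=sys.stderr)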
Here is the code:
#Network
import configparser
import ctypes
import os
import subprocess
# Concurrency
import threading
import queue
import itertools
# Etc
import time
import pycurl as pycurl
from colorama import Fore, Style
config = configparser.ConfigParser()
config.read('config.ini')
# Global variables
in_directory = str(config.get('Socks Checker', 'in directory'))
out_filename = str(config.get('Socks Checker', 'out filename'))
test_url = config.get('Socks Checker', 'test url')
thread_number = int(config.get('Socks Checker', 'threads'))
timeout_value = int(config.get('Socks Checker', 'timeout'))
ok_msg = str(config.get('Socks Checker', 'okay message'))
fail_msg = str(config.get('Socks Checker', 'fail message'))
# Stats
good_proxy_num = itertools.count()
start_time = time.time()
end_time = time.time()
# Safe print()
mylock = threading.Lock()
# Printer
class PrintThread(threading.Thread):
    def __init__(self, queue, filename):
        threading.Thread.__init__(self)
        self.queue = queue
        self.output = open(filename, 'a')
        self.shutdown = False

    def write(self, line):
        print(line, file=self.output)

    def run(self):
        # Drains the result queue; note that write() is never called here, so
        # this thread never actually writes anything to self.output.
        while not self.shutdown:
            lines = self.queue.get()
            self.queue.task_done()

    def terminate(self):
        # (a sentinel-based alternative to this shutdown flag is sketched at
        # the end of this post)
        self.output.close()
        self.shutdown = True
# Processor
alive = 0
dead = 0
tested = 0
class ProcessThread(threading.Thread):
    def __init__(self, id, task_queue, out_queue):
        threading.Thread.__init__(self)
        self.task_queue = task_queue
        self.out_queue = out_queue
        self.id = id

    # ...
    def run(self):
        while True:
            task = self.task_queue.get()
            result = self.process(task)
            if result is not None:
                self.out_queue.put(result)
                next(good_proxy_num)
            self.task_queue.task_done()

    # Do the processing job here
    def process(self, task):
        global alive
        global dead
        global tested
        proxy = task
        log_msg = str("Trying Socks proxy%21s " % proxy)
        c1 = pycurl.Curl()
        c1.setopt(pycurl.URL, test_url)
        c1.setopt(pycurl.PROXY, proxy.split(':')[0])
        c1.setopt(pycurl.PROXYPORT, int(proxy.split(':')[1]))
        c1.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        # Only a connect timeout is set here; there is no overall deadline on
        # the transfer itself (see the sketch after this listing).
        c1.setopt(pycurl.CONNECTTIMEOUT, int(timeout_value))
        c1.setopt(pycurl.WRITEFUNCTION, lambda x: None)
        try:
            t1 = time.time()
            response = c1.perform()
            tested += 1
            t2 = time.time()
        except Exception:
            log_msg += "%s " % fail_msg
            print(Fore.LIGHTRED_EX + log_msg + Style.RESET_ALL)
            dead += 1
            tested += 1
            return None
        log_msg += ok_msg + " Response time: %d" % (int((t2 - t1) * 1000))
        print(Fore.LIGHTGREEN_EX + log_msg + Style.RESET_ALL)
        text_file = open(out_filename, "a")
        text_file.write(proxy + "\n")
        text_file.close()
        alive += 1
        #ctypes.windll.kernel32.SetConsoleTitleW(f"Proxy Checker [SOCKS (4/5)] | Total proxies Left: {input_queue.qsize()} | Tested: {tested} | Alive: {alive} | Dead: {dead}")
        return proxy

    def terminate(self):
        # Never called anywhere in this script; the workers are daemon threads
        # and simply die when the main thread exits.
        print("Thread #%d is down..." % self.id)
        for worker in workers:
            worker.join(timeout=4)
            worker.terminate()
#
# Main starts here
#
# Init some stuff
input_queue = queue.Queue()
result_queue = queue.Queue()
# Spawn worker threads
workers = []
for i in range(0, int(thread_number)):
    try:
        t = ProcessThread(i, input_queue, result_queue)
        t.setDaemon(True)
        t.start()
        workers.append(t)
    except RuntimeError:
        continue
# Spawn printer thread to print
f_printer = PrintThread(result_queue, out_filename)
f_printer.setDaemon(True)
try:
    f_printer.start()
except RuntimeError:
    pass
# Add some stuff to the input queue
start_time = time.time()
proxies = []
with open(config.get('Socks Checker', 'in directory'), 'r') as file:
    for lines in file:
        line = lines.strip()
        proxies.append(line)
for proxy in proxies:
    input_queue.put_nowait(proxy)
total_proxy_num = len(open(file.name).readlines())
if total_proxy_num == 0:
    exit()
# Wait for queue to get empty
input_queue.join()
result_queue.join()
# while (not input_queue.empty()):
# time.sleep(1)
# Shutdown
f_printer.terminate()
# Print some info
good_proxy_num = float(next(good_proxy_num))
end_time = time.time()
logs = open("Logs.txt", "a")
logs.write("----- [SOCKS (4/5)] -----\n")
logs.write("In: %d. Good: %d, that's %.2f%%.\n" % (total_proxy_num, good_proxy_num, 100.0 * good_proxy_num/total_proxy_num))
logs.write("Time elapsed: %.1f seconds.\n" % (end_time - start_time))
logs.close()
subprocess.Popen(["python3", "Checker/HTTPChecker.py"])
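One thing worth flagging in process() above: only CONNECTTIMEOUT is set, so a proxy that accepts the connection but then stalls mid-transfer has no overall deadline, and c1.perform() can block for a very long time. Whether that is what actually freezes the script is just a guess on my part, but a sketch of giving each check a hard cap would look like this (check_socks5 and TOTAL_TIMEOUT are placeholder names, not part of the original script):

import pycurl

TOTAL_TIMEOUT = 30  # placeholder: hard cap in seconds for the whole transfer

def check_socks5(proxy, test_url, connect_timeout):
    # Same options as in process() above, plus an overall transfer deadline.
    c = pycurl.Curl()
    c.setopt(pycurl.URL, test_url)
    c.setopt(pycurl.PROXY, proxy.split(':')[0])
    c.setopt(pycurl.PROXYPORT, int(proxy.split(':')[1]))
    c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
    c.setopt(pycurl.CONNECTTIMEOUT, int(connect_timeout))
    c.setopt(pycurl.TIMEOUT, TOTAL_TIMEOUT)  # caps the entire transfer, not just the connect
    c.setopt(pycurl.NOSIGNAL, 1)             # the libcurl docs recommend this when using threads
    c.setopt(pycurl.WRITEFUNCTION, lambda data: None)
    try:
        c.perform()
        return True
    except pycurl.error:
        return False
    finally:
        c.close()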
This isn't entirely our code; we just tried to modify it so it can run 24/7 without problems. The original script comes from here: https://www.proxyrack.com/multi-threaded-python-socks-proxy-checker/ . They also have an HTTP(S) checker, and that one works 100% of the time.
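Unrelated to the freeze, but relevant to 24/7 operation: in PrintThread, terminate() sets the shutdown flag while the thread is still blocked in get(), so the printer never wakes up to see the flag and only goes away because it is a daemon thread. A common alternative is to push a sentinel object onto the queue; this is only a sketch under that assumption and reuses the class name purely for illustration:

import queue
import threading

STOP = object()  # sentinel that tells the printer thread to exit

class PrintThread(threading.Thread):
    def __init__(self, result_queue, filename):
        threading.Thread.__init__(self)
        self.queue = result_queue
        self.filename = filename

    def run(self):
        # Open and close the file in the thread that writes to it, so no other
        # thread can close it while run() is still using it.
        with open(self.filename, 'a') as output:
            while True:
                line = self.queue.get()
                try:
                    if line is STOP:
                        break
                    print(line, file=output)
                finally:
                    self.queue.task_done()

    def terminate(self):
        self.queue.put(STOP)

# Usage sketch:
# result_queue = queue.Queue()
# f_printer = PrintThread(result_queue, out_filename)
# f_printer.start()
# ...
# f_printer.terminate()  # after input_queue.join() / result_queue.join()
# f_printer.join()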