I need a Python 3 script that fetches the status from multiple RPis every minute by connecting to each one's socket, sending a JSON command, and grepping the status out of the reply.
Every 15 minutes the status is saved to a MySQL database; the rest of the time it only goes into memcached.
I currently have 10+ RPis. Is there a way to optimize this? We plan to grow to 100 RPis.
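The script is meant to run once per minute; a minimal scheduling sketch, assuming cron is used (the path /opt/monitor/poll_status.py is just a hypothetical placeholder):

* * * * * /usr/bin/python3 /opt/monitor/poll_status.py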
Here is my simplified thread:
import json
import queue
import socket
import threading
import time

class getStatus(threading.Thread):
    def __init__(self, rpi, status_queue):
        super().__init__()
        self.rpi = rpi
        self.status_queue = status_queue

    def run(self):
        r = b""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.settimeout(1)
            s.connect((self.rpi.get("ip_address"), 9999))
            cmd = json.dumps({"command": "status"})
            s.sendall(cmd.encode())
            # Read until the RPi closes the connection
            while True:
                recv = s.recv(1024)
                if not recv:
                    break
                r += recv
        except OSError:
            # Timeout or connection error: keep whatever was received so far
            pass
        finally:
            s.close()
        # The reply is NUL-terminated; keep only the part before the first \x00
        if b"\x00" in r:
            r = r[:r.index(b"\x00")]
        self.status_queue.put({"rpi": self.rpi, "status": r})
Here is how I run the threads:
if __name__ == "__main__":
    import_time = int(time.time())
    # Round the timestamp down to the start of the current minute
    exact_second = import_time % 60
    import_time = import_time - exact_second
    minute = int(time.strftime("%M", time.gmtime(import_time)))

    # Retrieve the RPi list from MySQL; each rpi dict has "id" and "ip_address"
    rpis = get_rpi_list()

    threads = []
    status_queue = queue.Queue()
    for rpi in rpis:
        threads.append(getStatus(rpi, status_queue))
    for t in threads:
        t.daemon = True
        t.start()
    for t in threads:
        t.join()

    # Sentinel so the loop below knows when the queue is drained
    status_queue.put(None)

    # Iterate the queue and store the statuses one by one
    for q in iter(status_queue.get, None):
        if q.get('status') is not None:
            # The latest status always goes to memcached
            save_to_mc(q.get('rpi').get('id'), q.get('status'))
            # On every 15th minute it is also saved to MySQL
            if (minute % 15) == 0:
                save_to_db(q.get('rpi').get('id'), q.get('status'), int(import_time))
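Since we plan to grow to about 100 RPis, would a bounded thread pool be a better fit than one thread per device? A minimal sketch of what I have in mind, assuming the exchange from getStatus.run() is factored into a plain function (fetch_status, poll_all, and max_workers=20 are hypothetical names and values, not part of my current code):

import json
import socket
from concurrent.futures import ThreadPoolExecutor

def fetch_status(rpi):
    # Same exchange as getStatus.run(): connect, send the JSON command,
    # read until the RPi closes the connection, trim at the first NUL byte.
    r = b""
    try:
        with socket.create_connection((rpi.get("ip_address"), 9999), timeout=1) as s:
            s.sendall(json.dumps({"command": "status"}).encode())
            while True:
                chunk = s.recv(1024)
                if not chunk:
                    break
                r += chunk
    except OSError:
        return {"rpi": rpi, "status": b""}
    if b"\x00" in r:
        r = r[:r.index(b"\x00")]
    return {"rpi": rpi, "status": r}

def poll_all(rpis, max_workers=20):
    # At most max_workers sockets are open at once, however many RPis there are
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        return list(pool.map(fetch_status, rpis))

The results would come back as the same dicts I currently put on the queue, so the memcached/MySQL part of the main block could stay as it is.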