请教以下问题:`data` 是由 `save` 回调函数修改的共享资源;`determine_ixmac` 是每个 future(并发任务)要执行的函数。
# Shared result store, keyed app -> namespace -> device name.
# Each device starts with its own fresh empty dict (filled in by `save`).
data = {
    "ip_spotlight": {
        namespace: {device: {} for device in INFO["dev"]}
        for namespace in ("ixmac", "ipacc", "bgpic")
    }
}
def save(app, namespace, netelement, job):
    """Done-callback: store a finished future's result in the shared ``data`` tree.

    ``app``/``namespace``/``netelement`` select the slot; ``job`` is the
    completed future whose ``result()`` is recorded.
    """
    result = job.result()
    data[app][namespace][netelement] = result
# Fan the per-device polls out to one worker process per CPU core.
# NOTE(review): this snippet is truncated — the add_done_callback call
# below is missing its closing parenthesis in the quoted question.
with concurrent.futures.ProcessPoolExecutor(
    max_workers=multiprocessing.cpu_count()
) as executor:
    for k, v in INFO["dev"].items():
        # One future per device; determine_ixmac runs in a worker process.
        job_ixmac = executor.submit(
            determine_ixmac, k, v["dev_ip"], v["snmp_comm"]
        )
        # The callback runs in the submitting (main) process when the
        # future finishes, partially applied with the device key.
        job_ixmac.add_done_callback(
            functools.partial(save, "ip_spotlight", "ixmac", k)
但是,当所有 future 都完成后,程序就卡死了(按 Ctrl+C 中断后得到如下回溯):
Traceback (most recent call last):
File "/usr/lib64/python3.6/multiprocessing/process.py", line 258, in _bootstrap
self.run()
File "/usr/lib64/python3.6/multiprocessing/process.py", line 93, in run
self._target(*self._args, **self._kwargs)
File "/usr/lib64/python3.6/concurrent/futures/process.py", line 169, in _process_worker
call_item = call_queue.get(block=True)
File "/usr/lib64/python3.6/multiprocessing/queues.py", line 93, in get
with self._rlock:
File "/usr/lib64/python3.6/multiprocessing/synchronize.py", line 96, in __enter__
return self._semlock.__enter__()
KeyboardInterrupt
我也尝试过加锁,但是没有效果:
# BUG FIX: the original wrote `mlock = multiprocessing.Lock` — that binds
# the Lock *class/factory*, not a lock instance, so `with mlock:` fails
# instead of serializing access.  Instantiate it with `()`.
mlock = multiprocessing.Lock()


def save(app, namespace, netelement, job):
    """Done-callback: record a finished future's result under the lock.

    ``app``/``namespace``/``netelement`` select the slot in the shared
    ``data`` tree; ``job`` is the completed future.
    NOTE(review): add_done_callback callbacks run in the main process, so
    a lock guards only against concurrent callbacks there — the lock is
    not what causes or cures the worker-side hang shown in the traceback.
    """
    with mlock:
        data[app][namespace][netelement] = job.result()
您能告诉我们缺少什么吗?