在使用 multiprocessing 模块时，如何在 Python 中让一个进程休眠而不影响其他进程？当队列中少于 10 个元素时，我想让 getDataProcess 休眠一段时间；在此期间，主进程会继续向队列填充更多数据，之后 while 循环继续执行。
import multiprocessing as mp
import os
import csv
import sys
import subprocess
import time
# Most recently fetched row; NOTE(review): getData assigns a *local* `cur`,
# so this module-level list is effectively unused — confirm before removing.
cur=[]
# Buffer of up to 100 rows that showData prints.
# NOTE(review): with multiprocessing each child process gets its own copy of
# this list — it is NOT shared memory; verify that is the intended behavior.
dataList=[]
def getData(queue):
    """Consumer process: drain *queue* into temp.csv.

    Buffers the first 100 rows in the module-global ``dataList`` and calls
    ``showData`` once the buffer is full.  When the queue runs low (fewer
    than 10 items) and the rename probe fails, this process sleeps so the
    producer (the ``__main__`` block) can refill the queue — ``time.sleep``
    blocks only this process, not the producer.

    :param queue: ``multiprocessing.Queue`` of row dicts produced by main.
    """
    global dataList
    print('get data started')
    with open('temp.csv', 'w+') as file:
        writer = csv.DictWriter(file,
                                fieldnames=['index', 'name', 'fileaccess'],
                                lineterminator='\n')
        # NOTE(review): Queue.empty()/qsize() are documented as unreliable
        # across processes; the loop can still exit for good if the producer
        # is momentarily slower than this consumer.
        while not queue.empty():
            cur = queue.get()
            writer.writerow(cur)
            if len(dataList) <= 100:
                dataList.append(cur)
                print('appending to datalist', end='\r')
            if len(dataList) == 100:
                showData(queue)
            if queue.qsize() < 10:
                print('danger race condition' + str(queue.qsize()))
                if os.path.exists('temp.csv'):
                    try:
                        # Renaming the file onto itself: presumably a probe for
                        # "is the file still locked/open" (fails on Windows
                        # while open) — TODO confirm intent.
                        os.rename('temp.csv', 'temp.csv')
                        print('may have completed reading')
                    except OSError:
                        print('sleeping to prevent end ')
                        # Fix: enable the sleep that was commented out.  Only
                        # this process pauses; the producer keeps filling the
                        # queue meanwhile, which is exactly what was asked.
                        time.sleep(10)
def showData(queue):
    """Print the buffered rows and warn when the queue is running low.

    :param queue: ``multiprocessing.Queue`` shared with the producer; only
        its (unreliable, advisory) ``qsize()`` is consulted here.
    """
    print('showdata started')
    global dataList
    print(dataList)
    # NOTE(review): qsize() is approximate across processes; this is a
    # diagnostic warning, not a synchronization mechanism.
    if queue.qsize() < 100:
        print('danger race condition')
if __name__=="__main__":
try:
filename=sys.argv[1]
key=sys.argv[2]
except:
print('arguments not provided')
queue = mp.Queue()
getDataProcess=mp.Process(target=getData,args=(queue,))
getDataProcessStatus=False
showDataProcess=mp.Process(target=showData,args=(queue,))
showDataProcessStatus=False
with open('archive/data.csv') as file:
matches=0
reader=csv.DictReader(file,fieldnames=['index','name','fileaccess'],delimiter=',')
header=next(reader)
for i,row in enumerate(reader):
if(row['fileaccess'][0]=='d'):
matches+=1
queue.put(row)
if(getDataProcessStatus==False):
getDataProcess.start()
getDataProcessStatus=True
print('getdata started')
# if(matches>200):
# if(showDataProcessStatus==False):
# print('show data started')
# showDataProcess.start()
# showDataProcessStatus=True
答案 0（得分：0）
使用：
time.sleep(delay)
这只会让调用它的那个进程进入休眠状态，而不会影响其他进程。