A very confusing problem with Python multiprocessing

Posted: 2019-06-11 03:13:13

Tags: python

I am running into a problem while filtering a file. If I run this code for the first time, it is slow all the way until global_step reaches 100000. If I then kill the process and run it again, it reaches global_step = 100000 very quickly and only slows down after that. I cannot figure out why this happens. Can anyone give me a suggestion? (Sorry, my English is not very good.)

from multiprocessing import Queue, Process, Value
read_dir = '..'
write_dir = '..'
dict_dir = '..'
Q_read = Queue(10000)          # lines waiting to be processed
Q_write = Queue(10000)         # processed lines waiting to be written
global_step = Value('i', 0)    # shared progress counter
def Push_queue(Q_read, r_dir):
    # Producer: push every line of the input file onto the read queue.
    f = open(r_dir, 'r')
    lines = f.readlines()
    for line in lines:
        Q_read.put(line)
    f.close()

def Write_from_Queue(Q_write, w_dir):
    # Writer: drain the write queue to the output file; when no line arrives
    # for 30 seconds, queue.Empty is raised, the file is closed, and we return.
    fw = open(w_dir, 'w')
    while True:
        try:
            line = Q_write.get(timeout=30)
            fw.write(line)
            fw.flush()
        except:
            fw.close()
            return



def asy_run(Q_read, Q_write, global_step, char2ind_dict):
    # Worker: take a line from the read queue, process it, and put the result
    # on the write queue; the process dies with queue.Empty once the read
    # queue has been empty for 30 seconds.
    while True:
        line = Q_read.get(timeout=30)
        #########################
        # line = ... do something (processing omitted)
        #########################
        Q_write.put(line)
        global_step.value += 1

def main_run(num, char2ind_dict):
    # One producer reading the file, `num` workers, one writer.
    process_list = []
    process_push = Process(target=Push_queue, args=(Q_read, read_dir))
    process_push.start()
    for i in range(num):
        process_i = Process(target=asy_run, args=(Q_read, Q_write, global_step, char2ind_dict))
        process_i.start()
        process_list.append(process_i)
    process_write = Process(target=Write_from_Queue, args=(Q_write, write_dir))
    process_write.start()
    process_push.join()
    # Note: multiprocessing.Queue has no join() method (only JoinableQueue does),
    # so the next two calls raise AttributeError in the main process.
    Q_read.join()
    Q_write.join()
    for p in process_list:
        p.join()
    process_write.join()

if __name__ =='__main__':
    char2ind_dict = get_dict(dict_dir)
    main_run(50, char2ind_dict)
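
For reference, below is a minimal sketch (not the original code) of the same producer / worker / writer layout, but shut down with sentinel values instead of get(timeout=...). The SENTINEL marker, the run() driver, and the example file paths are illustrative assumptions; line.upper() stands in for whatever per-line processing the real code does, and the global_step counter is omitted for brevity.

from multiprocessing import Process, Queue

SENTINEL = None  # marker meaning "no more work" for a consumer

def producer(q_read, in_path, num_workers):
    # Read the input file and push every line onto the read queue,
    # then send one sentinel per worker so each of them can exit.
    with open(in_path, 'r') as f:
        for line in f:
            q_read.put(line)
    for _ in range(num_workers):
        q_read.put(SENTINEL)

def worker(q_read, q_write):
    # Process lines until a sentinel arrives, then tell the writer we are done.
    while True:
        line = q_read.get()
        if line is SENTINEL:
            q_write.put(SENTINEL)
            break
        q_write.put(line.upper())  # stand-in for the real per-line processing

def writer(q_write, out_path, num_workers):
    # Write results until every worker has sent its sentinel.
    finished = 0
    with open(out_path, 'w') as f:
        while finished < num_workers:
            line = q_write.get()
            if line is SENTINEL:
                finished += 1
            else:
                f.write(line)

def run(num_workers, in_path, out_path):
    q_read, q_write = Queue(10000), Queue(10000)
    p_push = Process(target=producer, args=(q_read, in_path, num_workers))
    p_write = Process(target=writer, args=(q_write, out_path, num_workers))
    workers = [Process(target=worker, args=(q_read, q_write))
               for _ in range(num_workers)]
    for p in [p_push, p_write] + workers:
        p.start()
    for p in [p_push] + workers + [p_write]:
        p.join()

if __name__ == '__main__':
    run(4, 'input.txt', 'output.txt')  # hypothetical paths

With sentinels, every process knows exactly when to stop, so nothing depends on a 30-second timeout and the shutdown order is deterministic.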

0 Answers:

No answers