我想用子进程导出多个txt文件。
# Build the list of STB IP addresses on this rack.
# NOTE(review): rack_number / stb_list / subprocess are defined outside this
# excerpt — confirm `import subprocess` exists at the top of the file.
ip_address = ['172.16.{}.{}'.format(rack_number, box_number) for box_number in stb_list]
ip_address = ['10.10.8.89']  # Testing for single ip

# Connect to the boxes and run a full export against each one.
# (No `if len(ip_address) > 0` guard needed — iterating an empty list is a no-op.)
for ip in ip_address:
    action = 'FullExport'
    menu_action = 'all'
    arg_list = [('C:\\Users\\yke01\\Documents\\StormTest\\Scripts\\Completed'
                 '\\Utils\\UPNP_Client_Cmd_Line.py')]
    # Each option must be a single argv element ('--key=value').  Appending
    # ' --action=' and the value as separate items (with a leading space)
    # reaches the child as two unrelated arguments.
    arg_list.append('--action={}'.format(action))
    arg_list.append('--ip={}'.format(ip))
    arg_list.append('--menu={}'.format(menu_action))
    # One output file per box, so exporting several IPs does not overwrite
    # the previous box's results.
    filename = 'output_{}.txt'.format(ip.replace('.', '_'))
    with open(filename, 'w+') as output:
        # Original code nested arg_list inside another list
        # (["python", arg_list]) which raises TypeError, and also launched
        # a duplicate Popen(..., shell=True).  A single call with a flat
        # argv list (shell=False) is both correct and safer.
        subprocess.call(['python'] + arg_list, stdout=output)
使用单个IP地址,我可以导出output.txt。我正在为最多16个不同的ips编写一个脚本。
ip_address = ['172.16.1.1', '172.16.1.2', '172.16.1.3', '172.16.1.4']
例如,对于上面的ip地址,我想导出4个txt文件。任何帮助,将不胜感激。!
答案 0(得分:0)
**1. 一个接一个 —— 串行管道**
# Serial: run the export for each box one after another, giving every IP
# its own numbered output file so results are never overwritten.
for i, ip in enumerate(ip_address):
    filename = 'output_' + str(i) + '.txt'
    # ... build arg_list for this ip exactly as in the question ...
    with open(filename, 'w+') as output:
        # arg_list is already a list; nesting it inside another list
        # (["python", arg_list]) raises TypeError because every argv
        # element must be a string.  Concatenate the lists instead.
        subprocess.call(['python'] + arg_list, stdout=output)
**2. 并行管道**
使用multiprocessing.Pool中的apply_async运行异步
这是一个完整的示例
import multiprocessing
import subprocess  # the original example used subprocess without importing it

ip_address = ['172.16.1.1', '172.16.1.2', '172.16.1.3', '172.16.1.4']


def runProcess(arg_list, output_file):
    """Run one export command and capture its stdout in output_file.

    arg_list: flat list of string arguments passed to the python child.
    output_file: path of the text file that receives the command's stdout.
    """
    print('Will run:\n %s\nand save to \n %s\n' % (arg_list, output_file))
    with open(output_file, 'w+') as output:
        # The original body also launched subprocess.Popen(arg_list,
        # shell=True) here, which executed the command a second time —
        # one call per task is enough.  ["python", arg_list] nested a
        # list inside the argv list (TypeError); concatenate instead and
        # keep shell=False.
        subprocess.call(['python'] + arg_list, stdout=output)


# The __main__ guard is mandatory for multiprocessing on Windows (the
# script paths above show this targets Windows): worker processes re-import
# this module, and without the guard they would recursively spawn pools.
if __name__ == '__main__':
    tasks = []
    # Prepare one (arg_list, output_file) pair per box.
    for i, ip in enumerate(ip_address):
        filename = 'output_' + str(i) + '.txt'
        action = 'FullExport'
        menu_action = 'all'
        arg_list = [('C:\\Users\\yke01\\Documents\\StormTest\\Scripts\\Completed'
                     '\\Utils\\UPNP_Client_Cmd_Line.py')]
        # '--key=value' must be a single argv element; separate ' --key='
        # and value items reach the child as distinct arguments.
        arg_list.append('--action={}'.format(action))
        arg_list.append('--ip={}'.format(ip))
        arg_list.append('--menu={}'.format(menu_action))
        tasks.append((arg_list, filename))  # pairs: (arg_list, output_file)

    # Fan the tasks out across one worker per CPU core.
    numthreads = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(numthreads)
    results = [pool.apply_async(runProcess, t) for t in tasks]
    pool.close()
    pool.join()