This script reads lines from a txt file; each line is used as part of an external command:
for every line in the txt file do
os.system(command)
Is it possible to use multiprocessing with this to improve speed?
i=0
x=[]
for file in os.listdir(carpeta):
if file.endswith('.json'):
x.append(file)
i+=1
print('the total number of files: ' +str(i))
archivo='./temp.txt'
strm=open(archivo,encoding='UTF8')
min=1
for line in strm:
if min > i:
min=1
origen=args.o+"/"+line.rstrip('\n')
destino=re.sub("\/(?:.(?!\/))+$","",args.d+"/"+line.rstrip('\n'))
comando=args.r+" copy \""+origen+"\" \""+destino+"\" --drive-service-account-file \""+carpeta+"\\"+str(min)+".json\" --size-only"
os.system(comando)
min+=1
You can use the multiprocessing module
to launch the OS processes from a list of commands.
Something like this:
import multiprocessing, os, re
from multiprocessing import Process, freeze_support
def getcmdlist():
    """Build the list of rclone copy commands, one per line of ./temp.txt.

    Rotates round-robin through the service-account JSON files found in
    the module-level directory ``carpeta`` so each command uses a
    different credential file (1.json, 2.json, ...).

    Relies on module-level names defined elsewhere in the script:
    ``carpeta`` (credentials directory) and ``args`` (CLI options:
    ``.o`` source root, ``.d`` destination root, ``.r`` rclone binary).

    Returns:
        list[str]: one fully-assembled shell command per input line.
    """
    # Count the available service-account credential files.
    total_accounts = 0
    json_files = []
    for file in os.listdir(carpeta):
        if file.endswith('.json'):
            json_files.append(file)
            total_accounts += 1
    print('the total number of files: ' + str(total_accounts))

    archivo = './temp.txt'
    cmdlist = []
    acct = 1  # renamed from `min`, which shadowed the builtin
    with open(archivo, encoding='UTF8') as strm:  # close the file deterministically
        for line in strm:
            if acct > total_accounts:  # wrap back to the first account
                acct = 1
            origen = args.o + "/" + line.rstrip('\n')
            # Strip the final path component to get the destination directory.
            destino = re.sub(r"\/(?:.(?!\/))+$", "", args.d + "/" + line.rstrip('\n'))
            comando = (args.r + " copy \"" + origen + "\" \"" + destino
                       + "\" --drive-service-account-file \"" + carpeta + "\\"
                       + str(acct) + ".json\" --size-only")
            cmdlist.append(comando)
            acct += 1
    return cmdlist
if __name__ == '__main__':
    # Required on Windows when the script is frozen into an executable.
    freeze_support()
    # Assemble every rclone invocation up front, then fan them out to a
    # small worker pool; each worker hands one command string to os.system.
    commands = getcmdlist()
    with multiprocessing.Pool(processes=4) as pool:
        pool.map(os.system, commands)
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address. For any questions, please contact: yoyou2525@163.com.