
R foreach: Read and manipulate multiple files in parallel

I have 500 tar.xz files containing 2000 csv files. I need to untar a few tar files at a time (because of disk space), process them into a data.table, delete the csv files from disk, and then save the result as an RDS before moving on to the next few tar files.

My function works fine in serial, but in parallel it gets the files mixed up between the cores. Why is that?

Some sample data:

    for(j in 1:5){
      for(i in 1:5){
        a <- df[sample(x = 1:nrow(df), size = 50, replace = TRUE), ]
        write.csv(a, paste0("seed_", i, ".csv"))
        lf <- list.files(pattern = ".csv")
      }
      tar(tarfile = paste0("seed_", j, ".tar"), files = lf, compression = c("xz"), tar = "tar")
    }

Example code with foreach

require(dplyr)
require(tidyr)
require(foreach)
require(doParallel)
require(magrittr)

#List all tar files in directory
list_of_files<-list.files(pattern = ".tar")

packsINeed<-c("vroom","magrittr","dplyr","tidyr","doParallel")

#Start for loop

myCluster<-makeCluster(6,type="PSOCK")
registerDoParallel(myCluster)

foreach(i= 1:NROW(list_of_files),.packages = packsINeed)%dopar%{

  print(paste(list_of_files[i], "which is", i, "of", NROW(list_of_files) ))

  print("2. Untar .csv files inside")
  untar(tarfile = list_of_files[i], exdir = "tempOutputFiles")

  print("#3. Read in files and add up two columns")
  df<-vroom::vroom(list.files("tempOutputFiles/$.csv"), id="path")

  df$A<-df$B+df$C

  print("#4. save RDS")
  saveRDS(object = df, file = paste0(tools::file_path_sans_ext(list_of_files[i], compression = TRUE),".rds"))

  print("#5. Clean up files")
  .files<-list.files("tempOutputFiles",pattern=".csv")
  file.remove(basename(.files))
}

Using mclapply - same behaviour

require(dplyr)
require(tidyr)
require(foreach)
require(doParallel)
require(magrittr)

#List all tar files in directory
list_of_files<-list.files(pattern = ".tar")

myParFun<-function(filename){

  print(paste(filename))

  print("2. Untar all .csv files inside")
  untar(tarfile = filename, exdir = "tempOutputFiles")

  print("#3. Read in files and add up two columns")
  df<-vroom::vroom(list.files("tempOutputFiles/$.csv"), id="path")

  df$A<-df$B+df$C

  print("#4. save RDS")
  saveRDS(object = df, file = paste0(tools::file_path_sans_ext(filename, compression = TRUE),".rds"))

  print("#5. Clean up files")
  .files<-list.files("tempOutputFiles",pattern=".csv")
  file.remove(.files)
}

mclapply(FUN=myParFun, list_of_files, mc.cores=4)

Following Waldi's comment, I created a directory for each file in list_of_files and it now works fine. But is there a slicker way, for example using tempdir?

As suggested in the comments, the code below creates one directory per process / tar file (so the parallel workers no longer read and delete each other's CSVs from a shared folder), untars the archive, merges the CSVs into an .rds file and then deletes them.
Note that vroom seems to need the altrep = FALSE argument to avoid a permission-denied error when the files are deleted.

# Generate sample tars for test
write.csv(mtcars,'file1.csv')
write.csv(mtcars,'file2.csv')
write.csv(iris,'file3.csv')
write.csv(iris,'file4.csv')
tar('tar1.tar',files=c('file1.csv','file2.csv'),tar="tar")
tar('tar2.tar',files=c('file3.csv','file4.csv'),tar="tar")

require(dplyr)
require(tidyr)
require(foreach)
require(doParallel)
require(magrittr)

#List all tar files in directory
list_of_files<-list.files(pattern = "\\.tar")

packsINeed<-c("vroom","magrittr","dplyr","tidyr","doParallel")

#Start for loop

myCluster<-makeCluster(2,type="PSOCK")
registerDoParallel(myCluster) 

foreach(i= 1:NROW(list_of_files),.packages = packsINeed)%dopar%{
  print(paste(list_of_files[i], "which is", i, "of", NROW(list_of_files) ))
  
  print("2. Untar .csv files inside")
  fileout <- tools::file_path_sans_ext(list_of_files[i], compression = TRUE)
  exdir <- paste0("temp",fileout)
  untar(tarfile = list_of_files[i], exdir = exdir)
  
  print("#3. Read in files and add up two columns")
  df<-vroom::vroom(file.path(exdir,dir(exdir,"*.csv")),altrep = FALSE)
  
  # df$A<-df$B+df$C   # These columns don't exist in mtcars used as example
  
  print("#4. save RDS")
  
  saveRDS(object = df, file = file.path(exdir,paste0(fileout,".rds")))
  
  print("#5. Clean up files")
  
  .files<-list.files(exdir,pattern="\\.csv")
  
  file.remove(file.path(exdir,.files))
}

Not sure where the .rds files should go, so for now they are left in the temporary folders.
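
Regarding the tempdir follow-up question: below is a minimal, untested sketch (not part of the original answer) in which each task creates its own throwaway extraction directory with tempfile() and dir.create(). The directory lives under the worker's own tempdir() and is removed at the end, so no manual "temp<name>" directories are needed. It reuses list_of_files from above; everything else is base R plus vroom.

# Sketch only: per-task temporary extraction directory via tempfile()
require(foreach)
require(doParallel)

myCluster <- makeCluster(2, type = "PSOCK")
registerDoParallel(myCluster)

foreach(f = list_of_files, .packages = "vroom") %dopar% {
  # unique path under this worker's tempdir(), e.g. .../Rtmpxxxxxx/untar_...
  exdir <- tempfile(pattern = "untar_")
  dir.create(exdir)

  untar(tarfile = f, exdir = exdir)

  # altrep = FALSE so the extracted CSVs can be deleted afterwards (see note above)
  df <- vroom::vroom(list.files(exdir, pattern = "\\.csv$", full.names = TRUE),
                     altrep = FALSE)

  saveRDS(df, paste0(tools::file_path_sans_ext(f, compression = TRUE), ".rds"))

  # delete the extracted CSVs together with their directory
  unlink(exdir, recursive = TRUE)
  NULL
}

stopCluster(myCluster)

Here the .rds files are written next to the tar files in the working directory; adjust the saveRDS path if they should go somewhere else.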
