![]() ![]() # Find the requested data and export to CSV, specifying a pattern if needed.
find_cmd = 'find ' + location + ' -name ' + pattern + ' -fprintf ' + outputFile + ' "%Y%M,%n,%u,%g,%s,%A+,%P\n"' outputFile = rootDir + '_directory_contents.csv' pattern = '*.py' # Use this if you want to only return certain filetypes # Global variables for directory being mapped (and that join is slow)
A pretty simple solution would be to run a couple of subprocess calls to export the files into CSV format: import subprocess So as you can see for yourself, the listdir version is much more efficient. print("Time taken: %.2fs"%(time.time()-start)) # 0.42s
for i in range(100): files = listFiles4("src") # walk and join ![]() print("Time taken: %.2fs"%(time.time()-start)) # 0.28s
for i in range(100): files = listFiles2("src") # listdir and join Since every example here is just using walk (with join), I'd like to show a nice example and comparison with listdir: import os, time
folder = walk.pop(0)+"/" items = os.listdir(folder) # items = folders + files
for i in items: i=folder+i (walk if os.path.isdir(i) else allFiles).append(i)
def listFiles2(root): # listdir/join (takes ~1.4x as long) (and uses '\\' instead)
folder = walk.pop(0) items = os.listdir(folder) # items = folders + files
for i in items: i=os.path.join(folder,i) (walk if os.path.isdir(i) else allFiles).append(i)
def listFiles3(root): # walk (takes ~1.5x as long)
for folder, folders, files in os.walk(root):
for file in files: allFiles+= # folder+"\\"+file still ~1.5x
def listFiles4(root): # walk/join (takes ~1.6x as long) (and uses '\\' instead)
for file in files: allFiles+=
for i in range(100): files = listFiles1("src") # warm up
for i in range(100): files = listFiles1("src") # listdir ![]()
0 Comments
Leave a Reply. Author: Write something about yourself. No need to be fancy, just an overview. Archives | Categories |