I'm trying to write a Python script that will crawl through a directory, find all duplicate files, and report the duplicates. What is the best way to go about this?
import os, sys
def crawlDirectories(directoryToCrawl):
    crawledDirectory = [os.path.join(path, subname) for path, dirnames, filenames in os.walk(directoryToCrawl) for subname in dirnames + filenames]
    return crawledDirectory
#print 'Files crawled',crawlDirectories(sys.argv[1])
directoriesWithSize = {}
def getByteSize(crawledDirectory):
    for eachFile in crawledDirectory:
        size = os.path.getsize(eachFile)
        directoriesWithSize[eachFile] = size
    return directoriesWithSize
getByteSize(crawlDirectories(sys.argv[1]))
#print directoriesWithSize.values()
duplicateItems = {}
def compareSizes(dictionaryDirectoryWithSizes):
    for key,value in dictionaryDirectoryWithSizes.items():
        if directoriesWithSize.values().count(value) > 1:
            duplicateItems[key] = value
compareSizes(directoriesWithSize)
#print directoriesWithSize.values().count(27085)
compareSizes(directoriesWithSize)
print duplicateItems
Why does this throw the following error?
Traceback (most recent call last):
File "main.py", line 16, in <module>
getByteSize(crawlDirectories(sys.argv[1]))
File "main.py", line 12, in getByteSize
size = os.path.getsize(eachFile)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/genericpath.py", line 49, in getsize
OSError: [Errno 2] No such file or directory: '../Library/Containers/com.apple.ImageKit.RecentPictureService/Data/Documents/iChats'
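
My guess is that the path in the error is a broken symlink (or an entry that os.walk listed but that no longer resolves), so os.path.getsize fails when it tries to stat it. A minimal sketch of a guard, assuming that is the cause, would be to skip anything that is not a regular file before asking for its size:

def getByteSize(crawledDirectory):
    for eachFile in crawledDirectory:
        # os.path.isfile follows symlinks, so a broken symlink (or an entry
        # that has disappeared since os.walk listed it) returns False and is skipped.
        if not os.path.isfile(eachFile):
            continue
        directoriesWithSize[eachFile] = os.path.getsize(eachFile)
    return directoriesWithSize

Is skipping those entries the right way to handle this, or does the error point at a deeper problem with how I build the path list in crawlDirectories?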