I'm writing a Java application that collects file metadata for every file in a directory and exports it to a CSV file. The application works fine when the number of files is small, but if I point it at a path containing 320,000 files across all directories and subdirectories, it takes forever. Is there any way to speed this up?
private void extractDetailsCSV(File libSourcePath, String extractFile) throws ScraperException {
    log.info("Inside extract details csv");
    try {
        FileMetadataUtil fileUtil = new FileMetadataUtil();
        File[] listOfFiles = libSourcePath.listFiles();
        for (int i = 0; i < listOfFiles.length; i++) {
            if (listOfFiles[i].isDirectory()) {
                extractDetailsCSV(listOfFiles[i], extractFile);
            }
            if (listOfFiles[i].isFile()) {
                ScraperOutputVO so = new ScraperOutputVO();
                Path path = Paths.get(listOfFiles[i].getAbsolutePath());
                so.setFilePath(listOfFiles[i].getParent());
                so.setFileName(listOfFiles[i].getName());
                so.setFileType(getFileType(listOfFiles[i].getAbsolutePath()));
                BasicFileAttributes basicAttribs = fileUtil.getBasicFileAttributes(path);
                if (basicAttribs != null) {
                    so.setDateCreated(basicAttribs.creationTime().toString().substring(0, 10) + " " + basicAttribs.creationTime().toString().substring(11, 16));
                    so.setDateLastModified(basicAttribs.lastModifiedTime().toString().substring(0, 10) + " " + basicAttribs.lastModifiedTime().toString().substring(11, 16));
                    so.setDateLastAccessed(basicAttribs.lastAccessTime().toString().substring(0, 10) + " " + basicAttribs.lastAccessTime().toString().substring(11, 16));
                }
                so.setFileSize(String.valueOf(listOfFiles[i].length()));
                so.setAuthors(fileUtil.getOwner(path));
                so.setFolderLink(listOfFiles[i].getAbsolutePath());
                writeCsvFileDtl(extractFile, so);
                so.setFileName(listOfFiles[i].getName());
                noOfFiles++;
            }
        }
    } catch (Exception e) {
        log.error("IOException while setting up columns" + e.fillInStackTrace());
        throw new ScraperException("IOException while setting up columns", e.fillInStackTrace());
    }
    log.info("Done extracting details to csv file");
}
public void writeCsvFileDtl(String extractFile, ScraperOutputVO scraperOutputVO) throws ScraperException {
    try {
        FileWriter writer = new FileWriter(extractFile, true);
        writer.append(scraperOutputVO.getFilePath());
        writer.append(',');
        writer.append(scraperOutputVO.getFileName());
        writer.append(',');
        writer.append(scraperOutputVO.getFileType());
        writer.append(',');
        writer.append(scraperOutputVO.getDateCreated());
        writer.append(',');
        writer.append(scraperOutputVO.getDateLastModified());
        writer.append(',');
        writer.append(scraperOutputVO.getDateLastAccessed());
        writer.append(',');
        writer.append(scraperOutputVO.getFileSize());
        writer.append(',');
        writer.append(scraperOutputVO.getAuthors());
        writer.append(',');
        writer.append(scraperOutputVO.getFolderLink());
        writer.append('\n');
        writer.flush();
        writer.close();
    } catch (IOException e) {
        log.info("IOException while writing to csv file" + e.fillInStackTrace());
        throw new ScraperException("IOException while writing to csv file", e.fillInStackTrace());
    }
}
}
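
One direction I'm considering (but haven't measured) is to keep a single BufferedWriter open for the whole run and let Files.walkFileTree do the recursion, since the walk already hands each file's BasicFileAttributes to the visitor, so there is no separate attribute lookup per file and the CSV file isn't reopened and closed for every row. Below is a rough, untested sketch of what I mean, writing the columns directly instead of going through ScraperOutputVO; formatTime is just a stand-in helper for the substring formatting above, and log, noOfFiles, getFileType and ScraperException are assumed to stay as they are in my class.

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;

// Sketch of a single-pass version: one writer for the whole run,
// one tree walk, attributes taken from the walk itself.
private void extractDetailsCSV(File libSourcePath, String extractFile) throws ScraperException {
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(extractFile, true))) {
        Files.walkFileTree(libSourcePath.toPath(), new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
                // attrs is supplied by the walk, so no extra metadata call per file
                writer.append(path.getParent().toString()).append(',')
                      .append(path.getFileName().toString()).append(',')
                      .append(getFileType(path.toString())).append(',')
                      .append(formatTime(attrs.creationTime())).append(',')
                      .append(formatTime(attrs.lastModifiedTime())).append(',')
                      .append(formatTime(attrs.lastAccessTime())).append(',')
                      .append(String.valueOf(attrs.size())).append(',')
                      .append(Files.getOwner(path).getName()).append(',')
                      .append(path.toAbsolutePath().toString());
                writer.newLine();
                noOfFiles++;
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        log.error("IOException while writing to csv file", e);
        throw new ScraperException("IOException while writing to csv file", e);
    }
}

// Stand-in for the substring(0, 10) + " " + substring(11, 16) formatting used above
private String formatTime(FileTime time) {
    String s = time.toString();
    return s.substring(0, 10) + " " + s.substring(11, 16);
}

My main suspicion is the new FileWriter(extractFile, true) per row in writeCsvFileDtl, but I'm not sure that's the whole story for 320,000 files.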