334

Any ideas on an async directory search using fs.readdir? I realise that we could bring in recursion and call the read-directory function with the next directory to read, but I'm a little worried about it not being async...

Any ideas? I've looked at node-walk, which is great, but it doesn't give me just the files in an array, like readdir does. Although...

Looking for output like...

['file1.txt', 'file2.txt', 'dir/file3.txt']

42 Answers

422

There are basically two ways of accomplishing this. In an async environment you'll notice that there are two kinds of loops: serial and parallel. A serial loop waits for one iteration to complete before it moves on to the next iteration; this guarantees that every iteration of the loop completes in order. In a parallel loop, all the iterations are started at the same time, and one may complete before another, but it is much faster than a serial loop. So in this case, it is probably better to use a parallel loop, because it doesn't matter what order the walk completes in, as long as it completes and returns the results (unless you want them in order).

A parallel loop would look like this:

var fs = require('fs');
var path = require('path');
var walk = function(dir, done) {
  var results = [];
  fs.readdir(dir, function(err, list) {
    if (err) return done(err);
    var pending = list.length;
    if (!pending) return done(null, results);
    list.forEach(function(file) {
      file = path.resolve(dir, file);
      fs.stat(file, function(err, stat) {
        if (stat && stat.isDirectory()) {
          walk(file, function(err, res) {
            results = results.concat(res);
            if (!--pending) done(null, results);
          });
        } else {
          results.push(file);
          if (!--pending) done(null, results);
        }
      });
    });
  });
};

A serial loop would look like this:

var fs = require('fs');
var path = require('path');
var walk = function(dir, done) {
  var results = [];
  fs.readdir(dir, function(err, list) {
    if (err) return done(err);
    var i = 0;
    (function next() {
      var file = list[i++];
      if (!file) return done(null, results);
      file = path.resolve(dir, file);
      fs.stat(file, function(err, stat) {
        if (stat && stat.isDirectory()) {
          walk(file, function(err, res) {
            results = results.concat(res);
            next();
          });
        } else {
          results.push(file);
          next();
        }
      });
    })();
  });
};

And to test it out on your home directory (WARNING: the results list will be huge if you have a lot of stuff in your home directory):

walk(process.env.HOME, function(err, results) {
  if (err) throw err;
  console.log(results);
});

EDIT: Improved the examples.

Answered 2011-04-29T04:29:35.503
287

This one uses the maximum amount of new, buzzworthy features available in Node 8, including Promises, util/promisify, destructuring, async-await, map+reduce and more, making your co-workers scratch their heads as they try to figure out what is going on.

Node 8+

No external dependencies.

const { promisify } = require('util');
const { resolve } = require('path');
const fs = require('fs');
const readdir = promisify(fs.readdir);
const stat = promisify(fs.stat);

async function getFiles(dir) {
  const subdirs = await readdir(dir);
  const files = await Promise.all(subdirs.map(async (subdir) => {
    const res = resolve(dir, subdir);
    return (await stat(res)).isDirectory() ? getFiles(res) : res;
  }));
  return files.reduce((a, f) => a.concat(f), []);
}

Usage

getFiles(__dirname)
  .then(files => console.log(files))
  .catch(e => console.error(e));

Node 10.10+

Updated with even more features for Node 10+:

const { resolve } = require('path');
const { readdir } = require('fs').promises;

async function getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  const files = await Promise.all(dirents.map((dirent) => {
    const res = resolve(dir, dirent.name);
    return dirent.isDirectory() ? getFiles(res) : res;
  }));
  return Array.prototype.concat(...files);
}

Note that, as of Node 11.15.0, you can use files.flat() instead of Array.prototype.concat(...files) to flatten the files array.
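
For reference, a minimal sketch of that change against the Node 10.10+ version above; only the final line differs:

const { resolve } = require('path');
const { readdir } = require('fs').promises;

async function getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  const files = await Promise.all(dirents.map((dirent) => {
    const res = resolve(dir, dirent.name);
    return dirent.isDirectory() ? getFiles(res) : res;
  }));
  // flat() at its default depth of 1 is enough here, since each recursive
  // call already returns a flat array.
  return files.flat();
}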

Node 11+

If you want to blow everybody's mind completely, you can use the following version with async iterators. In addition to being really cool, it also allows consumers to pull out results one at a time, making it better suited for really large directories.

const { resolve } = require('path');
const { readdir } = require('fs').promises;

async function* getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  for (const dirent of dirents) {
    const res = resolve(dir, dirent.name);
    if (dirent.isDirectory()) {
      yield* getFiles(res);
    } else {
      yield res;
    }
  }
}

Usage has changed, because the return type is now an async iterator instead of a promise:

;(async () => {
  for await (const f of getFiles('.')) {
    console.log(f);
  }
})()

If anybody is interested, I've written more about async iterators here: https://qwtel.com/posts/software/async-generators-in-the-wild/

Answered 2017-07-16T16:42:04.073
131

Just in case anyone finds it useful, I also put together a synchronous version.

var fs = require('fs');

var walk = function(dir) {
    var results = [];
    var list = fs.readdirSync(dir);
    list.forEach(function(file) {
        file = dir + '/' + file;
        var stat = fs.statSync(file);
        if (stat && stat.isDirectory()) { 
            /* Recurse into a subdirectory */
            results = results.concat(walk(file));
        } else { 
            /* Is a file */
            results.push(file);
        }
    });
    return results;
}

Tip: use fewer resources when filtering, by filtering inside this function itself. E.g. replace results.push(file); with the code below. Adjust as needed:

    file_type = file.split(".").pop();
    file_name = file.split(/(\\|\/)/g).pop();
    if (file_type == "json") results.push(file);
Answered 2013-05-22T06:02:42.740
91

A. Have a look at the file module. It has a function called walk:

file.walk(start, callback)

"navigates a file tree, calling a callback for each directory, passing in (null, dirPath, dirs, files)."

This may be for you! And yes, it is async. However, I think you would have to aggregate the full paths yourself, if you needed them.
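
If you do need full paths, here is a rough sketch of aggregating them yourself, based only on the callback signature quoted above (treat the exact arguments and completion behaviour of the file module as an assumption and check its docs):

var file = require('file');
var path = require('path');

var results = [];
file.walk('somepath/', function (err, dirPath, dirs, files) {
  if (err) throw err;
  // Resolve each file name against the directory it was reported in.
  (files || []).forEach(function (f) {
    results.push(path.resolve(dirPath, f));
  });
});
// Note: this sketch does not show a "walk finished" signal; consult the
// module's documentation for how to detect completion.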

B. Another alternative, and even one of my favourites: use the unix find for that. Why do again something that has already been programmed? Maybe not exactly what you need, but still worth checking out:

var execFile = require('child_process').execFile;
execFile('find', [ 'somepath/' ], function(err, stdout, stderr) {
  var file_list = stdout.split('\n');
  /* now you've got a list with full path file names */
});

find has a nice built-in caching mechanism that makes subsequent searches very fast, as long as only a few folders have changed.

Answered 2011-06-15T13:44:11.507
45

Another nice npm package is glob.

npm install glob

It is very powerful and should cover all your recursive needs.

Edit:

Actually, I wasn't perfectly happy with glob, so I created readdirp.

I'm very confident that its API makes finding files and directories recursively and applying specific filters very easy.

Read through its documentation to get a better idea of what it does, and install it via:

npm install readdirp
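
As an illustration, a minimal sketch using readdirp's promise API (this assumes readdirp v3+; older versions exposed a callback/stream interface instead, so check the docs for your installed version):

const readdirp = require('readdirp');

(async () => {
  // fileFilter is optional and is shown only to illustrate the built-in filtering.
  const entries = await readdirp.promise('some/dir', { fileFilter: '*.js' });
  console.log(entries.map(entry => entry.path));
})();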

Answered 2012-06-01T12:25:57.543
44

I suggest using node-glob to accomplish that task.

var glob = require( 'glob' );  

glob( 'dirname/**/*.js', function( err, files ) {
  console.log( files );
});
Answered 2015-03-10T23:58:19.007
17

If you want to use an npm package, wrench is pretty good.

var wrench = require("wrench");

var files = wrench.readdirSyncRecursive("directory");

wrench.readdirRecursive("directory", function (error, files) {
    // live your dreams
});

Edit (2018):
For anyone reading this recently: the author deprecated this package in 2015:

wrench.js has been deprecated, and hasn't been updated in quite some time. I heavily recommend using fs-extra to do any extra filesystem operations.

Answered 2012-04-24T17:45:03.297
10

I loved the answer from chjj above, and would not have been able to create my version of the parallel loop without that start.

var fs = require("fs");

var tree = function(dir, done) {
  var results = {
        "path": dir
        ,"children": []
      };
  fs.readdir(dir, function(err, list) {
    if (err) { return done(err); }
    var pending = list.length;
    if (!pending) { return done(null, results); }
    list.forEach(function(file) {
      fs.stat(dir + '/' + file, function(err, stat) {
        if (stat && stat.isDirectory()) {
          tree(dir + '/' + file, function(err, res) {
            results.children.push(res);
            if (!--pending){ done(null, results); }
          });
        } else {
          results.children.push({"path": dir + "/" + file});
          if (!--pending) { done(null, results); }
        }
      });
    });
  });
};

module.exports = tree;

I have also created a Gist. Comments are welcome. I am still starting out in the NodeJS realm, so that is one way I hope to learn more.

Answered 2012-09-14T00:16:23.013
10

Using recursion:

var fs = require('fs')
var path = process.cwd()
var files = []

var getFiles = function(path, files){
    fs.readdirSync(path).forEach(function(file){
        var subpath = path + '/' + file;
        if(fs.lstatSync(subpath).isDirectory()){
            getFiles(subpath, files);
        } else {
            files.push(path + '/' + file);
        }
    });     
}

Call it:

getFiles(path, files)
console.log(files) // will log all files in directory
Answered 2016-04-19T22:54:05.020
9

Asynchronous

const fs = require('fs')
const path = require('path')

const readdir = (p, done, a = [], i = 0) => fs.readdir(p, (e, d = []) =>
  d.map(f => readdir(a[a.push(path.join(p, f)) - 1], () =>
    ++i == d.length && done(a), a)).length || done(a))

readdir(__dirname, console.log)

Synchronous

const fs = require('fs')
const path = require('path')

const readdirSync = (p, a = []) => {
  if (fs.statSync(p).isDirectory())
    fs.readdirSync(p).map(f => readdirSync(a[a.push(path.join(p, f)) - 1], a))
  return a
}

console.log(readdirSync(__dirname))

Asynchronous, readable

function readdir (currentPath, done, allFiles = [], i = 0) {
  fs.readdir(currentPath, function (e, directoryFiles = []) {
    if (!directoryFiles.length)
      return done(allFiles)
    directoryFiles.map(function (file) {
      var joinedPath = path.join(currentPath, file)
      allFiles.push(joinedPath)
      readdir(joinedPath, function () {
        i = i + 1
        if (i == directoryFiles.length)
          done(allFiles)}
      , allFiles)
    })
  })
}

readdir(__dirname, console.log)

Note: both versions will follow symlinks (same as the original fs.readdir).
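
If you would rather not follow symlinks, one possible variation (a sketch only) of the synchronous version uses fs.lstatSync, which does not follow links, so symlinked directories are listed but not descended into:

const fs = require('fs')
const path = require('path')

const readdirSyncNoSymlinks = (p, a = []) => {
  // lstatSync reports a symlink as a symlink, not as a directory,
  // so linked directories are recorded but never recursed into.
  if (fs.lstatSync(p).isDirectory())
    fs.readdirSync(p).map(f => readdirSyncNoSymlinks(a[a.push(path.join(p, f)) - 1], a))
  return a
}

console.log(readdirSyncNoSymlinks(__dirname))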

Answered 2019-04-08T04:04:24.980
8

Use node-dir to produce exactly the output you like:

var dir = require('node-dir');

dir.files(__dirname, function(err, files) {
  if (err) throw err;
  console.log(files);
  //we have an array of files now, so now we can iterate that array
  files.forEach(function(path) {
    action(null, path);
  })
});
Answered 2014-05-14T14:46:34.900
6

A modern, promise-based recursive version of readdir:

const fs = require('fs');
const path = require('path');

const readDirRecursive = async (filePath) => {
    const dir = await fs.promises.readdir(filePath);
    const files = await Promise.all(dir.map(async relativePath => {
        const absolutePath = path.join(filePath, relativePath);
        const stat = await fs.promises.lstat(absolutePath);

        return stat.isDirectory() ? readDirRecursive(absolutePath) : absolutePath;
    }));

    return files.flat();
}
Answered 2021-09-24T17:00:45.393
4

I coded this recently, and thought it would make sense to share it here. The code makes use of the async library.

var fs = require('fs');
var async = require('async');

var scan = function(dir, suffix, callback) {
  fs.readdir(dir, function(err, files) {
    var returnFiles = [];
    async.each(files, function(file, next) {
      var filePath = dir + '/' + file;
      fs.stat(filePath, function(err, stat) {
        if (err) {
          return next(err);
        }
        if (stat.isDirectory()) {
          scan(filePath, suffix, function(err, results) {
            if (err) {
              return next(err);
            }
            returnFiles = returnFiles.concat(results);
            next();
          })
        }
        else if (stat.isFile()) {
          if (file.indexOf(suffix, file.length - suffix.length) !== -1) {
            returnFiles.push(filePath);
          }
          next();
        }
      });
    }, function(err) {
      callback(err, returnFiles);
    });
  });
};

You can use it like this:

scan('/some/dir', '.ext', function(err, files) {
  // Do something with files that ends in '.ext'.
  console.log(files);
});
Answered 2013-04-08T15:15:29.450
4

Using async/await, this should do the job:

const FS = require('fs');
const Path = require('path');
// `promisify` can come from util (Node 8+), Bluebird, or the helper defined below.
const readDir = promisify(FS.readdir);
const fileStat = promisify(FS.stat);

async function getFiles(dir) {
    let files = await readDir(dir);

    let result = files.map(file => {
        let path = Path.join(dir,file);
        return fileStat(path).then(stat => stat.isDirectory() ? getFiles(path) : path);
    });

    return flatten(await Promise.all(result));
}

function flatten(arr) {
    return Array.prototype.concat(...arr);
}

You can use bluebird.Promisify or this:

/**
 * Returns a function that will wrap the given `nodeFunction`. Instead of taking a callback, the returned function will return a promise whose fate is decided by the callback behavior of the given node function. The node function should conform to node.js convention of accepting a callback as last argument and calling that callback with error as the first argument and success value on the second argument.
 *
 * @param {Function} nodeFunction
 * @returns {Function}
 */
module.exports = function promisify(nodeFunction) {
    return function(...args) {
        return new Promise((resolve, reject) => {
            nodeFunction.call(this, ...args, (err, data) => {
                if(err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            })
        });
    };
};

Node 8+ has promisify built in.

For a generator approach that can give out results even faster, see my other answer.

Answered 2017-02-23T00:39:58.327
4

Here is a simple synchronous recursive solution:

const fs = require('fs')

const getFiles = path => {
    const files = []
    for (const file of fs.readdirSync(path)) {
        const fullPath = path + '/' + file
        if(fs.lstatSync(fullPath).isDirectory())
            getFiles(fullPath).forEach(x => files.push(file + '/' + x))
        else files.push(file)
    }
    return files
}

Usage:

const files = getFiles(process.cwd())

console.log(files)

You could write it asynchronously, but there is no need. Just make sure that the input directory exists and is accessible.
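
For example, a quick sketch of such a guard using fs.accessSync (getFilesSafe is just an illustrative name):

const getFilesSafe = dir => {
    // Throws early with a clear error if the directory is missing or unreadable.
    fs.accessSync(dir, fs.constants.R_OK)
    if (!fs.lstatSync(dir).isDirectory())
        throw new Error(`${dir} is not a directory`)
    return getFiles(dir)
}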

Answered 2021-01-28T13:57:09.270
4

Another option is the Filehound library. It will recursively search a given directory (the working directory by default). It supports various filters, callbacks, promises and sync searches.

For example, search the current working directory for all files (using callbacks):

const Filehound = require('filehound');

Filehound.create()
.find((err, files) => {
    if (err) {
        return console.error(`error: ${err}`);
    }
    console.log(files); // array of files
});

Or promises, and specifying a particular directory:

const Filehound = require('filehound');

Filehound.create()
.paths("/tmp")
.find()
.each(console.log);

For further use cases and examples of usage, see the docs: https://github.com/nspragg/filehound

Disclaimer: I'm the author.

Answered 2016-11-14T19:28:38.810
3

Check out the final-fs library. It provides a readdirRecursive function:

ffs.readdirRecursive(dirPath, true, 'my/initial/path')
    .then(function (files) {
        // in the `files` variable you've got all the files
    })
    .otherwise(function (err) {
        // something went wrong
    });
Answered 2013-06-05T23:37:03.117
3

A variation of qwtel's answer, in TypeScript:

import { resolve } from 'path';
import { readdir } from 'fs/promises';

async function* getFiles(dir: string): AsyncGenerator<string> {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
        const res = resolve(dir, entry.name);
        if (entry.isDirectory()) {
            yield* getFiles(res);
        } else {
            yield res;
        }
    }
}
Answered 2020-12-22T20:03:33.847
3

Simple, async-promise-based:


const fs = require('fs/promises');
const getDirRecursive = async (dir) => {
    try {
        const items = await fs.readdir(dir);
        let files = [];
        for (const item of items) {
            if ((await fs.lstat(`${dir}/${item}`)).isDirectory()) files = [...files, ...(await getDirRecursive(`${dir}/${item}`))];
            else files.push({file: item, path: `${dir}/${item}`, parents: dir.split("/")});
        }
        return files;
    } catch (e) {
        return e
    }
};

Usage: await getDirRecursive("./public");
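
Since a bare await only works at the top level of an ES module, a plain-script usage sketch would be:

(async () => {
    const files = await getDirRecursive("./public");
    console.log(files);
})();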

Answered 2021-02-17T08:19:18.963
2

Standalone promise implementation

I am using the when.js promise library in this example.

var fs = require('fs')
, path = require('path')
, when = require('when')
, nodefn = require('when/node/function');

function walk (directory, includeDir) {
    var results = [];
    return when.map(nodefn.call(fs.readdir, directory), function(file) {
        file = path.join(directory, file);
        return nodefn.call(fs.stat, file).then(function(stat) {
            if (stat.isFile()) { return results.push(file); }
            if (includeDir) { results.push(file + path.sep); }
            return walk(file, includeDir).then(function(filesInDir) {
                results = results.concat(filesInDir);
            });
        });
    }).then(function() {
        return results;
    });
};

walk(__dirname).then(function(files) {
    console.log(files);
}).otherwise(function(error) {
    console.error(error.stack || error);
});

I've included an optional parameter includeDir which will include directories in the file listing if set to true.

Answered 2013-09-20T21:32:42.713
2

For this sort of thing, klaw and klaw-sync are worth considering. They are part of node-fs-extra.
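
As a rough sketch of klaw's stream-based interface (verify the details against its README):

const klaw = require('klaw');

const items = [];
klaw('/some/dir')
  .on('data', item => items.push(item.path)) // each item also carries a stats object
  .on('error', err => console.error(err))
  .on('end', () => console.log(items));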

Answered 2017-01-23T15:40:53.363
1

Here is yet another implementation. None of the above solutions have any limiters, so if your directory structure is large, they are all going to thrash and eventually run out of resources.

var async = require('async');
var fs = require('fs');
var resolve = require('path').resolve;

var scan = function(path, concurrency, callback) {
    var list = [];

    var walker = async.queue(function(path, callback) {
        fs.stat(path, function(err, stats) {
            if (err) {
                return callback(err);
            } else {
                if (stats.isDirectory()) {
                    fs.readdir(path, function(err, files) {
                        if (err) {
                            callback(err);
                        } else {
                            for (var i = 0; i < files.length; i++) {
                                walker.push(resolve(path, files[i]));
                            }
                            callback();
                        }
                    });
                } else {
                    list.push(path);
                    callback();
                }
            }
        });
    }, concurrency);

    walker.push(path);

    walker.drain = function() {
        callback(list);
    }
};

Using a concurrency of 50 works pretty well, and is almost as fast as simpler implementations for small directory structures.
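
For completeness, a usage sketch for the queue-based scan above; note that, unlike the other examples, its callback receives only the file list:

scan('/some/dir', 50, function (files) {
  console.log(files);
});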

Answered 2014-08-06T16:07:04.837
1

I modified Trevor Senior's Promise-based answer to work with Bluebird:

var fs = require('fs'),
    path = require('path'),
    Promise = require('bluebird');

var readdirAsync = Promise.promisify(fs.readdir);
var statAsync = Promise.promisify(fs.stat);
function walkFiles (directory) {
    var results = [];
    return readdirAsync(directory).map(function(file) {
        file = path.join(directory, file);
        return statAsync(file).then(function(stat) {
            if (stat.isFile()) {
                return results.push(file);
            }
            return walkFiles(file).then(function(filesInDir) {
                results = results.concat(filesInDir);
            });
        });
    }).then(function() {
        return results;
    });
}

//use
walkFiles(__dirname).then(function(files) {
    console.log(files);
}).catch(function(e) {
    console.error(e);
});
Answered 2015-01-24T21:51:34.880
1

The recursive-readdir module has this functionality.

Answered 2014-11-25T00:19:39.050
1

Just for fun, here is a flow-based version that works with the highland.js streams library. It was co-authored by Victor Vu.

###
  directory >---m------> dirFilesStream >---------o----> out
                |                                 |
                |                                 |
                +--------< returnPipe <-----------+

  legend: (m)erge  (o)bserve

 + directory         has the initial file
 + dirListStream     does a directory listing
 + out               prints out the full path of the file
 + returnPipe        runs stat and filters on directories

###

_ = require('highland')
fs = require('fs')
fsPath = require('path')

directory = _(['someDirectory'])
mergePoint = _()
dirFilesStream = mergePoint.merge().flatMap((parentPath) ->
  _.wrapCallback(fs.readdir)(parentPath).sequence().map (path) ->
    fsPath.join parentPath, path
)
out = dirFilesStream
# Create the return pipe
returnPipe = dirFilesStream.observe().flatFilter((path) ->
  _.wrapCallback(fs.stat)(path).map (v) ->
    v.isDirectory()
)
# Connect up the merge point now that we have all of our streams.
mergePoint.write directory
mergePoint.write returnPipe
mergePoint.end()
# Release backpressure.  This will print files as they are discovered
out.each _.log
# Another way would be to queue them all up and then print them all out at once.
# out.toArray((files)-> console.log(files))
Answered 2015-09-10T16:08:24.150
1

For whoever wants a synchronous alternative to the accepted answer (I know I did):

var fs = require('fs');
var path = require('path');
var walk = function(dir) {
    let results = [], err = null, list;
    try {
        list = fs.readdirSync(dir)
    } catch(e) {
        err = e.toString();
    }
    if (err) return err;
    var i = 0;
    return (function next() {
        var file = list[i++];

        if(!file) return results;
        file = path.resolve(dir, file);
        let stat = fs.statSync(file);
        if (stat && stat.isDirectory()) {
          let res = walk(file);
          results = results.concat(res);
          return next();
        } else {
          results.push(file);
           return next();
        }

    })();

};

console.log(
    walk("./")
)
Answered 2020-03-31T08:49:03.380
1

With Bluebird's promise.coroutine:

let promise = require('bluebird'),
    path = require('path'),
    PC = promise.coroutine,
    fs = promise.promisifyAll(require('fs'));
let getFiles = PC(function*(dir){
    let files = [];
    let contents = yield fs.readdirAsync(dir);
    // remove dot (hidden) files, e.g. on macOS; filtering avoids the
    // index-skipping problem of splicing while iterating
    contents = contents.filter(c => !/^\./.test(c));
    for (let i = 0, l = contents.length; i < l; i ++) {
        let content = path.resolve(dir, contents[i]);
        let contentStat = yield fs.statAsync(content);
        if (contentStat && contentStat.isDirectory()) {
            let subFiles = yield getFiles(content);
            files = files.concat(subFiles);
        } else {
            files.push(content);
        }
    }
    return files;
});
//how to use
//easy error handling in one place
getFiles(your_dir).then(console.log).catch(err => console.log(err));
Answered 2016-08-24T08:34:08.687
1

Another answer, but this time using TypeScript:

import * as fs from 'fs';
import * as path from 'path';

/**
 * Recursively walk a directory asynchronously and obtain all file names (with full path).
 *
 * @param dir Folder name you want to recursively process
 * @param done Callback function, returns all files with full path.
 * @param filter Optional filter to specify which files to include, 
 *   e.g. for json files: (f: string) => /.json$/.test(f)
 */
const walk = (
  dir: string,
  done: (err: Error | null, results ? : string[]) => void,
  filter ? : (f: string) => boolean
) => {
  let results: string[] = [];
  fs.readdir(dir, (err: Error, list: string[]) => {
    if (err) {
      return done(err);
    }
    let pending = list.length;
    if (!pending) {
      return done(null, results);
    }
    list.forEach((file: string) => {
      file = path.resolve(dir, file);
      fs.stat(file, (err2, stat) => {
        if (stat && stat.isDirectory()) {
          walk(file, (err3, res) => {
            if (res) {
              results = results.concat(res);
            }
            if (!--pending) {
              done(null, results);
            }
          }, filter);
        } else {
          if (typeof filter === 'undefined' || (filter && filter(file))) {
            results.push(file);
          }
          if (!--pending) {
            done(null, results);
          }
        }
      });
    });
  });
};

Answered 2018-05-15T08:20:24.267
1

I have to add the Promise-based sander library to the list.

 var sander = require('sander');
 sander.lsr(directory).then( filenames => { console.log(filenames) } );
Answered 2016-01-14T12:58:26.113
1

Solving this in a functional style with Promises (Q):

var fs = require('fs'),
    fsPath = require('path'),
    Q = require('q');

var walk = function (dir) {
  return Q.ninvoke(fs, 'readdir', dir).then(function (files) {

    return Q.all(files.map(function (file) {

      file = fsPath.join(dir, file);
      return Q.ninvoke(fs, 'lstat', file).then(function (stat) {

        if (stat.isDirectory()) {
          return walk(file);
        } else {
          return [file];
        }
      });
    }));
  }).then(function (files) {
    return files.reduce(function (pre, cur) {
      return pre.concat(cur);
    });
  });
};

It returns a promise of an array, so you can use it as:

walk('/home/mypath').then(function (files) { console.log(files); });
Answered 2015-11-25T13:18:17.793
1

A promise-based recursive solution in TypeScript, using Array.flat() to handle the nested returns.

import { resolve } from 'path'
import { Dirent } from 'fs'
import * as fs from 'fs'

function getFiles(root: string): Promise<string[]> {
  return fs.promises
    .readdir(root, { withFileTypes: true })
    .then(dirents => {
      const mapToPath = (r: string) => (dirent: Dirent): string => resolve(r, dirent.name)
      const directoryPaths = dirents.filter(a => a.isDirectory()).map(mapToPath(root))
      const filePaths = dirents.filter(a => a.isFile()).map(mapToPath(root))

      return Promise.all([
        ...directoryPaths.map(a => getFiles(a)),
        ...filePaths.map(a => Promise.resolve(a))
      ]).then(a => a.flat())
    })
}
Answered 2020-03-09T06:30:34.057
1

Short and efficient:

import {lstat, readdir} from 'node:fs/promises'
import {join} from 'node:path/posix'

const deepReadDir = async (dirPath) => await Promise.all(
  (await readdir(dirPath)).map(async (entity) => {
    const path = join(dirPath, entity)
    return (await lstat(path)).isDirectory() ? await deepReadDir(path) : path
  }),
)

This automatically folds each nested path into a new (nested) array. For example, if:

const files = await deepReadDir('src')

then files is like:

[
  [
    'src/client/api.js',
    'src/client/http-constants.js',
    'src/client/index.html',
    'src/client/index.js',
    [ 'src/client/res/favicon.ico' ],
    'src/client/storage.js'
  ],
  [ 'src/crypto/keygen.js' ],
  'src/discover.js',
  [
    'src/mutations/createNewMutation.js',
    'src/mutations/newAccount.js',
    'src/mutations/transferCredit.js',
    'src/mutations/updateApp.js'
  ],
  [
    'src/server/authentication.js',
    'src/server/handlers.js',
    'src/server/quick-response.js',
    'src/server/server.js',
    'src/server/static-resources.js'
  ],
  [ 'src/util/prompt.js', 'src/util/safeWriteFile.js' ],
  'src/util.js'
]

But you can easily flatten it:

files.flat(Number.POSITIVE_INFINITY)
[
  'src/client/api.js',
  'src/client/http-constants.js',
  'src/client/index.html',
  'src/client/index.js',
  'src/client/res/favicon.ico',
  'src/client/storage.js',
  'src/crypto/keygen.js',
  'src/discover.js',
  'src/mutations/createNewMutation.js',
  'src/mutations/newAccount.js',
  'src/mutations/transferCredit.js',
  'src/mutations/updateApp.js',
  'src/server/authentication.js',
  'src/server/handlers.js',
  'src/server/quick-response.js',
  'src/server/server.js',
  'src/server/static-resources.js',
  'src/util/prompt.js',
  'src/util/safeWriteFile.js',
  'src/util.js'
]
Answered 2022-02-17T22:24:00.820
1

There is a new module called cup-readdir that searches directories recursively very fast. It uses async promises and outperforms many popular modules when dealing with deep directory structures.

It can return all files in an array and sort them by attributes, but it lacks features like file filtering and entering symlinked directories. It may be useful for big projects where you just want to grab every file from a directory. Here is a link to their project homepage.

Answered 2021-01-19T23:41:15.080
0

Check out loaddir https://npmjs.org/package/loaddir

npm install loaddir

  loaddir = require('loaddir')

  allJavascripts = []
  loaddir({
    path: __dirname + '/public/javascripts',
    callback: function(){  allJavascripts.push(this.relativePath + this.baseName); }
  })

You can use fileName instead of baseName if you need the file extension as well.

An added bonus is that it will watch the files as well and call the callback again. There are tons of configuration options to make it extremely flexible.

I just remade the guard gem from ruby using loaddir in a short while.

Answered 2013-08-15T14:31:27.707
0

This is my answer. Hope it helps somebody.

My focus is to make the searching routine able to stop anywhere and, for a found file, to report the relative depth from the original path.

var _fs = require('fs');
var _path = require('path');
var _defer = process.nextTick;

// next() will pop the first element from an array and return it, together with
// the recursive depth and the container array of the element. i.e. If the first
// element is an array, it'll be dug into recursively. But if the first element is
// an empty array, it'll be simply popped and ignored.
// e.g. If the original array is [1,[2],3], next() will return [1,0,[[2],3]], and
// the array becomes [[2],3]. If the array is [[[],[1,2],3],4], next() will return
// [1,2,[2]], and the array becomes [[[2],3],4].
// There is an infinity loop `while(true) {...}`, because I optimized the code to
// make it a non-recursive version.
var next = function(c) {
    var a = c;
    var n = 0;
    while (true) {
        if (a.length == 0) return null;
        var x = a[0];
        if (x.constructor == Array) {
            if (x.length > 0) {
                a = x;
                ++n;
            } else {
                a.shift();
                a = c;
                n = 0;
            }
        } else {
            a.shift();
            return [x, n, a];
        }
    }
}

// cb is the callback function, it have four arguments:
//    1) an error object if any exception happens;
//    2) a path name, may be a directory or a file;
//    3) a flag, `true` means directory, and `false` means file;
//    4) a zero-based number indicates the depth relative to the original path.
// cb should return a state value to tell whether the searching routine should
// continue: `true` means it should continue; `false` means it should stop here;
// but for a directory, there is a third state `null`, means it should do not
// dig into the directory and continue searching the next file.
var ls = function(path, cb) {
    // use `_path.resolve()` to correctly handle '.' and '..'.
    var c = [ _path.resolve(path) ];
    var f = function() {
        var p = next(c);
        p && s(p);
    };
    var s = function(p) {
        _fs.stat(p[0], function(err, ss) {
            if (err) {
                // use `_defer()` to turn a recursive call into a non-recursive call.
                cb(err, p[0], null, p[1]) && _defer(f);
            } else if (ss.isDirectory()) {
                var y = cb(null, p[0], true, p[1]);
                if (y) r(p);
                else if (y == null) _defer(f);
            } else {
                cb(null, p[0], false, p[1]) && _defer(f);
            }
        });
    };
    var r = function(p) {
        _fs.readdir(p[0], function(err, files) {
            if (err) {
                cb(err, p[0], true, p[1]) && _defer(f);
            } else {
                // not use `Array.prototype.map()` because we can make each change on site.
                for (var i = 0; i < files.length; i++) {
                    files[i] = _path.join(p[0], files[i]);
                }
                p[2].unshift(files);
                _defer(f);
            }
        });
    }
    _defer(f);
};

var printfile = function(err, file, isdir, n) {
    if (err) {
        console.log('-->   ' + ('[' + n + '] ') + file + ': ' + err);
        return true;
    } else {
        console.log('... ' + ('[' + n + '] ') + (isdir ? 'D' : 'F') + ' ' + file);
        return true;
    }
};

var path = process.argv[2];
ls(path, printfile);
Answered 2014-09-15T00:38:34.237
0

Because everyone should write their own, here is mine.

walk(dir, cb, endCb), where cb(file) is called for each file and endCb(err | null) when the walk finishes.

Dirty:

module.exports = walk;

function walk(dir, cb, endCb) {
  var fs = require('fs');
  var path = require('path');

  fs.readdir(dir, function(err, files) {
    if (err) {
      return endCb(err);
    }

    var pending = files.length;
    if (pending === 0) {
      endCb(null);
    }
    files.forEach(function(file) {
      fs.stat(path.join(dir, file), function(err, stats) {
        if (err) {
          return endCb(err)
        }

        if (stats.isDirectory()) {
          walk(path.join(dir, file), cb, function() {
            pending--;
            if (pending === 0) {
              endCb(null);
            }
          });
        } else {
          cb(path.join(dir, file));
          pending--;
          if (pending === 0) {
            endCb(null);
          }
        }
      })
    });

  });
}
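
A usage sketch for the walk(dir, cb, endCb) function above:

walk('/some/dir', function (file) {
  console.log(file);            // called once per file found
}, function (err) {
  if (err) return console.error(err);
  console.log('done');          // called once the whole tree has been visited
});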
Answered 2013-03-05T16:11:22.233
0

For Node 10.3+, here is a for-await solution:

#!/usr/bin/env node

const FS = require('fs');
const Util = require('util');
const readDir = Util.promisify(FS.readdir);
const Path = require('path');

async function* readDirR(path) {
    const entries = await readDir(path,{withFileTypes:true});
    for(let entry of entries) {
        const fullPath = Path.join(path,entry.name);
        if(entry.isDirectory()) {
            yield* readDirR(fullPath);
        } else {
            yield fullPath;
        }
    }
}

async function main() {
    const start = process.hrtime.bigint();
    for await(const file of readDirR('/mnt/home/media/Unsorted')) {
        console.log(file);
    }
    console.log((process.hrtime.bigint()-start)/1000000n);
}

main().catch(err => {
    console.error(err);
});

The benefit of this solution is that you can start processing the results immediately; e.g. reading all the files in my media directory takes 12 seconds, but if I do it this way I can get the first result within a few milliseconds.
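
It also means you can stop early without walking the whole tree; here is a small sketch reusing readDirR from above (firstMatch is just an illustrative helper name):

async function firstMatch(dir, predicate) {
    // Stops the walk as soon as a matching file is found.
    for await (const file of readDirR(dir)) {
        if (predicate(file)) return file;
    }
    return null;
}

firstMatch('/mnt/home/media/Unsorted', f => f.endsWith('.mp4')).then(console.log);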

Answered 2019-03-04T07:39:18.493
0

Just a simple walk:

let pending = [baseFolderPath]
function walk () {
    const current = pending.shift()
    // do stuff with `current` and push any child paths onto `pending`
    if (pending.length) walk()
}
walk()
Answered 2020-04-03T06:18:54.807
0

Here is a recursive method of getting all files, including those in subdirectories.

const FileSystem = require("fs");
const Path = require("path");

//...

function getFiles(directory) {
    directory = Path.normalize(directory);
    let files = FileSystem.readdirSync(directory).map((file) => directory + Path.sep + file);

    files.forEach((file, index) => {
        if (FileSystem.statSync(file).isDirectory()) {
            Array.prototype.splice.apply(files, [index, 1].concat(getFiles(file)));
        }
    });

    return files;
}
Answered 2017-02-03T04:51:59.290
0

This is how I use the nodejs fs.readdir function to recursively search a directory.

const fs = require('fs');
const mime = require('mime-types');
const readdirRecursivePromise = path => {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, directoriesPaths) => {
            if (err) {
                reject(err);
            } else {
                if (directoriesPaths.indexOf('.DS_Store') != -1) {
                    directoriesPaths.splice(directoriesPaths.indexOf('.DS_Store'), 1);
                }
                directoriesPaths.forEach((e, i) => {
                    directoriesPaths[i] = statPromise(`${path}/${e}`);
                });
                Promise.all(directoriesPaths).then(out => {
                    resolve(out);
                }).catch(err => {
                    reject(err);
                });
            }
        });
    });
};
const statPromise = path => {
    return new Promise((resolve, reject) => {
        fs.stat(path, (err, stats) => {
            if (err) {
                reject(err);
            } else {
                if (stats.isDirectory()) {
                    readdirRecursivePromise(path).then(out => {
                        resolve(out);
                    }).catch(err => {
                        reject(err);
                    });
                } else if (stats.isFile()) {
                    resolve({
                        'path': path,
                        'type': mime.lookup(path)
                    });
                } else {
                    reject(`Error parsing path: ${path}`);
                }
            }
        });
    });
};
const flatten = (arr, result = []) => {
    for (let i = 0, length = arr.length; i < length; i++) {
        const value = arr[i];
        if (Array.isArray(value)) {
            flatten(value, result);
        } else {
            result.push(value);
        }
    }
    return result;
};

Let's say you have a path called '/database' in your node project's root. Once this promise resolves, it should spit out an array of every file under '/database'.

readdirRecursivePromise('database').then(out => {
    console.log(flatten(out));
}).catch(err => {
    console.log(err);
});
Answered 2017-05-03T08:13:47.620
0

Another simple and helpful one:

const fs = require('fs');

function walkDir(root) {
    const stat = fs.statSync(root);

    if (stat.isDirectory()) {
        const dirs = fs.readdirSync(root).filter(item => !item.startsWith('.'));
        let results = dirs.map(sub => walkDir(`${root}/${sub}`));
        return [].concat(...results);
    } else {
        return root;
    }
}
Answered 2017-02-24T14:58:40.430
-1

Here is the complete working code, as per your requirement. You can get all files and folders recursively.

var fs = require('fs');
var path = require('path');

var recur = function(dir) {
    fs.readdir(dir, function(err, list) {
        list.forEach(function(file) {
            var file2 = path.resolve(dir, file);
            fs.stat(file2, function(err, stats) {
                if (stats.isDirectory()) {
                    recur(file2);
                } else {
                    console.log(file2);
                }
            });
        });
    });
};
recur(dirPath);

Set dirPath to the directory path you want to search in, e.g. "c:\test".

Answered 2016-07-09T05:58:05.137