In Node.js I list files with a spawn of find instead of using fs.readdir, which is much slower:
// at the top of the module: var spawn = require('child_process').spawn;
scanDirStream: function (needle, params) {
    // defaults for find(1): regular files, any name
    var options = {
        type: 'f',
        name: '*'
    };
    // caller-supplied params override the defaults
    for (var attrname in params) { options[attrname] = params[attrname]; }
    return new Promise((resolve, reject) => {
        // build the argument list: find <needle> -type f -name <pattern> ...
        var opt = [needle];
        for (var k in options) {
            var v = options[k];
            if (v) { // skip empty values
                opt.push('-' + k);
                opt.push(v);
            }
        }
        var res = '';
        var find = spawn('find', opt);
        // accumulate stdout as it streams in
        find.stdout.on('data', _data => {
            res += _data.toString('utf-8');
        });
        find.stderr.on('data', error => {
            reject(error.toString('utf-8'));
        });
        find.on('close', (_) => {
            // one path per line; drop the empty entry after the trailing newline
            var data = res.split('\n');
            resolve(data.slice(0, data.length - 1));
        });
    });
}//scanDirStream
and you call it like:

scanDirStream(path, { name: name })
    .then(files => console.log(files))
    .catch(err => console.error(err));
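The "much slower" claim is easy to check; here is a minimal timing sketch (testDir is a placeholder path, scanDirStream is assumed to be in scope, and since find descends into subdirectories by default while fs.readdir lists a single level, the numbers are only directly comparable on a flat directory):

var fs = require('fs');
var testDir = '/some/big/directory'; // placeholder: substitute a real directory

console.time('fs.readdir');
fs.readdir(testDir, function (err, names) {
    console.timeEnd('fs.readdir');
    if (err) return console.error(err);
    console.time('scanDirStream');
    scanDirStream(testDir, {}).then(function (files) {
        console.timeEnd('scanDirStream');
    });
});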
For huge directories I would further improve it by limiting the output of find before returning the listing to Node.js, using a pipe to head like:

find . -type f -name "*.mp3" | head -n 100

which, inside scanDirStream, should look like:
var res = '';
var find = spawn('find', opt);
var head = spawn('head', ['-n', '100']);
// head exits after 100 lines; ignore the EPIPE this causes on its stdin
head.stdin.on('error', () => {});
// pipe find's output INTO head, then read the truncated listing FROM head
find.stdout.pipe(head.stdin);
head.stdout.on('data', _data => {
    res += _data.toString('utf-8');
});
find.stderr.on('data', error => {
    reject(error.toString('utf-8'));
});
head.on('close', (_) => {
    // unlike a shell pipeline, find gets no SIGPIPE here, so stop it explicitly
    find.kill();
    var data = res.split('\n');
    resolve(data.slice(0, data.length - 1));
});
My first attempt did not work because the pipe was connected backwards (head.stdin.pipe(find)): find's stdout must be piped into head's stdin, the listing read from head's stdout, and the promise resolved on head's close event. Since Node.js sits between the two processes, find never receives the SIGPIPE it would get in a shell pipeline, which is why it has to be killed once head is done.
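To make the limit optional rather than hard-coded, one way (just a sketch; limit is a hypothetical extra argument to scanDirStream) is to insert the head stage only when requested and read from whichever stream ends the pipeline:

// sketch: inside the Promise, after building opt; `limit` is a hypothetical argument
var find = spawn('find', opt);
var out = find.stdout; // the stream we actually read from
if (limit) {
    var head = spawn('head', ['-n', String(limit)]);
    head.stdin.on('error', () => {}); // tolerate EPIPE when head exits early
    find.stdout.pipe(head.stdin);
    head.on('close', () => find.kill()); // stop the walk once enough lines arrived
    out = head.stdout;
}
var res = '';
out.on('data', _data => { res += _data.toString('utf-8'); });
out.on('end', () => {
    var data = res.split('\n');
    resolve(data.slice(0, data.length - 1));
});
find.stderr.on('data', error => reject(error.toString('utf-8')));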
fs.readdir is slow? How many files were in the directory?