JavaScript file dropping and reading directories - Asynchronous Recursion

So I'm trying to create a file dropper web application. Right now, a user can drop files on the screen and I can read them, including all the files in a directory that was dropped. But I don't know when the script is done reading the files.
Some code:
This first function handles a 'drop' event and will loop through each file and send it to another function that will read its contents.
function readDrop( evt )
{
    // iterate over items (webkitGetAsEntry lives on DataTransferItem, not File)
    for( var i = 0; i < evt.dataTransfer.items.length; i++)
    {
        var entry = evt.dataTransfer.items[i].webkitGetAsEntry();
        if(entry)
            readContents(entry, "");
    }
    //Do stuff after all files and directories have been read.
}
This function is a recursive FileEntry reader. If the entry is a file, I read the FileEntry. If it is a directory, it loops through the contents and passes each one back through this function.
function readContents(entry, path)
{
    if( entry.isFile )
    {
        readFileData( entry, path, function(fileData)
        {
            _MyFiles.push( fileData );
        });
    }
    else if( entry.isDirectory )
    {
        var directoryReader = entry.createReader();
        var dirPath = path + entry.name;
        directoryReader.readEntries(function(results)
        {
            for( var j = 0; j < results.length; j++ )
            {
                // recurse into each child entry, not the directory itself
                readContents(results[j], dirPath);
            }
        }, errorHandler);
    }
}
And here is my function for reading the files. The callback just pushes the fileData object to a global array.
function readFileData(entry, path, callback)
{
    var fileData = {"name": entry.name, "size": 0, "path": path, "file": entry};
    entry.file(function(file)
    {
        fileData["size"] = file.size;
        callback( fileData );
    });
}
I'm not sure where to go from here so that I can have a callback when all files and directories have been read.

The FileSystem API doesn't seem well suited to the task of a full recursive traversal; perhaps that's part of the reason other vendors never adopted it. Anyway, with an arcane combination of Promises I think I was able to accomplish this goal:
function traverse_directory(entry) {
let reader = entry.createReader();
// Resolved when the entire directory is traversed
return new Promise((resolve_directory) => {
var iteration_attempts = [];
(function read_entries() {
// According to the FileSystem API spec, readEntries() must be called until
// it calls the callback with an empty array. Seriously??
reader.readEntries((entries) => {
if (!entries.length) {
// Done iterating this particular directory
resolve_directory(Promise.all(iteration_attempts));
} else {
// Add a list of promises for each directory entry. If the entry is itself
// a directory, then that promise won't resolve until it is fully traversed.
iteration_attempts.push(Promise.all(entries.map((entry) => {
if (entry.isFile) {
// DO SOMETHING WITH FILES
return entry;
} else {
// DO SOMETHING WITH DIRECTORIES
return traverse_directory(entry);
}
})));
// Try calling readEntries() again for the same dir, according to spec
read_entries();
}
}, errorHandler );
})();
});
}
traverse_directory(my_directory_entry).then(()=> {
// AT THIS POINT THE DIRECTORY SHOULD BE FULLY TRAVERSED.
});
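Note that the repeated readEntries() calls really are necessary: Chromium returns directory entries in batches (historically at most 100 per call), so a single readEntries() call on a large directory would silently miss entries.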

Following on from the answer by drarmstr, I modified the function to be compliant with the Airbnb ESLint standards, and wanted to make some further comments on its usage and results.
Here's the new function:
function traverseDirectory(entry) {
const reader = entry.createReader();
// Resolved when the entire directory is traversed
return new Promise((resolve, reject) => {
const iterationAttempts = [];
function readEntries() {
// According to the FileSystem API spec, readEntries() must be called until
// it calls the callback with an empty array. Seriously??
reader.readEntries((entries) => {
if (!entries.length) {
// Done iterating this particular directory
resolve(Promise.all(iterationAttempts));
} else {
// Add a list of promises for each directory entry. If the entry is itself
// a directory, then that promise won't resolve until it is fully traversed.
iterationAttempts.push(Promise.all(entries.map((ientry) => {
if (ientry.isFile) {
// DO SOMETHING WITH FILES
return ientry;
}
// DO SOMETHING WITH DIRECTORIES
return traverseDirectory(ientry);
})));
// Try calling readEntries() again for the same dir, according to spec
readEntries();
}
}, error => reject(error));
}
readEntries();
});
}
Here's a drop event handler:
function dropHandler(evt) {
evt.preventDefault();
const data = evt.dataTransfer.items;
for (let i = 0; i < data.length; i += 1) {
const item = data[i];
const entry = item.webkitGetAsEntry();
traverseDirectory(entry).then(result => console.log(result));
}
}
The result variable at the end contains an array mirroring the tree structure of the folder you dragged and dropped.
For example, I ran the Git repo for my own site through the above code; the repo itself is at https://github.com/tomjn/tomjn.com for comparison.
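Illustratively, a folder holding a README.md plus a src directory containing index.js (hypothetical names) comes back shaped like [[ FileEntry(README.md), [[ FileEntry(index.js) ]] ]] - one level of nesting per readEntries batch and one per subdirectory.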


Cannot iterate over file list from dropped elements in browser web page with DataTransfer

There are similar questions like this, but they don't explain why this fails.
After dropping multiple files into the browser, iterating with the for...of statement fails while the normal for loop works.
The for...of version outputs undefined for file.name while the normal for version outputs the filename correctly.
Why?
function initDragDropEvents() {
// Trigger file uploads on drop events.
$('body').on('drop', function(e) {
let fileList = getFileListFromDropEvent(e.originalEvent);
// Have image? Update view.
for (const file in fileList) {
console.log('1 Dropped file: ' + file.name);
}
for (let i = 0; i < fileList.length; i++) {
console.log('2 Dropped file: ' + fileList[i].name);
}
});
}
function getFileListFromDropEvent(event) {
// Prevent default behavior, which opens the file.
event.preventDefault();
// Set default result for file.
let fileList = [];
// Get item list from the event.
let itemList = event.dataTransfer.items;
if (!itemList) {
itemList = event.dataTransfer.files;
}
if (itemList) {
for (const item of itemList) {
// Assume DataTransfer interface by default.
let file = item;
// Use DataTransferItemList interface instead?
if (item.kind === 'file') {
file = item.getAsFile();
}
fileList.push(file);
}
}
return fileList;
}
This was caused by a subtle bug. Rather than delete the question, I'll post the answer in case someone else makes the same mistake in the future.
The code works if you use for...of and not for...in. for...in iterates an object's enumerable property keys, so here file is just the array index as a string ('0', '1', ...), and a string has no .name property - hence undefined. for...of iterates the values, yielding the actual File objects.
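For illustration, a minimal contrast of the two loops (with a hypothetical two-element list):
const fileList = [{ name: 'a.png' }, { name: 'b.png' }];
// for...in yields the keys '0' and '1' (strings), so file.name is undefined
for (const file in fileList) {
    console.log('1 Dropped file: ' + file.name); // undefined
}
// for...of yields the values, i.e. the objects themselves
for (const file of fileList) {
    console.log('2 Dropped file: ' + file.name); // a.png, b.png
}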

Nodejs: merge join multiple files

consider this scenario:
I have 2 csv files, each one is sorted and contains the id field.
I need to join the rows using the id field. Because the files are already sorted by the id I wanted to perform merge join (https://en.wikipedia.org/wiki/Sort-merge_join).
For that I need to have a way to load some portion of both files, process it and iteratively load more again from one or both files.
(The files are big and would not fit into memory so only streaming approach will work).
The problem is the Node API - what to use? readline will not work because of https://github.com/nodejs/node/issues/33463. Any other ideas?
I had to do something quite similar recently and decided to use the node-line-reader module that has a simpler interface than the built-in readline. I then created a little recursive function that determines which file to read from next by comparing the id of each csv-entry of each provided file. After that the corresponding line gets written out to the target file, and the method is called again until all lines of all files are processed. Here's the whole class I ended up with:
const fs = require('fs');
const LineReader = require('node-line-reader').LineReader;
class OrderedCsvFileMerger {
constructor(files, targetFile) {
this.lineBuffer = [];
this.initReaders(files);
this.initWriter(targetFile);
}
initReaders(files) {
this.readers = files.map(file => new LineReader(file));
}
initWriter(targetFile) {
this.writer = fs.createWriteStream(targetFile);
}
async mergeFiles() {
// initially read first line from all files
for (const reader of this.readers) {
this.lineBuffer.push(await this.nextLine(reader));
}
return this.merge();
}
async nextLine(reader) {
return new Promise((resolve, reject) => {
reader.nextLine(function (err, line) {
if (err) return reject(err); // return so we don't also call resolve on error
resolve(line);
});
})
}
async merge() {
if (this.allLinesProcessed()) {
return;
}
let currentBufferIndex = -1;
let minRowId = Number.MAX_VALUE;
for (let i = 0; i < this.lineBuffer.length; i++) {
const currentRowId = parseInt(this.lineBuffer[i]); // implement parsing logic if your lines do not start
// with an integer id
if (currentRowId < minRowId) {
minRowId = currentRowId;
currentBufferIndex = i;
}
}
const line = this.lineBuffer[currentBufferIndex];
this.writer.write(line + "\n");
this.lineBuffer[currentBufferIndex] = await this.nextLine(this.readers[currentBufferIndex]);
return this.merge();
}
allLinesProcessed() {
return this.lineBuffer.every(l => !l);
}
}
(async () => {
const input = ['./path/to/csv1.csv', './path/to/csv2.csv'];
const target = './path/to/target.csv';
const merger = new OrderedCsvFileMerger(input, target); // match the variables defined above
await merger.mergeFiles();
console.log("Files were merged successfully!");
})().catch(err => {
console.log(err);
});
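Note that because merge() only calls itself after awaiting nextLine(), each recursive call resumes from the microtask queue on a fresh stack, so the recursion will not overflow the call stack even for very large files.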

Async promises recursion with unknow supply of async values

I'm banging my head against async promise recursion. I have a bunch of promises that resolve when async data is downloaded (combined by Promise.all). But sometimes the data I just downloaded contains a link to more data that must be downloaded (recursion). The best explanation is showing the code, I guess. Comments are in the code.
(I have tried various combinations to no avail.)
var request = require('request'); // assumes the 'request' package used throughout this question
var urls = ['http://czyprzy.vdl.pl/file1.txt', 'http://czyprzy.vdl.pl/file2.txt', 'http://czyprzy.vdl.pl/file3.txt'];
var urlsPromise = [];
var secondPart = [];
var thirdPart = [];
function urlContent(url, number) {
return new Promise(function (resolve) {
var dl = request(url, function (err, resp, content) {
if (err || resp.statusCode >= 400) {
return resolve({number : number, url : url, error : 'err'});
}
if (!err && resp.statusCode == 200) {
if (content.indexOf('file') !== -1) // if there is 'file' inside content we need (would like to :) download this new file by recursion
{
content = content.slice(content.indexOf('file') + 4);
content = +content; // coerce to a number (to pass later on, so we know which file we are working on)
url = 'http://czyprzy.vdl.pl/file' + content + '.txt'; // (we build new address)
//urlsPromise.push(urlContent(url, content)); // this would run AFTER Promise.all(urlsPromise), so we simply can't do recursion (like that) here
secondPart.push(urlContent(url, content));
// If we use another promise array that collects the resolved items, everything works just fine -
// but only the first time; then we would need to add another (thirdPart) array and use another
// Promise.all(thirdPart)... and so on and so on. The problem is I don't know how many files
// there will be, so I have no idea how many 'parts' for Promise.all I need to create. Some kind
// of asynchronous loop/recursion would save me here, but I don't know how to do that properly
// so the code runs in the proper order.
}
return resolve({number : number, url : url}); // this goes to 'urlsPromise' array
}
});
});
}
if (urls.length !== 0) {
for (var i = 0; i < urls.length; i++)
{urlsPromise.push(urlContent(urls[i], i + 1));}
}
Promise.all(urlsPromise).then(function(urlsPromise) {
console.log('=======================================');
console.log('urlsPromise:\n');
console.log(urlsPromise); // some code/calculations here
}).then(function() {
return Promise.all(secondPart).then(function(secondPart) {
console.log('=======================================');
console.log('secondPart:\n');
console.log(secondPart); // some code/calculations here
secondPart.forEach(function(item)
{
thirdPart.push(urlContent(item.url, item.number + 3));
});
});
}).then(function() {
return Promise.all(thirdPart).then(function(thirdPart) {
console.log('=======================================');
console.log('thirdPart:\n');
console.log(thirdPart); // some code/calculations here
});
}).then(function()
{
console.log();
console.log('and so on and so on...');
});
//// files LINKING (those files do exist on live server - just for testing purposes):
// file1->file4->file7->file10 /-/ file1 content: file4 /-/ file4 content: file7 /-/ file7 content: file10
// file2->file5->file8->file11 /-/ file2 content: file5 /-/ file5 content: file8 /-/ file8 content: file11
// file3->file6->file9->file12 /-/ file3 content: file6 /-/ file6 content: file9 /-/ file9 content: file12
//// the console.log output looks like this:
// =======================================
// urlsPromise:
// [ { number: 1, url: 'http://czyprzy.vdl.pl/file4.txt' },
// { number: 2, url: 'http://czyprzy.vdl.pl/file5.txt' },
// { number: 3, url: 'http://czyprzy.vdl.pl/file6.txt' } ]
// =======================================
// secondPart:
// [ { number: 4, url: 'http://czyprzy.vdl.pl/file7.txt' },
// { number: 5, url: 'http://czyprzy.vdl.pl/file8.txt' },
// { number: 6, url: 'http://czyprzy.vdl.pl/file9.txt' } ]
// =======================================
// thirdPart:
// [ { number: 7, url: 'http://czyprzy.vdl.pl/file10.txt' },
// { number: 8, url: 'http://czyprzy.vdl.pl/file11.txt' },
// { number: 9, url: 'http://czyprzy.vdl.pl/file12.txt' } ]
// and so on and so on...
The await keyword can massively simplify this. You won't need a self-recursive function. This demo fakes the server call with a randomly sized array.
https://jsfiddle.net/mvwahq19/1/
// setup: create a list with a random number of items.
var sourceList = [];
var numItems = 10 + Math.floor(Math.random() * 20);
for (var i = 0; i < numItems; i++)
{
sourceList.push(i);
}
sourceList.push(100);
var currentIndex = 0;
// a function which returns a promise. Imagine it is asking a server.
function getNextItem() {
var item = sourceList[currentIndex];
currentIndex++;
return new Promise(function(resolve) {
setTimeout(function() {
resolve(item);
}, 100);
});
}
async function poll() {
var collection = [];
var done = false;
while(!done) {
var item = await getNextItem();
collection.push(item);
console.log("Got another item", item);
if (item >= 100) {
done = true;
}
}
console.log("Got all items", collection);
}
poll();
You can write a normal loop, except that the body uses await.
This answer was provided thanks to trincot - https://stackoverflow.com/users/5459839/trincot
When I asked him this question directly, he supported me with his time and knowledge and gave this excellent answer.
CODE:
//// files LINKING (those files do exist on live server - just for testing purposes):
// file1->file4(AND file101)->file7->file10 /-/ file1 content: file4 /-/ file4 content: file7 /-/ file7 content: file10 /-/ file10 content: EMPTY /-/ file101 content: EMPTY
// file2->file5(AND file102)->file8->file11 /-/ file2 content: file5 /-/ file5 content: file8 /-/ file8 content: file11 /-/ file11 content: EMPTY /-/ file102 content: EMPTY
// file3->file6(AND file103)->file9->file12 /-/ file3 content: file6 /-/ file6 content: file9 /-/ file9 content: file12 /-/ file12 content: EMPTY /-/ file103 content: EMPTY
var request = require('request'); // same 'request' package as above
var urls = ['http://czyprzy.vdl.pl/file1.txt', 'http://czyprzy.vdl.pl/file2.txt', 'http://czyprzy.vdl.pl/file3.txt'];
var urlsPromise = [];
function requestPromise(url) {
return new Promise(function(resolve, reject) {
request(url, function (err, resp, content) {
if (err || resp.statusCode != 200) reject(err || resp.statusCode);
else resolve(content);
});
});
}
async function urlContent(url, number) {
var arr = [];
let content = await requestPromise(url);
while (content.indexOf(';') !== -1)
{
var semiColon = content.indexOf(';');
var fileLink = content.slice(content.indexOf('file'), semiColon + 1);
content = content.replace(fileLink, ''); // we need to remove the file link so we won't iterate over it again, we will add to the array only new links
var fileLinkNumber = fileLink.replace('file', '');
fileLinkNumber = fileLinkNumber.replace(';', '');
fileLinkNumber = +fileLinkNumber; // coerce to number
url = 'http://czyprzy.vdl.pl/file' + fileLinkNumber + '.txt'; // we build new address
arr.push({url, fileLinkNumber});
}
if (content.indexOf('file') !== -1)
{
var fileLinkNumber = content.slice(content.indexOf('file') + 4);
fileLinkNumber = +fileLinkNumber;
url = 'http://czyprzy.vdl.pl/file' + fileLinkNumber + '.txt';
arr.push({url, fileLinkNumber});
}
var newArr = arr.map(function(item)
{
return urlContent(item.url, item.fileLinkNumber); // return IS important here
});
return [].concat(arr, ...await Promise.all(newArr));
}
async function doing() {
let urlsPromise = [];
for (let i = 0; i < urls.length; i++) {
urlsPromise.push(urlContent(urls[i], i + 1));
}
let results = [].concat(...await Promise.all(urlsPromise)); // flatten the array of arrays
console.log(results);
}
//// this is only to show Promise.all chaining - so you can do an async loop, and then wait for some other async data - in a proper chain.
var test_a = ['http://czyprzy.vdl.pl/css/1.css', 'http://czyprzy.vdl.pl/css/2.css', 'http://czyprzy.vdl.pl/css/cssa/1a.css', 'http://czyprzy.vdl.pl/css/cssa/2a.css'];
var promisesTest_a = [];
function requestStyle(url)
{
return new Promise(function(resolve, reject)
{
request(url, function(error, response, content)
{
if (!error && response.statusCode === 200) // check error first - response may be undefined when error is set
{resolve(content);}
else
{reject(error);}
});
});
}
for (var i = 0; i < test_a.length; i++)
{promisesTest_a.push(requestStyle(test_a[i]));}
Promise.all(promisesTest_a).then(function(promisesTest_a)
{
console.log(promisesTest_a);
}).then(function()
{
console.log('\nNow we start with #imports...\n');
}).then(function()
{
return doing();
}).then(function()
{
console.log('ALL DONE!');
});
COMMENT:
At first, an explanation of what ... is - the spread syntax (sometimes mislabeled 'destructured rest parameters') - just in case you don't know it.
var arr = [];
var array1 = ['one', 'two', 'three']
var array2 = [['four', 'five', ['six', 'seven']], 'eight', 'nine', 'ten'];
arr = array1.concat(array2);
console.log(arr); // it does not flatten the arrays - it just concatenates them (joins them together)
console.log('---');
// however
arr = array1.concat(...array2);
console.log(arr); // notice the ... - as you can see it flattens the array by one level: 'four' and 'five' are pulled out of their array - think of it as moving up a level :) remember that it pulls up the WHOLE deeper array, so 'six' and 'seven' are now 1 level deep (up from 2 levels deep, but still inside another array).
console.log('---');
// so
arr = [].concat(...arr);
console.log(arr); // hurrrray our array is flat (single array without nested elements)
console.log();
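(On modern runtimes the same flattening can be done directly with Array.prototype.flat: arr.flat() for one level, or arr.flat(Infinity) to flatten completely.)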
All the files (links) that are ready to be downloaded at the start (the 3 urls in the urls array) are downloaded almost immediately - we iterate over the array that contains them synchronously, one after the other, firing each request right away.
Then, when we have their contents (because we await until each content is downloaded, so we have the resolved promise data here), we start to look for info about other possible urls (files) related to the ones we already got, so we can download them too (via async recursion).
When we have found all the info about possible additional urls/files (the matches we extracted), we push it to the data array (named arr in our code) and download them (thanks to the mutation of url).
We download them by returning the async urlContent function, which awaits the requestPromise promise (so we have the resolved/rejected data inside urlContent and, if needed, can use it to build the proper url to get the next file/content).
And so on, until we have "iterated over" (downloaded) all the files. Every time urlContent is called, it builds an array of promises (the newArr variable) that are initially pending. When we await Promise.all(newArr), execution only resumes at that spot once ALL those promises have resolved, and at that moment we have the values of each of them - each one an array. We use one big concat to knit all those arrays together into one big array, also including the elements of arr (remember that a file we have already downloaded can point to more than one further file - that is why we store values in the data array, named arr in the code, which holds the values resolved from requestPromise). This "big" array is the value with which a promise is resolved - recall that this promise is the one already returned by the current function call, at the time its first await was executed.
This is the important part: at every call urlContent returns a single promise, and that (returned) promise is resolved with an array as its value. Note that an async function returns its promise to the caller immediately, when the first await is encountered. The return statement in an async function determines the value with which that returned promise is resolved. In our case that is an array.
So urlContent at every call returns a promise that eventually resolves to an array. Those promises are collected by our async doing function (3 urls were fired at the start, and each one follows its own urlContent path), which awaits them all via Promise.all(urlsPromise); once they have resolved, it "returns" the data (the results variable). To be precise, doing returns a promise (because it is async), but the way we call doing shows we are not interested in what that promise resolves to - and in fact, since doing has no return statement, that promise resolves to undefined (!). Anyway, we don't use it; we merely output the results to the console.
One thing that can be confusing with async functions is that the return statement is not executed when the function returns (what's in a name, right!? ;). The function has already returned when it executed its first await. When it eventually executes the return statement, it does not really return a value; it resolves "its own" promise - the one it had returned earlier. If we really wanted to separate output from logic, we should not do console.log(results) there, but return results, and then, where we call doing, write doing().then(console.log); - now we do use the promise returned by doing!
I would reserve the verb "to return" for what the caller of a function gets back from it synchronously.
I would use "to resolve" for the action that sets a promise to a resolved state with a value, a value that can be accessed with await or .then().
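A tiny self-contained illustration of that distinction:
async function doing() {
    // by the time we get here, the caller already holds the promise doing() returned
    const results = await Promise.resolve([1, 2, 3]);
    return results; // this resolves that promise - it is not a synchronous return
}
const promise = doing();   // handed back immediately, still pending
promise.then(console.log); // logs [ 1, 2, 3 ] once resolved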

Node fs.readdir freezing in folders with too many files

In Node.js I have to read the files in a folder and, for each file, get its file handle info. This is my simplest implementation using fs.readdir:
FileServer.prototype.listLocal = function (params) {
var self = this;
var options = {
limit: 100,
desc: 1
};
// override defaults
for (var attrname in params) { options[attrname] = params[attrname]; }
// media path is the media folder
var mediaDir = path.join(self._options.mediaDir, path.sep);
return new Promise((resolve, reject) => {
fs.readdir(mediaDir, (error, results) => {
if (error) {
self.logger.error("FileServer.list error:%s", error);
return reject(error);
} else { // list files
// cut to max files
results = results.slice(0, options.limit);
// filter default ext
results = results.filter(item => {
return (item.indexOf('.mp3') > -1);
});
// format meta data
results = results.map(file => {
var filePath = path.join(self._options.mediaDir, path.sep, file);
var item = {
name: file,
path: filePath
};
const fd = fs.openSync(filePath, 'r');
var fstat = fs.fstatSync(fd);
// file size in bytes
item.size = fstat.size;
item.sizehr = self.formatSizeUnits(fstat.size);
// "Birth Time" Time of file creation. Set once when the file is created.
item.birthtime = fstat.birthtime;
// "Modified Time" Time when file data last modified.
item.mtime = fstat.mtime;
// "Access Time" Time when file data last accessed.
item.atime = fstat.atime;
item.timestamp = new Date(item.mtime).getTime();
item.media_id = path.basename(filePath, '.mp3');
fs.closeSync(fd);//close file
return item;
});
if (options.desc) { // sort by most recent
results.sort(function (a, b) {
return b.timestamp - a.timestamp;
});
} else { // sort by older
results.sort(function (a, b) {
return a.timestamp - b.timestamp;
});
}
return resolve(results);
}
})
});
}
so that for each file I get an item like this:
{
"name": "sample121.mp3",
"path": "/data/sample121.mp3",
"size": 5751405,
"sizehr": "5.4850 MB",
"birthtime": "2018-10-08T15:26:08.397Z",
"mtime": "2018-10-08T15:26:11.650Z",
"atime": "2018-10-10T09:01:48.534Z",
"timestamp": 1539012371650,
"media_id": "sample121"
}
That said, the problem is that node.js fs.readdir is known to freeze the Node I/O loop when the folder being listed has a large number of files, say from tens of thousands to hundreds of thousands or more.
This is a known issue - see here for more info.
There are also plans to improve fs.readdir in some way, such as streaming - see here about this.
In the meantime I'm searching for something like a patch for this, because my folders are pretty large.
Since the problem is the event loop getting frozen, someone proposed a solution using process.nextTick, which I have assembled here:
FileServer.prototype.listLocalNextTick = function (params) {
var self = this;
var options = {
limit: 100,
desc: 1
};
// override defaults
for (var attrname in params) { options[attrname] = params[attrname]; }
// media path is the media folder
var mediaDir = path.join(self._options.mediaDir, path.sep);
return new Promise((resolve, reject) => {
var AsyncArrayProcessor = function (inArray, inEntryProcessingFunction) {
var elemNum = 0;
var arrLen = inArray.length;
var ArrayIterator = function () {
inEntryProcessingFunction(inArray[elemNum]);
elemNum++;
if (elemNum < arrLen) process.nextTick(ArrayIterator);
}
if (elemNum < arrLen) process.nextTick(ArrayIterator);
}
fs.readdir(mediaDir, function (error, results) {
if (error) {
self.logger.error("FileServer.list error:%s", error);
return reject(error);
}
// cut to max files
results = results.slice(0, options.limit);
// filter default ext
results = results.filter(item => {
return (item.indexOf('.mp3') > -1);
});
var ProcessDirectoryEntry = function (file) {
// This may be as complex as you may fit in a single event loop
var filePath = path.join(self._options.mediaDir, path.sep, file);
var item = {
name: file,
path: filePath
};
const fd = fs.openSync(filePath, 'r');
var fstat = fs.fstatSync(fd);
// file size in bytes
item.size = fstat.size;
item.sizehr = self.formatSizeUnits(fstat.size);
// "Birth Time" Time of file creation. Set once when the file is created.
item.birthtime = fstat.birthtime;
// "Modified Time" Time when file data last modified.
item.mtime = fstat.mtime;
// "Access Time" Time when file data last accessed.
item.atime = fstat.atime;
item.timestamp = new Date(item.mtime).getTime();
item.media_id = path.basename(filePath, '.mp3');
// map to file item
file = item;
}//ProcessDirectoryEntry
// LP: fs.readdir() callback is finished, event loop continues...
AsyncArrayProcessor(results, ProcessDirectoryEntry);
if (options.desc) { // sort by most recent
results.sort(function (a, b) {
return b.timestamp - a.timestamp;
});
} else { // sort by older
results.sort(function (a, b) {
return a.timestamp - b.timestamp;
});
}
return resolve(results);
});
});
}//listLocalNextTick
This seems to avoid the original issue, but now I can no longer map the file list to the items with file info as I did before: when AsyncArrayProcessor runs ProcessDirectoryEntry over each file entry, the async nature of process.nextTick means I cannot get back the modified results array the way I could in the earlier listLocal function, where I just did an iterative array.map over results.
How can I patch listLocalNextTick to behave like listLocal while keeping the process.nextTick approach?
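For reference, here is a minimal sketch of one way the iterator could be patched (onDone is a new completion callback; the other names come from the snippet above). ProcessDirectoryEntry would return its item, the iterator would write it back by index, and the sort plus resolve(results) would move into onDone:
var AsyncArrayProcessor = function (inArray, inEntryProcessingFunction, onDone) {
    var elemNum = 0;
    var arrLen = inArray.length;
    var ArrayIterator = function () {
        if (elemNum >= arrLen) return onDone();
        // write the mapped item back instead of assigning to a local variable
        inArray[elemNum] = inEntryProcessingFunction(inArray[elemNum]);
        elemNum++;
        process.nextTick(ArrayIterator);
    }
    process.nextTick(ArrayIterator);
}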
[UPDATE]
According to the proposed solution, this is the best implementation so far:
/**
 * Scan files in a directory
 * @param {String} needle
 * @param {Object} options
 * @returns {Promise} resolves with an array of file paths
 */
scanDirStream : function(needle,params) {
var options = {
type: 'f',
name: '*'
};
for (var attrname in params) { options[attrname] = params[attrname]; }
return new Promise((resolve, reject) => {
var opt=[needle];
for (var k in options) {
var v = options[k];
if (!Util.empty(v)) {
opt.push('-' + k);
opt.push(v);
}
};
var data='';
var listing = spawn('find',opt)
listing.stdout.on('data', _data => {
var buff=Buffer.from(_data, 'utf-8').toString();
if(buff!='') data+=buff;
})
listing.stderr.on('data', error => {
return reject(Buffer.from(error, 'utf-8').toString());
});
listing.on('close', (code) => {
var res = data.split('\n');
return resolve(res);
});
});
}
Example of usage:
scanDirStream(mediaRoot,{
name: '*.mp3'
})
.then(results => {
console.info("files:%d", results.length);
})
.catch(error => {
console.error("error %s", error);
});
This could eventually be modified to add a tick callback on every stdout 'data' event emitted as new files arrive in the directory listing.
I have created a wrapper around find for this, but you could use dir or ls in the same way.
const { spawn } = require('child_process');
/**
 * findNodeStream
 * @param {String} dir
 * @returns {nodeStream}
 */
const findNodeStream = (dir,options) => spawn('find',[dir,options].flat().filter(x=>x));
/**
* Usage Example:
let listing = findNodeStream('dir',[options])
listing.stdout.on('data', d=>console.log(d.toString()))
listing.stderr.on('data', d=>console.log(d.toString()))
listing.on('close', (code) => {
console.log(`child process exited with code ${code}`);
});
*/
This allows you to stream a directory in chunks rather than reading it whole, as fs.readdir does.
Important
NodeJS > 12.11.1 will have async readdir support
Landed in cbd8d71 ( https://github.com/nodejs/node/commit/cbd8d715b2286e5726e6988921f5c870cbf74127 ) as fs{Promises}.opendir(), which returns an fs.Dir, which exposes an async iterator.
https://nodejs.org/api/fs.html#fs_fspromises_opendir_path_options
const fs = require('fs');
async function print(path) {
const dir = await fs.promises.opendir(path);
for await (const dirent of dir) {
console.log(dirent.name);
}
}
print('./').catch(console.error);
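Because the returned fs.Dir reads entries lazily in small batches as the iterator advances (tunable via the bufferSize option of opendir), the event loop stays responsive even for directories containing hundreds of thousands of files.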

How to access a specific folder to get file names within my extension

So I need to get the names of the files in my "db" folder, which I later need to use in my extension. I searched for how to do this, and I can get all the file names from my root extension folder, but not from any other folder.
This is the code that gives back all the file names from the extension folder, which I found in this question: How do I get a list of filenames in a subfolder of a Chrome extension?
chrome.runtime.getPackageDirectoryEntry(function(directoryEntry) {
var directoryReader = directoryEntry.createReader();
// List of DirectoryEntry and/or FileEntry objects.
var filenames = [];
(function readNext() {
directoryReader.readEntries(function(entries) {
if (entries.length) {
for (var i = 0; i < entries.length; ++i) {
filenames.push(entries[i].name);
}
readNext();
} else {
// No more entries, so all files in the directory are known.
// Do something, e.g. print all file names:
console.log(filenames);
}
});
})();
});
This is the code that is supposed to do what I want, but I can't figure out how to implement it here:
directoryEntry.getDirectory('_locales', {}, function(subDirectoryEntry) {
var directoryReader = subDirectoryEntry.createReader();
// etc.. same code as in previous snippet.
});
The code I needed was this:
chrome.runtime.getPackageDirectoryEntry(function(directoryEntry) {
directoryEntry.getDirectory('index', {}, function(subDirectoryEntry) {
var directoryReader = subDirectoryEntry.createReader();
// List of DirectoryEntry and/or FileEntry objects.
var filenames = [];
(function readNext() {
directoryReader.readEntries(function(entries) {
if (entries.length) {
for (var i = 0; i < entries.length; ++i) {
filenames.push(entries[i].name);
}
readNext();
} else {
// No more entries, so all files in the directory are known.
// Do something, e.g. print all file names:
console.log(filenames);
}
});
})();
});
});
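Note that the subdirectory name passed to getDirectory ('index' above) is resolved relative to the extension root, so for the 'db' folder from the question it would be directoryEntry.getDirectory('db', {}, ...).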
