Node fs.readdir freezing in folders with too many files - javascript

In Node.js I have to read the files in a folder and, for each file, get its file handle info. This is my simplest implementation, using fs.readdir:
FileServer.prototype.listLocal = function (params) {
    var self = this;
    var options = {
        limit: 100,
        desc: 1
    };
    // override defaults
    for (var attrname in params) { options[attrname] = params[attrname]; }
    // media path is the media folder
    var mediaDir = path.join(self._options.mediaDir, path.sep);
    return new Promise((resolve, reject) => {
        fs.readdir(mediaDir, (error, results) => {
            if (error) {
                self.logger.error("FileServer.list error:%s", error);
                return reject(error);
            } else { // list files
                // cut to max files
                results = results.slice(0, options.limit);
                // filter default ext
                results = results.filter(item => {
                    return (item.indexOf('.mp3') > -1);
                });
                // format meta data
                results = results.map(file => {
                    var filePath = path.join(self._options.mediaDir, path.sep, file);
                    var item = {
                        name: file,
                        path: filePath
                    };
                    const fd = fs.openSync(filePath, 'r');
                    var fstat = fs.fstatSync(fd);
                    // file size in bytes
                    item.size = fstat.size;
                    item.sizehr = self.formatSizeUnits(fstat.size);
                    // "Birth Time": time of file creation, set once when the file is created
                    item.birthtime = fstat.birthtime;
                    // "Modified Time": time when file data was last modified
                    item.mtime = fstat.mtime;
                    // "Access Time": time when file data was last accessed
                    item.atime = fstat.atime;
                    item.timestamp = new Date(item.mtime).getTime();
                    item.media_id = path.basename(filePath, '.mp3');
                    fs.closeSync(fd); // close file
                    return item;
                });
                if (options.desc) { // sort by most recent
                    results.sort(function (a, b) {
                        return b.timestamp - a.timestamp;
                    });
                } else { // sort by oldest
                    results.sort(function (a, b) {
                        return a.timestamp - b.timestamp;
                    });
                }
                return resolve(results);
            }
        });
    });
}
so that I get an array with one item per file, like this:
{
    "name": "sample121.mp3",
    "path": "/data/sample121.mp3",
    "size": 5751405,
    "sizehr": "5.4850 MB",
    "birthtime": "2018-10-08T15:26:08.397Z",
    "mtime": "2018-10-08T15:26:11.650Z",
    "atime": "2018-10-10T09:01:48.534Z",
    "timestamp": 1539012371650,
    "media_id": "sample121"
}
That said, the problem is that fs.readdir is known to freeze the Node.js I/O loop when the folder being listed contains a large number of files, say from tens of thousands up to hundreds of thousands or more.
This is a known issue - see here for more info.
There are also plans to improve fs.readdir in some way, like streaming - see here about this.
In the meantime I'm searching for something like a patch, because my folders are pretty large.
Since the problem is that the event loop gets frozen, someone proposed a solution using process.nextTick, which I have assembled here:
FileServer.prototype.listLocalNextTick = function (params) {
    var self = this;
    var options = {
        limit: 100,
        desc: 1
    };
    // override defaults
    for (var attrname in params) { options[attrname] = params[attrname]; }
    // media path is the media folder
    var mediaDir = path.join(self._options.mediaDir, path.sep);
    return new Promise((resolve, reject) => {
        var AsyncArrayProcessor = function (inArray, inEntryProcessingFunction) {
            var elemNum = 0;
            var arrLen = inArray.length;
            var ArrayIterator = function () {
                inEntryProcessingFunction(inArray[elemNum]);
                elemNum++;
                if (elemNum < arrLen) process.nextTick(ArrayIterator);
            }
            if (elemNum < arrLen) process.nextTick(ArrayIterator);
        }
        fs.readdir(mediaDir, function (error, results) {
            if (error) {
                self.logger.error("FileServer.list error:%s", error);
                return reject(error);
            }
            // cut to max files
            results = results.slice(0, options.limit);
            // filter default ext
            results = results.filter(item => {
                return (item.indexOf('.mp3') > -1);
            });
            var ProcessDirectoryEntry = function (file) {
                // This may be as complex as you may fit in a single event loop
                var filePath = path.join(self._options.mediaDir, path.sep, file);
                var item = {
                    name: file,
                    path: filePath
                };
                const fd = fs.openSync(filePath, 'r');
                var fstat = fs.fstatSync(fd);
                // file size in bytes
                item.size = fstat.size;
                item.sizehr = self.formatSizeUnits(fstat.size);
                // "Birth Time": time of file creation, set once when the file is created
                item.birthtime = fstat.birthtime;
                // "Modified Time": time when file data was last modified
                item.mtime = fstat.mtime;
                // "Access Time": time when file data was last accessed
                item.atime = fstat.atime;
                item.timestamp = new Date(item.mtime).getTime();
                item.media_id = path.basename(filePath, '.mp3');
                fs.closeSync(fd); // close file
                // map to file item
                file = item;
            } //ProcessDirectoryEntry
            // LP: fs.readdir() callback is finished, event loop continues...
            AsyncArrayProcessor(results, ProcessDirectoryEntry);
            if (options.desc) { // sort by most recent
                results.sort(function (a, b) {
                    return b.timestamp - a.timestamp;
                });
            } else { // sort by oldest
                results.sort(function (a, b) {
                    return a.timestamp - b.timestamp;
                });
            }
            return resolve(results);
        });
    });
} //listLocalNextTick
This seems to avoid the original issue, but I can no longer map the file list to the items with the file handle info, as I did before. When AsyncArrayProcessor runs ProcessDirectoryEntry on each file entry, the asynchronous nature of process.nextTick means I cannot get back the modified results array as in the previous listLocal function, where I simply did an iterative Array.map over the results.
How can I patch listLocalNextTick to behave like listLocal, while keeping the process.nextTick approach?
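For reference, one way it could be patched (a minimal sketch, not the approach adopted in the update below): give AsyncArrayProcessor a completion callback, have ProcessDirectoryEntry return its item instead of assigning file = item, and move the sort and resolve(results) into that callback so they only run after the last tick. The onDone parameter is a hypothetical addition:
var AsyncArrayProcessor = function (inArray, inEntryProcessingFunction, onDone) {
    var elemNum = 0;
    var arrLen = inArray.length;
    var out = [];
    var ArrayIterator = function () {
        // collect the return value of the processing function
        out.push(inEntryProcessingFunction(inArray[elemNum]));
        elemNum++;
        if (elemNum < arrLen) process.nextTick(ArrayIterator);
        else onDone(out); // last entry processed: hand back the mapped array
    };
    if (elemNum < arrLen) process.nextTick(ArrayIterator);
    else onDone(out); // empty input
};
// ...then, inside the fs.readdir() callback:
// AsyncArrayProcessor(results, ProcessDirectoryEntry, function (items) {
//     items.sort(function (a, b) { return b.timestamp - a.timestamp; });
//     resolve(items);
// });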
[UPDATE]
According to the proposed solution, this is the best implementation so far:
/**
 * Scan files in directory
 * @param {String} needle
 * @param {object} params
 * @returns {Promise}
 */
scanDirStream : function (needle, params) {
    var options = {
        type: 'f',
        name: '*'
    };
    for (var attrname in params) { options[attrname] = params[attrname]; }
    return new Promise((resolve, reject) => {
        var opt = [needle];
        for (var k in options) {
            var v = options[k];
            if (!Util.empty(v)) {
                opt.push('-' + k);
                opt.push(v);
            }
        }
        var data = '';
        var listing = spawn('find', opt);
        listing.stdout.on('data', _data => {
            var buff = Buffer.from(_data, 'utf-8').toString();
            if (buff != '') data += buff;
        });
        listing.stderr.on('data', error => {
            return reject(Buffer.from(error, 'utf-8').toString());
        });
        listing.on('close', (code) => {
            var res = data.split('\n');
            return resolve(res);
        });
    });
} //scanDirStream
Example of usage:
scanDirStream(mediaRoot, {
    name: '*.mp3'
})
.then(results => {
    console.info("files:%d", results.length);
})
.catch(error => {
    console.error("error %s", error);
});
This could eventually be modified to add a tick callback on every stdout 'data' event emitted as new files arrive in the directory listing.

I have created a wrapper around find for it, but you could use dir or ls in the same way.
const { spawn } = require('child_process');
/**
 * findNodeStream
 * @param {String} dir
 * @param {Array} options
 * @returns {ChildProcess}
 */
const findNodeStream = (dir, options) => spawn('find', [dir, options].flat().filter(x => x));
/**
 * Usage Example:
 * let listing = findNodeStream('dir', [options]);
 * listing.stdout.on('data', d => console.log(d.toString()));
 * listing.stderr.on('data', d => console.log(d.toString()));
 * listing.on('close', (code) => {
 *     console.log(`child process exited with code ${code}`);
 * });
 */
This allows you to stream a directory in chunks, rather than reading it whole as fs.readdir does.
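One caveat with consuming the stream directly: stdout 'data' chunks are not aligned to line boundaries, so a single filename can arrive split across two chunks. A minimal line-safe sketch (findEachFile and its onFile/onClose callbacks are illustrative names, not part of any API):
const { spawn } = require('child_process');
// Invoke onFile once per complete line of `find` output, buffering
// partial lines across 'data' chunks (find terminates each path with '\n').
const findEachFile = (dir, onFile, onClose) => {
    const listing = spawn('find', [dir]);
    let buffered = '';
    listing.stdout.on('data', (chunk) => {
        buffered += chunk.toString('utf-8');
        const lines = buffered.split('\n');
        buffered = lines.pop(); // keep the trailing partial line for the next chunk
        lines.filter(Boolean).forEach(onFile);
    });
    listing.on('close', onClose);
};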
Important
NodeJS > 12.11.1 will have async readdir support
Landed in cbd8d71 ( https://github.com/nodejs/node/commit/cbd8d715b2286e5726e6988921f5c870cbf74127 ) as fs{Promises}.opendir(), which returns an fs.Dir that exposes an async iterator.
https://nodejs.org/api/fs.html#fs_fspromises_opendir_path_options
const fs = require('fs');
async function print(path) {
    const dir = await fs.promises.opendir(path);
    for await (const dirent of dir) {
        console.log(dirent.name);
    }
}
print('./').catch(console.error);
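For instance, here is a hedged sketch of how the original listLocal could be rebuilt on top of the async iterator, statting each entry without blocking the event loop (listMp3 is a hypothetical name; mediaDir, the .mp3 filter and the limit come from the question, while sizehr, birthtime and atime are omitted for brevity):
const fs = require('fs');
const path = require('path');
// List up to `limit` .mp3 files with their stats, most recent first.
async function listMp3(mediaDir, limit = 100) {
    const items = [];
    const dir = await fs.promises.opendir(mediaDir);
    for await (const dirent of dir) {
        if (!dirent.isFile() || !dirent.name.endsWith('.mp3')) continue;
        const filePath = path.join(mediaDir, dirent.name);
        const stat = await fs.promises.stat(filePath); // async, unlike fstatSync
        items.push({
            name: dirent.name,
            path: filePath,
            size: stat.size,
            mtime: stat.mtime,
            timestamp: stat.mtime.getTime(),
            media_id: path.basename(filePath, '.mp3')
        });
        if (items.length >= limit) break;
    }
    return items.sort((a, b) => b.timestamp - a.timestamp);
}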

Related

Nodejs: merge join multiple files

Consider this scenario:
I have 2 CSV files, each one sorted and containing an id field.
I need to join the rows using the id field. Because the files are already sorted by id, I wanted to perform a merge join (https://en.wikipedia.org/wiki/Sort-merge_join).
For that I need a way to load some portion of both files, process it, and iteratively load more from one or both files.
(The files are big and would not fit into memory, so only a streaming approach will work.)
The problem is the Node API - what should I use? readline will not work because of https://github.com/nodejs/node/issues/33463. Any other ideas?
I had to do something quite similar recently and decided to use the node-line-reader module, which has a simpler interface than the built-in readline. I then created a little recursive function that determines which file to read from next by comparing the id of the current CSV entry of each provided file. The corresponding line then gets written out to the target file, and the method is called again until all lines of all files are processed. Here's the whole class I ended up with:
const fs = require('fs');
const LineReader = require('node-line-reader').LineReader;

class OrderedCsvFileMerger {
    constructor(files, targetFile) {
        this.lineBuffer = [];
        this.initReaders(files);
        this.initWriter(targetFile);
    }

    initReaders(files) {
        this.readers = files.map(file => new LineReader(file));
    }

    initWriter(targetFile) {
        this.writer = fs.createWriteStream(targetFile);
    }

    async mergeFiles() {
        // initially read first line from all files
        for (const reader of this.readers) {
            this.lineBuffer.push(await this.nextLine(reader));
        }
        return this.merge();
    }

    async nextLine(reader) {
        return new Promise((resolve, reject) => {
            reader.nextLine(function (err, line) {
                if (err) return reject(err);
                resolve(line);
            });
        });
    }

    async merge() {
        if (this.allLinesProcessed()) {
            return;
        }
        let currentBufferIndex = -1;
        let minRowId = Number.MAX_VALUE;
        for (let i = 0; i < this.lineBuffer.length; i++) {
            // implement parsing logic if your lines do not start with an integer id
            const currentRowId = parseInt(this.lineBuffer[i]);
            if (currentRowId < minRowId) {
                minRowId = currentRowId;
                currentBufferIndex = i;
            }
        }
        const line = this.lineBuffer[currentBufferIndex];
        this.writer.write(line + "\n");
        this.lineBuffer[currentBufferIndex] = await this.nextLine(this.readers[currentBufferIndex]);
        return this.merge();
    }

    allLinesProcessed() {
        return this.lineBuffer.every(l => !l);
    }
}
(async () => {
    const input = ['./path/to/csv1.csv', './path/to/csv2.csv'];
    const target = './path/to/target.csv';
    const merger = new OrderedCsvFileMerger(input, target);
    await merger.mergeFiles();
    console.log("Files were merged successfully!");
})().catch(err => {
    console.log(err);
});

Upload multiple images in an Angular and Firebase project; it is not working in sequence

async onSubmit(formValue) {
    this.isSubmitted = true;
    if (this.selectedImageArray.length > 0) { // 4 images in this array
        for (let index = 0; index < this.selectedImageArray.length; index++) { // loop through this image array
            await new Promise(resolve => {
                setTimeout(() => {
                    console.log('This is iteration ' + index);
                    var filePath = `images/tours/${this.selectedImageArray[index].name.split('.').slice(0, -1).join('.')}_${new Date().getTime()}`;
                    const fileRef = this.storage.ref(filePath);
                    this.storage.upload(filePath, this.selectedImageArray[index]).snapshotChanges().pipe(
                        finalize(() => {
                            fileRef.getDownloadURL().subscribe((url) => {
                                formValue[`imageUrl${index + 1}`] = url;
                                console.log(url);
                            });
                        })
                    ).subscribe();
                    resolve();
                }, 3000);
            });
        }
        console.log('After loop execution');
        // this.value(formValue);
    }
}
After submitting, the code downloads and prints 3 URLs, then prints 'After loop execution', and only then prints the 4th URL; I don't understand why. See the console output here (the screenshot shows the line numbers of the code in execution order).
What I want is for the code to execute in sequence: only after all images have been downloaded should execution leave the loop.
I wrote another version of this that hopefully works as you expect it to.
First we create an array of all the storage upload snapshot observables.
Then we use concat() to run them all in sequence. (If you change from concat() to merge() they will all go at once.)
Then we use mergeMap to jump over to the getDownloadURL.
Then in the subscribe we add the url to the formValues.
Finally, in the finalize we set the class property "value" equal to the formValue.
// assumes: import { combineLatest, concat, of } from 'rxjs';
// and: import { mergeMap, finalize } from 'rxjs/operators';
onSubmit(formValue) {
    const snapshotObservables = this.selectedImageArray.map((selectedImage, index) => { // 4 images in this array
        const filePath = `images/tours/${selectedImage.name.split('.').slice(0, -1).join('.')}_${new Date().getTime()}`;
        return combineLatest(this.storage.upload(filePath, selectedImage).snapshotChanges(), of(filePath), of(index));
    });
    concat(...snapshotObservables).pipe(
        mergeMap(([snapshot, filePath, index]) => {
            const fileRef = this.storage.ref(filePath);
            return combineLatest(fileRef.getDownloadURL(), of(index));
        }),
        finalize(() => {
            this.value(formValue);
        })
    ).subscribe(([url, index]) => {
        formValue[`imageUrl${index + 1}`] = url;
    });
}
I wrote a new function for multiple file upload
public multipleFileUpload(event, isEncodeNeeded?: Boolean):Array<any> {
if(!isEncodeNeeded){
isEncodeNeeded=false;
}
let fileList = [];
for (let index = 0; index < event.target.files.length; index++) {
let returnData = {};
let file: File = event.target.files[index];
let myReader: FileReader = new FileReader();
returnData['documentName'] = event.target.files[index]['name'];
returnData['documentType'] = event.target.files[index]['type'];
myReader.addEventListener("load", function (e) {
if (myReader.readyState == 2) {
returnData['document'] = isEncodeNeeded ? btoa(e.target['result']) : e.target['result'];
}
});
myReader.readAsBinaryString(file);
fileList.push(returnData);
}
return fileList;
}
In this function, event is the event from the file input, and isEncodeNeeded indicates whether encoding is needed; if true, the file contents are converted to Base64 format.
The output format is
[{
    "document": <document bytes>,
    "documentName": <document name>,
    "documentType": <file format>
}]
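Note that the 'load' events fire asynchronously, so the document field of each entry may still be unset when multipleFileUpload returns. A minimal promise-based sketch (multipleFileUploadAsync is a hypothetical name) that resolves only after every file has been read:
// Same event / isEncodeNeeded semantics as above, but each FileReader is
// wrapped in a promise so the caller knows when all reads are complete.
function multipleFileUploadAsync(event, isEncodeNeeded = false) {
    const reads = Array.from(event.target.files).map(file =>
        new Promise(resolve => {
            const reader = new FileReader();
            reader.addEventListener('load', e => resolve({
                documentName: file.name,
                documentType: file.type,
                document: isEncodeNeeded ? btoa(e.target.result) : e.target.result
            }));
            reader.readAsBinaryString(file);
        })
    );
    return Promise.all(reads); // resolves with the complete file list
}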

How to use Promise with yield in nodejs router?

I tried to configure a backend factory to obtain data using the Amazon Products API.
Here is the script I execute in Node.js:
var amazon = require('amazon-product-api');
var client = amazon.createClient({
    awsId: 'ID',
    awsSecret: 'secret',
    awsTag: 'tag',
});

// SERVER
// var Promise = require('bluebird');
var koa = require('koa');
var router = require('koa-router')();

router.get('/node/:index', function* () {
    this.body = yield client.browseNodeLookup({
        browseNodeId: this.params.index,
    });
});

router.get('/', function* (ctx, next) {
    var node = yield client.browseNodeLookup({
        browseNodeId: 1063498,
    });
    this.body = node;
});

var app = koa();
app.use(router.routes()).use(router.allowedMethods());
app.listen(8005);
On the frontend I use Bluebird's Promise.map() to map over an array of Amazon product nodes. At the end of the function, I expect the links (strings) in the array to be transformed into objects (obtained via the API).
Here is the function:
someFn(links) { // links is an array of node IDs
    return Promise.map(links, (link) => {
        if (typeof link === "object" || level > 1) {
            return link;
        } else {
            return loadUrl({
                url: 'http://localhost:8005/node/' + link,
                action: 'json',
            }).then((res) => {
                if (isJSON(res)) {
                    return new Category(res); // transform string to object
                } else {
                    return link;
                }
            });
        }
    });
}
Amazon allows at most 10 queries, which is why I need to run the function a few times, or loop it, to obtain an object for each string in the array.
The idea is to wait at the backend for a successful answer, or to repeat the query (yield client.browseNodeLookup).
Or just pass the array of node IDs and get the JSON of each as the result.
I don't have much experience with Node.js servers and routing configuration, so can you help me configure this properly?
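As a starting point for the retry idea on the backend, here is a minimal sketch of a generic helper that could wrap client.browseNodeLookup inside the router (retry is a hypothetical helper; the maxTries and delayMs values are illustrative, not Amazon's limits):
// Retry a promise-returning call with a fixed delay between attempts.
function retry(fn, maxTries, delayMs) {
    return fn().catch(error => {
        if (maxTries <= 1) throw error;
        return new Promise(resolve => setTimeout(resolve, delayMs))
            .then(() => retry(fn, maxTries - 1, delayMs));
    });
}
// Usage inside the koa-router generator (koa v1 can yield promises):
// this.body = yield retry(() => client.browseNodeLookup({
//     browseNodeId: this.params.index,
// }), 10, 100);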
I still haven't found a solution that uses the backend for this task, but I've updated the loadUrl function. The function calls itself until it retrieves a successful answer:
function loadUrl(options) {
    return new Promise((resolve, reject) => {
        $.post('/', options).done((result) => {
            if (result != 'Internal Server Error') {
                resolve(result);
            } else {
                (function run(x) {
                    x = x || 0;
                    // 300 - call limit; for me 100 is enough
                    if (x <= 300) {
                        setTimeout(function () {
                            $.post('/', options).done((result) => {
                                if (result != 'Internal Server Error') {
                                    resolve(result);
                                } else {
                                    console.log('call', x);
                                    run(x + 1);
                                }
                            });
                        }, 100);
                    } else {
                        resolve(result);
                    }
                })();
            }
        });
    });
}
As I understand it, $.post() has a time limit for the answer, and that's why I had the problem. The data was obtained by the backend factory, but my frontend script was not ready to wait for it and stopped.

How can I chain these functions together with promises?

This is a program that scrapes the data out of a tshirt website and then writes the product info to a CSV file.
There are 3 scrape functions and 1 write function.
Right now, I am having an absolute nightmare trying to get my head around how to implement promises here without any 3rd party libraries or packages. Is this possible with just the native features of ES6?
Due to the async nature of the requests, I need each function and its requests to finish completely before the next one is called. This is so I can use the variables such as urlSet in the next function.
How can I do this simply without rewriting my whole code?
I should mention that each of these functions work on an individual basis, they've all been tested several times.
Does each function become an individual promise?
Code is below, thank you:
//TASK: Create a command line application that goes to an ecommerce site to get the latest prices.
//Save the scraped data in a spreadsheet (CSV format).
'use strict';

//Modules being used:
var cheerio = require('cheerio');
var json2csv = require('json2csv');
var request = require('request');
var moment = require('moment');
var fs = require('fs');

//hardcoded url
var url = 'http://shirts4mike.com/';
//url for tshirt pages
var urlSet = new Set();
var remainder;
var tshirtArray = [];

// First scrape loads front page of shirts4mike and finds the first product pages/menus
function firstScrape() {
    request(url, function (error, response, html) {
        if (!error && response.statusCode == 200) {
            var $ = cheerio.load(html);
            //iterate over links with 'shirt'
            $('a[href*=shirt]').each(function () {
                var a = $(this).attr('href');
                //create new link
                var scrapeLink = url + a;
                //for each new link, go in and find out if there is a submit button.
                //If there, add it to the set
                request(scrapeLink, function (error, response, html) {
                    if (!error && response.statusCode == 200) {
                        var $ = cheerio.load(html);
                        //if page has a submit it must be a product page
                        if ($('[type=submit]').length !== 0) {
                            //add page to set
                            urlSet.add(scrapeLink);
                        } else if (remainder == undefined) {
                            //if not a product page, add it to remainder so that another scrape can be performed
                            remainder = scrapeLink;
                        }
                    }
                });
            });
        }
    });
}

//Scrape next level of menus to find remaining product pages to add to urlSet
function secondScrape() {
    request(remainder, function (error, response, html) {
        if (!error && response.statusCode == 200) {
            var $ = cheerio.load(html);
            $('a[href*=shirt]').each(function () {
                var a = $(this).attr('href');
                //create new link
                var scrapeLink = url + a;
                request(scrapeLink, function (error, response, html) {
                    if (!error && response.statusCode == 200) {
                        var $ = cheerio.load(html);
                        //collect remaining product pages and add to set
                        if ($('[type=submit]').length !== 0) {
                            urlSet.add(scrapeLink);
                        }
                    }
                });
            });
        }
    });
}

//call lastScraper so we can grab data from the set (product pages)
function lastScraper() {
    //scrape set, product pages
    for (var item of urlSet) {
        var url = item;
        request(url, function (error, response, html) {
            if (!error && response.statusCode == 200) {
                var $ = cheerio.load(html);
                //grab data and store as variables
                var price = $('.price').text();
                var imgURL = $('.shirt-picture').find('img').attr('src');
                var title = $('body').find('.shirt-details > h1').text().slice(4);
                var tshirtObject = {};
                //add values into tshirt object
                tshirtObject.Title = title;
                tshirtObject.Price = price;
                tshirtObject.ImageURL = imgURL;
                tshirtObject.URL = url;
                tshirtObject.Date = moment().format('MMMM Do YYYY, h:mm:ss a');
                //add the object into the array of tshirts
                tshirtArray.push(tshirtObject);
            }
        });
    }
}

//Convert array of tshirt objects and write to CSV file
function convertJson2Csv() {
    //The scraper should generate a folder called `data` if it doesn't exist.
    var dir = './data';
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir);
    }
    var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Date'];
    //convert tshirt data into CSV and pass in fields
    var csv = json2csv({ data: tshirtArray, fields: fields });
    //Name of file will be the date
    var fileDate = moment().format('MM-DD-YY');
    var fileName = dir + '/' + fileDate + '.csv';
    //Write file
    fs.writeFile(fileName, csv, { overwrite: true }, function (err) {
        if (err) throw err;
        console.log('file saved');
    });
}
If you want to chain those functions with promises, then they have to return promises.
If you want to chain them with async module, then they have to take callbacks as arguments.
Right now they neither return a promise (or anything else), nor do they take callbacks (or anything else) as arguments. If the function doesn't take a callback and doesn't return anything then all you can do is call it and that's it. You will not be notified of any result.
Example
Callbacks
If you have 3 functions that take callbacks:
function fun1(cb) {
    setTimeout(() => {
        cb(null, "fun1");
    }, 1000);
}
function fun2(cb) {
    setTimeout(() => {
        cb(null, "fun2");
    }, 3000);
}
function fun3(cb) {
    setTimeout(() => {
        cb(null, "fun3");
    }, 100);
}
Then you can know when they finish:
fun3((err, value) => {
    console.log('fun3 finished:', value);
});
And you can easily wait for one before you start the other:
fun1((err1, val1) => {
    fun2((err2, val2) => {
        console.log("fun1 + fun2:", val1, val2);
    });
});
Promises
If your functions return promises:
function fun1() {
    return new Promise((res, rej) => {
        setTimeout(() => {
            res("fun1");
        }, 1000);
    });
}
function fun2() {
    return new Promise((res, rej) => {
        setTimeout(() => {
            res("fun2");
        }, 3000);
    });
}
function fun3() {
    return new Promise((res, rej) => {
        setTimeout(() => {
            res("fun3");
        }, 100);
    });
}
Then you can also know when they finish:
fun3().then(value => {
    console.log('fun3 finished:', value);
});
You can also easily nest the calls:
fun1().then(val1 => {
    fun2().then(val2 => {
        console.log("fun1 + fun2:", val1, val2);
    });
});
Or:
fun1()
    .then(val1 => fun2())
    .then(val2 => fun3())
    .then(val3 => console.log('All 3 finished in series'));
etc.
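Applied to the scraper in the question, the same pattern means wrapping each request call in a promise; here is a hedged sketch of what firstScrape might become (get is a hypothetical helper reusing the question's url, urlSet and remainder globals, and it assumes secondScrape and the other functions are converted the same way):
// Promisified page fetch: resolves with a cheerio handle for the page.
function get(pageUrl) {
    return new Promise(function (resolve, reject) {
        request(pageUrl, function (error, response, html) {
            if (error || response.statusCode !== 200) return reject(error || new Error('bad status'));
            resolve(cheerio.load(html));
        });
    });
}
function firstScrape() {
    return get(url).then(function ($) {
        var links = $('a[href*=shirt]').map(function () {
            return url + $(this).attr('href');
        }).get();
        // wait for every sub-request before resolving
        return Promise.all(links.map(function (link) {
            return get(link).then(function ($page) {
                if ($page('[type=submit]').length !== 0) urlSet.add(link);
                else if (remainder === undefined) remainder = link;
            });
        }));
    });
}
// After converting secondScrape, lastScraper and convertJson2Csv the same way:
// firstScrape().then(secondScrape).then(lastScraper).then(convertJson2Csv);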
To be able to do much more with both styles, see the documentation for:
http://caolan.github.io/async/
http://bluebirdjs.com/docs/getting-started.html
https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise

Javascript file dropping and reading directories - Asynchronous Recursion

So I'm trying to create a file dropper web application. Right now, a user can drop files on the screen and I can read them, including all the files in a directory that was dropped. But, I don't know when the script is done reading the files.
Some code:
This first function handles a 'drop' event and will loop through each file and send it to another function that will read its contents.
function readDrop(evt)
{
    for (var i = 0; i < evt.dataTransfer.files.length; i++)
    {
        var entry = evt.dataTransfer.items[i].webkitGetAsEntry();
        if (entry)
            readContents(entry, "");
    }
    //Do stuff after all files and directories have been read.
}
This function is a recursive FileEntry reader. If it is a file, I will read the FileEntry. If it is a directory, it will loop through the contents and pass it through this function.
function readContents(entry, path)
{
    if (entry.isFile)
    {
        readFileData(entry, path, function (fileData)
        {
            _MyFiles.push(fileData);
        });
    }
    else if (entry.isDirectory)
    {
        var directoryReader = entry.createReader();
        var path = path + entry.name;
        directoryReader.readEntries(function (results)
        {
            for (var j = 0; j < results.length; j++)
            {
                readContents(results[j], path);
            }
        }, errorHandler)
    }
}
And here is my function for reading the files. The callback just pushes the fileData object to a global array
function readFileData(entry, path, callback)
{
    var fileData = { "name": entry.name, "size": 0, "path": path, "file": entry };
    entry.file(function (file)
    {
        fileData["size"] = file.size;
        callback(fileData);
    });
}
I'm not sure where to go from here so that I can have a callback when all files and directories have been read.
The FileSystem API doesn't seem well suited for the task of a full recursive traversal, perhaps that's part of the reason why other vendors are not adopting it. Anyway, with an arcane combination of Promises I think I was able to accomplish this goal:
function traverse_directory(entry) {
    let reader = entry.createReader();
    // Resolved when the entire directory is traversed
    return new Promise((resolve_directory) => {
        var iteration_attempts = [];
        (function read_entries() {
            // According to the FileSystem API spec, readEntries() must be called until
            // it calls the callback with an empty array. Seriously??
            reader.readEntries((entries) => {
                if (!entries.length) {
                    // Done iterating this particular directory
                    resolve_directory(Promise.all(iteration_attempts));
                } else {
                    // Add a list of promises for each directory entry. If the entry is itself
                    // a directory, then that promise won't resolve until it is fully traversed.
                    iteration_attempts.push(Promise.all(entries.map((entry) => {
                        if (entry.isFile) {
                            // DO SOMETHING WITH FILES
                            return entry;
                        } else {
                            // DO SOMETHING WITH DIRECTORIES
                            return traverse_directory(entry);
                        }
                    })));
                    // Try calling readEntries() again for the same dir, according to spec
                    read_entries();
                }
            }, errorHandler);
        })();
    });
}
traverse_directory(my_directory_entry).then(() => {
    // AT THIS POINT THE DIRECTORY SHOULD BE FULLY TRAVERSED.
});
Following on from the answer by drarmstr, I modified the function to be compliant with the Airbnb ESLint standards, and wanted to make some further comments on its usage and results.
Here's the new function:
function traverseDirectory(entry) {
    const reader = entry.createReader();
    // Resolved when the entire directory is traversed
    return new Promise((resolve, reject) => {
        const iterationAttempts = [];
        function readEntries() {
            // According to the FileSystem API spec, readEntries() must be called until
            // it calls the callback with an empty array. Seriously??
            reader.readEntries((entries) => {
                if (!entries.length) {
                    // Done iterating this particular directory
                    resolve(Promise.all(iterationAttempts));
                } else {
                    // Add a list of promises for each directory entry. If the entry is itself
                    // a directory, then that promise won't resolve until it is fully traversed.
                    iterationAttempts.push(Promise.all(entries.map((ientry) => {
                        if (ientry.isFile) {
                            // DO SOMETHING WITH FILES
                            return ientry;
                        }
                        // DO SOMETHING WITH DIRECTORIES
                        return traverseDirectory(ientry);
                    })));
                    // Try calling readEntries() again for the same dir, according to spec
                    readEntries();
                }
            }, error => reject(error));
        }
        readEntries();
    });
}
here's a drop event handler:
function dropHandler(evt) {
    evt.preventDefault();
    const data = evt.dataTransfer.items;
    for (let i = 0; i < data.length; i += 1) {
        const item = data[i];
        const entry = item.webkitGetAsEntry();
        traverseDirectory(entry).then(result => console.log(result));
    }
}
The result variable at the end contains an array, mirroring the tree structure of the folder you dragged and dropped.
For example, here is a Git repo for my own site, run through the above code.
Here's the Git repo for comparison: https://github.com/tomjn/tomjn.com
