JavaScript: how to make a forEach loop synchronous

On my project I need to unzip a file and upload its content to a Node.js server. I am able to unzip the file, i.e. I get the filenames of its content, etc.
After unzipping I want to upload the content to my server. This is where the problem is. I am using FormData to transmit the files, but every time I try to upload them, Firefox shows me the following error in the console:
"TypeError: Argument 2 of FormData.append does not implement interface Blob."
So it seems that no file object is being given to the FormData...
For unzipping I use JSZip [link to JSZip][1]
EDIT
I figured the first problem out myself. Now I am having trouble executing the code in the right order. My JS code:
$("#file").on("change", function (evt) {
var formData = new FormData();
var counter = 0;
// remove content
$result.html("");
// be sure to show the results
$("#result_block").removeClass("hidden").addClass("show");
// Closure to capture the file information.
console.log('1');
function handleFile(f) {
var $fileContent = $("<ul>");
$result.append($fileContent);
JSZip.loadAsync(f)
.then(function (zip) {
zip.forEach(function (relativePath, zipEntry) {
console.log('2');
console.log(zipEntry);
zip.file(zipEntry.name).async("blob").then(function (u8) {
console.log('3');
console.log(u8);
formData.append('photo', u8, counter);
counter++;
$fileContent.append($("<li>", {
text: zipEntry.name
}));
})
});
}, function (e) {
$result.append($("<div>", {
"class": "alert alert-danger",
text: "Error reading " + f.name + ": " + e.message
}));
});
}
var files = evt.target.files;
handleFile(files[0]);
});
The console output is:
1
2
object
2
object
3
blob
3
blob
But what I want is:
1
2
object
3
blob
2
object
3
blob
But I don't know how to put another .then to make this work the way I want to.
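One way to get that ordering (a sketch, not from the original question) is to build a single promise chain inside the forEach, so each entry's .async("blob") call only starts after the previous one has finished. JSZip's forEach itself is synchronous, so it can be used to queue the work:

JSZip.loadAsync(f).then(function (zip) {
    var chain = Promise.resolve();
    zip.forEach(function (relativePath, zipEntry) {
        // Queue this entry behind the previous one instead of
        // starting all the blob reads at once.
        chain = chain.then(function () {
            console.log('2');
            console.log(zipEntry);
            return zip.file(zipEntry.name).async("blob").then(function (blob) {
                console.log('3');
                console.log(blob);
                formData.append('photo', blob, counter);
                counter++;
            });
        });
    });
    // Resolves only after every entry has been processed in order,
    // so any code that needs the complete formData can go here.
    return chain;
});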

Related

Flushing files with WebKit's filesystem API in Safari

I am trying to use the filesystem API to create permanent files, and to write and read data from them.
Although I succeeded in creating the file and writing data to it, after calling flush() the file becomes empty (and its size is 0).
The files that I created exist and I can still see them in a different run of Safari, but the data is lost and the files are all 0-sized.
Even if I try to read the file just after writing to it and flushing, the data is lost and its size returns to 0.
Does anybody know what I am doing wrong?
I tried running this:
console.log("Starting");
async function files() {
// Set a Message
const message = "Thank you for reading this.";
const fileName = "Draft.txt";
// Get handle to draft file
const root = await navigator.storage.getDirectory();
const draftHandle = await root.getFileHandle(fileName, { create: true });
console.log("File Name: ", fileName);
// Get sync access handle
const accessHandle = await draftHandle.createSyncAccessHandle();
// Get size of the file.
const fileSize = await accessHandle.getSize();
console.log("File Size: ", fileSize); // getting 0 here
// Read file content to a buffer.
const buffer = new DataView(new ArrayBuffer(fileSize));
const readBuffer = accessHandle.read(buffer, { at: 0 });
console.log("Read: ", readBuffer); // getting 0 here because the file was just created
// Write the message to the file.
const encoder = new TextEncoder();
const encodedMessage = encoder.encode(message);
const writeBuffer = accessHandle.write(encodedMessage, { at: readBuffer });
console.log("Write: ", writeBuffer); // writing 27 bytes here, succeeding
// Persist changes to disk.
accessHandle.flush();
// Always close FileSystemSyncAccessHandle if done.
accessHandle.close();
console.log("Closed file ");
// Find files under root/ and print their details.
for await (const handle of root.values()) {
console.log('Item in root: ', handle.name);
if (handle.kind == "file") {
let f = await handle.getFile();
console.log("File Details: ", f); // I can see here the file I created now and earlier created files, but all of them are 0 sized
}
}
}
files();
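For what it's worth, a minimal read-back check (my addition, not part of the original snippet) that reopens the file after close() and reads it through the regular File interface can show whether anything was persisted at all:

async function verify(fileName) {
    const root = await navigator.storage.getDirectory();
    const handle = await root.getFileHandle(fileName);
    // getFile() reads whatever was actually persisted to storage.
    const file = await handle.getFile();
    console.log("Size after reopen:", file.size);
    console.log("Text after reopen:", await file.text());
}
verify("Draft.txt");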

nodejs download multiple files from web

I am trying to download multiple files from a OneDrive folder. Below is my code, but it will only download the last file and not all of them:
for (const f in files) {
    var fileURL = (files[f]["#microsoft.graph.downloadUrl"]);
    var fileName = (JSON.stringify(files[f].name)).slice(1, -1);
    var request = https.get(fileURL, function (response) {
        console.log(fileURL);
        if (response.statusCode == 200) {
            var file = fs.createWriteStream(`./temp/${userId}/${fileName}`);
            response.pipe(file);
        }
        request.setTimeout(60000, function () {
            request.destroy();
        });
    });
}
i.e. the console log would print
FILE_URL1
FILE_URL1
FILE_URL1
rather than
FILE_URL1
FILE_URL2
FILE_URL3
Note that if the console.log(fileURL) is placed before var request = https.get... it prints out the three file URLs. I'm not sure if it's a problem with the loop or if there is something else. I am quite new at JavaScript, so I don't know a lot.
Replace the var with const or let and you will see a different result.
Description: What's the difference between using "let" and "var"?
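For illustration, here is the same loop with block-scoped declarations (a sketch based on the question's code; files, userId, https, and fs are assumed to be in scope as above). With const, every iteration gets its own fileURL and fileName bindings, so each response callback sees the URL it was started with:

for (const f in files) {
    const fileURL = files[f]["#microsoft.graph.downloadUrl"];
    const fileName = JSON.stringify(files[f].name).slice(1, -1);
    const request = https.get(fileURL, function (response) {
        console.log(fileURL); // a different URL per request now
        if (response.statusCode == 200) {
            const file = fs.createWriteStream(`./temp/${userId}/${fileName}`);
            response.pipe(file);
        }
    });
    request.setTimeout(60000, function () {
        request.destroy();
    });
}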

Node.js Streams: stream functions are not invoked the second time for the same file

SFTB, I have a POST request coming to my server with multipart/form-data; from that request I want to get the contents of the file.
I am reading the uploaded file through streams and piping it to csvParser, which is just an instance of csv-streamify. After that, the content is passed to a custom Transform function that fetches the resource over HTTP (I am using got for that), and after fetching it I compress the image.
Now the issue is: when I submit the file for the first time it works like a charm, but when I try to submit the same file a second time it skips the whole stream part and jumps directly to the finish event handler.
Logs for the first time:
Submitting
Converting image at C:\image-minifier-sqd\build\src\1469004088476.bell.svg
Build is present at build\dest\1469004088476.bell.svg
Converting image at C:\image-minifier-sqd\build\src\1469004088996.mail.svg
Build is present at build\dest\1469004088996.mail.svg
Finished
Logs when I submit the same file a second time (both with and without a refresh on the front-end):
Submitting
Finished
FYI, on the front-end I am using the fetch API to make the POST request.
My server code:
function createParser() {
    var parser = new Transform({objectMode: true});
    parser._transform = function(data, encoding, done) {
        const date = new Date();
        const link = data.toString().slice(2, -3);
        const fileName = date.getTime() + '.' + link.split('/').pop(),
            filePath = path.resolve(__dirname, `build/src/${fileName}`);
        got.stream(link)
            .pipe(fs.createWriteStream(filePath, {flags: 'a'}))
            .on('close', _ => {
                console.log(`Converting image at ${filePath}`)
                // Compressing images
                imagemin([filePath], 'build/dest', {
                    plugins: [
                        imageminMozjpeg(),
                        imageminPngquant({speed: 10}),
                        imageminSvgo(),
                        imageminGifsicle()
                    ]
                })
                .then(file => {
                    console.log(`Build is present at ${file[0].path}`);
                    this.push(file[0].path);
                    done();
                });
            });
    };
    return parser;
}
// A request comes here with multipart/form-data
app.post('/submit/csv', upload.array('data'), (req, res) => {
    console.log('Submitting')
    const stream = fs.createReadStream(path.resolve(__dirname, req.files[0].path))
        .pipe(csvParser)
        .pipe(createParser())
        .pipe(res)
        .on('finish', _ => {
            log('Finished');
            res.end();
        });
});
Thanks.
I think the problem is related to the reuse of csvParser. Try to wrap the creation of the csvParser in a function and use it instead:
function createCsvParser() {
    const parser = csv();
    parser.on('data', function (line) {
        [...]
    });
    return parser;
}
and change .pipe(csvParser) into .pipe(createCsvParser()).
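Put together, the handler might look like this (a sketch; createCsvParser and createParser are the functions defined above, so a fresh parser pipeline is built per request):

app.post('/submit/csv', upload.array('data'), (req, res) => {
    console.log('Submitting');
    fs.createReadStream(path.resolve(__dirname, req.files[0].path))
        .pipe(createCsvParser())
        .pipe(createParser())
        .pipe(res)
        .on('finish', _ => console.log('Finished'));
});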
Hope this helps.

Meteor CollectionFS: Make sure the image is uploaded before displaying

I am using the CollectionFS package with the S3 adapter and I've looked at a few different solutions but cannot get this to work right.
The problem: even though the file/image is uploaded to S3 successfully, the callback for a successful upload is triggered before it is safe to display the image. This sometimes causes a broken image to be displayed.
I found out about the fileObj.once("uploaded", function(){}) callback, but it seems "uploaded" basically means the image was sent to the server; the S3 upload has not happened by then. A temporary workaround I found is to just have a setTimeout for 3-4 seconds, but this is not reliable.
Here is my upload code:
FS.Utility.eachFile(event, function(file) {
    Session.set('profilePhotoUploaded', false);
    var newFile = new FS.File(file);
    newFile.metadata = {owner: Meteor.userId()};
    ProfileImages.insert(newFile, function (err, fileObj) {
        if (err) {
            console.log("error! - " + err);
        } else {
            // handle success depending what you need to do
            var userId = Meteor.userId();
            // This does NOT run when the image is stored in S3. I think it runs when the image reaches the app server.
            fileObj.once("uploaded", function () {
                // timeout of 3 seconds to make sure the image is ready to be displayed
                // --- This is not a good solution and the image is not always ready
                setTimeout(function() {
                    var uploadedImage = {
                        "profile.image.url": "/cfs/files/profileImages/" + fileObj._id
                    };
                    Meteor.users.update(userId, {$set: uploadedImage});
                    Session.set('profilePhotoUploaded', true);
                }, 3000);
                console.log("Done uploading!");
            });
        }
    });
});
Is there a different callback to check if the image has actually been stored in S3? I tried fileObj.once("stored", function(){}) but that does not work.
The issue is that the stored hook will fire when the original image is saved on the server, so if you're creating multiple copies (thumbnails), this hook will fire before your thumbnails are stored. You can check which version was stored by inspecting the storeName argument. In the server-side file where you define the ProfileImages collection, add the following code, replacing 'profilePhotoLarge' with the name assigned to your FS.Store.S3 store:
ProfileImages.on('stored', Meteor.bindEnvironment(function(fileObj, storeName) {
    if (storeName === 'profilePhotoLarge') {
        Meteor.users.update({_id: fileObj.metadata.owner}, {
            $set: {
                'profile.image.url': 'https://your AWS region domain/your bucket name/your folder path/' + fileObj._id + '-' + fileObj.name()
            }
        });
    }
}, function() { console.log('Failed to bind environment'); }));
For profile photos I created an S3 bucket and set the permissions to allow anyone to read the files, so I'm storing the URL to the image on S3, which may not be correct in your case. Since the user object is reactive on the client side this update will cause the profile photo to update automatically.
I found that fileObj.hasStored("profileImages") specifies exactly when the image is stored on S3. So after starting the upload process, I just start an interval that checks every second whether it has been saved. This might not be the best solution, but it is what worked for me.
FS.Utility.eachFile(event, function(file) {
    Session.set('profilePhotoUploaded', false);
    var newFile = new FS.File(file);
    newFile.metadata = {owner: Meteor.userId()}; // TODO: check in deny that id is of the same user
    ProfileImages.insert(newFile, function (err, fileObj) {
        if (err) {
            console.log("error! - " + err);
        } else {
            // handle success depending what you need to do
            var userId = Meteor.userId();
            // Timer every 1 second
            var intervalHandle = Meteor.setInterval(function () {
                console.log("Inside interval");
                if (fileObj.hasStored("profileImages")) {
                    // File has been uploaded and stored. Can safely display it on the page.
                    var uploadedImage = {
                        "profile.image.url": "/cfs/files/profileImages/" + fileObj._id
                    };
                    Meteor.users.update(userId, {$set: uploadedImage});
                    Session.set('profilePhotoUploaded', true);
                    // file has stored, close out interval
                    Meteor.clearInterval(intervalHandle);
                }
            }, 1000);
        }
    });
});

How to zip files using the JSZip library

I am working on an offline application using HTML5 and jQuery for mobile. I want to back up files from local storage using JSZip. Below is a code snippet of what I have done:
if (localStorageKeys.length > 0) {
    for (var i = 0; i < localStorageKeys.length; i++) {
        var key = localStorageKeys[i];
        if (key.search(_instrumentId) != -1) {
            var data = localStorage.getItem(localStorageKeys[i])
            var zip = new JSZip();
            zip.file(localStorageKeys[i] + ".txt", data);
            var datafile = document.getElementById('backupData');
            datafile.download = "DataFiles.zip";
            datafile.href = window.URL.createObjectURL(zip.generate({ type: "blob" }));
        }
        else {
        }
    }
}
In the code above I am looping through the localStorage content and saving each file in text format. The challenge I am facing is how to create several text files inside DataFiles.zip, as currently I am only able to create one text file inside the zipped folder. I am new to JavaScript, so bear with any ambiguity in my question.
Thanks in advance.
Just keep calling zip.file().
Look at the example from their documentation page (comments mine):
var zip = new JSZip();
// Add a text file with the contents "Hello World\n"
zip.file("Hello.txt", "Hello World\n");
// Add another text file with the contents "Goodbye, cruel world\n"
zip.file("Goodbye.txt", "Goodbye, cruel world\n");
// Add a folder named "images"
var img = zip.folder("images");
// Add a file named "smile.gif" to that folder, from some Base64 data
img.file("smile.gif", imgData, {base64: true});
zip.generateAsync({type: "base64"}).then(function (content) {
    location.href = "data:application/zip;base64," + content;
});
The important thing is to understand the code you've written - learn what each line does. If you do this, you'd realize that you just need to call zip.file() again to add another file.
Adding to @Jonathon Reinhart's answer, you could also set both the file name and path at the same time:
// create a file and a folder
zip.file("nested/hello.txt", "Hello World\n");
// same as
zip.folder("nested").file("hello.txt", "Hello World\n");
If you receive a list of files (from the UI, an array, or whatever), you can compress first and then archive. The code is something like this:
function upload(files) {
    var zip = new JSZip();
    let archive = zip.folder("test");
    files.map(function (file) {
        // Add each file to the "test" folder inside the archive
        archive.file(file.name, file.raw, {base64: true});
    });
    return archive.generateAsync({
        type: "blob",
        compression: "DEFLATE",
        compressionOptions: {
            level: 6
        }
    }).then(function (content) {
        // send to server or whatever operation
    });
}
This worked for me with multiple JSON files. Maybe it helps.
In case you want to zip files and need a base64 output, you can use the code below:
import * as JSZip from 'jszip'
var zip = new JSZip();
zip.file("Hello.json", this.fileContent);
zip.generateAsync({ type: "base64" }).then(function (content) {
    const base64Data = content;
    // use base64Data here, e.g. send it to a server
});
