I am attempting to convert the following function to be async in React (using CompressorJS):
const UploadImages = ({ jobInfo, setLoading, getData, accountInfo }) => {
  const [files, setFiles] = useState([]);
  const onDrop = useCallback(uploadedFiles => {
    uploadedFiles.forEach((file) => {
      console.log(file)
      new Compressor(file, {
        quality: 0.5,
        width: 500,
        height: 500,
        resize: "contain",
        success(result) {
          setFiles([...files, result]);
          console.log(result)
        },
        error(err) {
          console.log(err.message);
        },
      });
    })
  }, [files]);
  return (<a bunch of stuff here>);
}
When using the snippet above, if uploadedFiles contains more than one file, there is a problem with compressing each photo and adding it to the state ([files, setFiles]): only one of the images is added and the others are missed.
I have spent a number of hours attempting to rework this function in different ways, and the best solution I have come up with so far is the following:
function compressFile(file) {
  return new Promise((resolve, reject) => {
    new Compressor(file, {
      quality: 0.5,
      width: 500,
      height: 500,
      resize: "contain",
      success(result) {
        resolve(result)
      },
      error(err) {
        console.log(err.message);
        reject(err)
      },
    });
  });
}
function compressFiles(files) {
  return new Promise((resolve, reject) => {
    files.forEach((file) => {
      console.log(file)
      compressFile(file)
        .then(compFile => {
          console.log(compFile)
          setFiles([...files, compFile]);
        })
    })
    resolve()
  })
}
const onDrop = useCallback(async acceptedFiles => {
  message.loading('Compressing Images...');
  compressFiles(acceptedFiles)
}, [files]);
Unfortunately, this still does not work and I was wondering if someone could explain where I am going wrong.
This should help:
const compressFiles = files => Promise.all(
  files.map(file => compressFile(file))
)
  .then(compressedFiles => {
    console.log(compressedFiles);
    setFiles([...files, ...compressedFiles]);
  });
setFiles([...files, compFile]);
Every time this line runs, it creates an array starting with what's in files and adding one new file. This wipes out any other files that have been added since the callback was created, because the files variable only stores what you started with, not any changes you've made since. So you will need to use the functional updater form of setFiles to make sure you always have the latest version of the state:
setFiles(prev => [...prev, compFile]);
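To see the effect, here is a minimal sketch (resultA and resultB are placeholder names, not from your code) of two compression results arriving back to back inside the same closure:
// Both calls read the same stale `files` captured when onDrop was created:
setFiles([...files, resultA]); // files is still the initial []
setFiles([...files, resultB]); // files is STILL the initial [], so resultA is lost

// The functional updater always receives the latest state:
setFiles(prev => [...prev, resultA]);
setFiles(prev => [...prev, resultB]); // both results end up in state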
A couple of other recommendations. First, I'd suggest that you don't set state multiple times in an async loop. This will result in the files ending up in an unpredictable order and will cause the component to re-render at each step along the way. Instead, you can wait until all the compression is done and then set state once at the end. To wait for an array of promises, you can use Promise.all. Second, since you've already made compressFile return a promise, you do not need to use the new Promise constructor again in compressFiles. Putting these together, I recommend:
function compressFiles(files) {
  const promises = files.map(file => compressFile(file));
  Promise.all(promises).then(compressedFiles => {
    setFiles(prev => [...prev, ...compressedFiles]);
  });
}
Or using async/await:
async function compressFiles(files) {
  const promises = files.map(file => compressFile(file));
  const compressedFiles = await Promise.all(promises);
  setFiles(prev => [...prev, ...compressedFiles]);
}
Related
I'm working on a microcontroller that takes either docx files or HTML strings as input, transforms them into a single PDF file, and returns its link as an output.
My code looks like this so far:
// 'files' is an array of uploaded docx files.
const uploaded = files.map((file) => {
  return new Promise((resolve, reject) => {
    pump(
      file.toBuffer(),
      fs.createWriteStream(join(__dirname, 'files', file.filename))
        .on('finish', resolve)
    )
  })
})
Promise.all(uploaded)
  // Is triggered
  .then(async () => await convertFiles())
  // Is not triggered
  .then(async () => {
    // concatStoreFiles() is an external function because I need it somewhere else too
    test = await concatStoreFiles(join(__dirname, 'files'))
    console.log({test})
    res.send(test)
  })
const convertFiles = () => {
  return new Promise((resolve, reject) => {
    const cmd = `soffice --headless --convert-to pdf --outdir ${join(__dirname, 'files')} ${join(__dirname, 'files', '*.*')}`
    exec(cmd, (error, stdout, stderror) => {
      if (error) console.warn(error)
      resolve(stdout ?? stderror)
    })
  })
}
concatStoreFile.js
module.exports = async function concatFiles (dirPath, outPath) {
  return new Promise((resolve, reject) => {
    const existingFiles = []
    fs.readdir(dirPath, (e, files) => {
      files.forEach((file) => {
        // is added to the files list only if finishing with ".pdf"
        if (/[\d\w_-]+.pdf/.matches(file)) {
          existingFiles.push(file)
        }
      });
      resolve(existingFiles)
    })
  })
}
I'm working with Insomnia for my development / test process, and it tells me that I get an empty response. However, I'm supposed to get an array of pdf files existing in a specific directory. I'm not even getting console.log({test}), so I don't think my second then() is triggered.
I'm really rusty with async/await and Promise syntax; what should I do in this situation?
Thank you in advance
The @fastify/multipart toBuffer() API returns a Promise, not a buffer. Check out this article
So you need to write something like:
const uploaded = files.map(processFile)

async function processFile (file) {
  const buffer = await file.toBuffer()
  const storedFileName = join(__dirname, 'files', file.filename)
  const writeStream = fs.createWriteStream(storedFileName)
  return new Promise((resolve, reject) => {
    pump(buffer, writeStream, (err) => {
      if (err) { return reject(err) }
      resolve(storedFileName)
    })
  })
}
Moreover, to improve the code, I returned the storedFileName instead of recalculating it.
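With that change, the values Promise.all resolves with are the stored paths, so (as a sketch building on the chain from your question) the next step can use them directly instead of rebuilding them:
Promise.all(uploaded)
  .then((storedFileNames) => {
    console.log(storedFileNames) // the paths resolved by processFile
    return convertFiles()
  })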
You can convert this:
.then(async () => await convertFiles())
to this:
.then(() => convertFiles())
Mixing async/await with promise then/catch chains leads to hidden bugs that are hard to find.
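For example, here is a sketch (doWork is a made-up placeholder) of the kind of bug this mixing can hide:
// The async callback neither awaits nor returns the inner promise,
// so the chain moves on immediately and a rejection from convertFiles() escapes it:
doWork()
  .then(async () => {
    convertFiles() // missing return/await
  })
  .catch(err => console.error(err)) // never sees convertFiles() failures

// Returning the promise directly keeps the chain intact:
doWork()
  .then(() => convertFiles())
  .catch(err => console.error(err))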
I'm building my first ever React website and I need help writing an asynchronous JavaScript function.
Here I'm uploading the user's input files to Firebase storage and then making a POST request to the API to store the data in the database. However, since the Firebase upload takes some time, the API request happens before the upload finishes, so the data does not get stored in the DB. I know I should use promises or the async/await keywords here, but I can't figure out how. I'd appreciate it if someone could help. Thanks in advance!
Here's the relevant code snippet:
const save = (items) => {
  items.forEach((item) => {
    const fileName = new Date().getTime() + item.label + item.file.name;
    const uploadTask = storage.ref(`/items/${fileName}`).put(item.file);
    uploadTask.on(
      "state_changed",
      (snapshot) => {
        const progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
        console.log("Upload is" + progress + "% done.");
      },
      (err) => {
        console.log(err)
      },
      () => {
        storage.ref("items").child(fileName).getDownloadURL().then((url) => {
          setSong((prev) => {
            return { ...prev, [item.label]: url };
          });
        });
      }
    );
  })
  console.log(song)
  axios.post("songs/create", song);
}
PS: Here, items is the array of input files from the user; each file has a label, which is how the attributes are named in the JSON document.
setSong is a useState function. The JSON document already contains the other user inputs (which are not files), and setSong is used to append the Firebase URLs of the files to it.
You have to wait for all the files to be uploaded before calling your API. To do that, you can use Promise.all to wait for all of the upload promises to resolve:
const save = items => {
  Promise.all(
    items.map(item => {
      return new Promise(resolve => {
        const fileName = new Date().getTime() + item.label + item.file.name
        const uploadTask = storage.ref(`/items/${fileName}`).put(item.file)
        uploadTask.on(
          'state_changed',
          snapshot => {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100
            console.log('Upload is' + progress + '% done.')
          },
          err => {
            console.log(err)
          },
          () => {
            storage
              .ref('items')
              .child(fileName)
              .getDownloadURL()
              .then(url => {
                setSong(prev => {
                  return { ...prev, [item.label]: url }
                })
                resolve({ [item.label]: url })
              })
          }
        )
      })
    })
  ).then(res => {
    // merge the resolved { [label]: url } objects into a single song object
    const song = {}
    res.forEach(item => {
      Object.assign(song, item)
    })
    axios.post('songs/create', song)
  })
}
Explanation
Functions and Async
Async/await can be applied wherever a function is defined. Functions can be written in the following forms:
function name() {};
const name = () => {};
To write an async function, you would do the following:
async function name(){};
All of these functions still have to be called in order to run, though. To make a function run without calling it separately, we can turn it into an IIFE, or Immediately Invoked Function Expression: you surround the function in parentheses and end it with ();.
(function () {})();
If we simplify this:
(() => {})();
Implementing async would go like this:
(async () => {})();
Await
The await operator is used to wait for a Promise. Putting await in front of an expression that produces a promise makes the function wait for that promise to settle. If it is used in front of an expression that doesn't involve a promise, it is redundant and your code editor/IDE will say so.
(async () => {
  const str = await 'some string';
  console.log(str);
})();
await is redundant here since the expression 'some string' does not relate to a promise, but a string.
(async () => {
  const myPromise = new Promise((resolve, reject) =>
    resolve('some string')
  );
  const str = await myPromise;
  console.log(str);
})();
await is properly used here since the expression myPromise is a promise that resolves to a string.
Implementation
I'm not 100% sure how the API works with promises; I recommend you verify it yourself, but this is my educated guess:
const save = (items) => {
  Promise.all(
    items.map((item) => {
      return new Promise(async (resolve) => {
        const fileName = new Date().getTime() + item.label + item.file.name;
        // note: put() returns the UploadTask itself, so it is not awaited here
        const uploadTask = storage
          .ref(`/items/${fileName}`)
          .put(item.file);
        await uploadTask.on(
          "state_changed",
          (snapshot) => {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
            console.log("Upload is" + progress + "% done.");
          },
          (err) => {
            console.log(err);
          },
          async () => {
            await storage
              .ref("items")
              .child(fileName)
              .getDownloadURL()
              .then((url) => {
                setSong((prev) => {
                  return { ...prev, [item.label]: url };
                });
                resolve({ [item.label]: url });
              });
          }
        );
      });
    })
  ).then(async (res) => {
    // merge the resolved { [label]: url } objects into a single song object
    const song = {};
    res.forEach((item) => {
      Object.assign(song, item);
    });
    await axios.post("songs/create", song);
  });
};
I have an async function that creates a folder and I want to be able to do a computation with this folder and then have the same function remove it.
What I have tried:
const t = async () => {
  let createdFolderPath = ''
  return new Promise(async (resolve, reject) => {
    createdFolderPath = await createAFolder()
    console.log('Folder Created')
    resolve(createdFolderPath)
  }).finally(async () => {
    await deleteCreatedFolder(createdFolderPath)
    console.log('Deleted Folder')
  })
}
t().then(async (folderPath) => {
  await doSomethingWithFolderThatIsAsync(folderPath)
  console.log('Computation Done')
})
What I want to happen is:
Folder Created
Computation Done
Deleted Folder
What actually happens is:
Folder Created
Deleted Folder
Computation Done
I want the t function to also remove the folder so the person using it doesn't have to worry about removing the folder.
I'd pass the .then callback into t instead of using the returned Promise from t. You should also avoid the explicit Promise construction antipattern:
const t = async (thenCallback) => {
  let createdFolderPath = ''
  return createAFolder()
    .then((folderPath) => {
      createdFolderPath = folderPath
      return thenCallback(folderPath)
    })
    .finally(async () => {
      await deleteCreatedFolder(createdFolderPath)
      console.log('Deleted Folder')
    })
}
t(async (folderPath) => {
  await doSomethingWithFolderThatIsAsync(folderPath)
  console.log('Computation Done')
})
  .then(() => {
    // everything finished
  });
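The same clean-up guarantee can also be written with async/await and try/finally; here is an equivalent sketch using the createAFolder and deleteCreatedFolder helpers from the question:
const t = async (thenCallback) => {
  const createdFolderPath = await createAFolder()
  console.log('Folder Created')
  try {
    // the computation runs to completion before the finally block
    return await thenCallback(createdFolderPath)
  } finally {
    await deleteCreatedFolder(createdFolderPath)
    console.log('Deleted Folder')
  }
}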
I'm working with a module called findit, which recursively finds files within a target folder and emits the events "file", "error", and "end":
const finder = findit("folder/");
finder.on("file", (file) => {});
finder.on("error", ((error) => {});
finder.on("end", () => {});
The end event will be called when the finder has found all the files... but inside the file event I'm making an async operation that keeps only the files that have something inside:
const finder = findit("folder/");
let neededFiles = [];
finder.on("file", (file) => {
// async operation here to store only the files that i want
// neededFiles = [...neededFiles, file];
});
finder.on("error", ((error) => {});
finder.on("end", () => {
console.log(neededFiles); // empty array
});
The neededFiles array will be empty because the async operation has not finished yet. My question is: what changes do I need to make in order to wait for the async operation before the end event handler runs?
Thanks.
Since you've provided only a sketch of your app, I tried to build around that and show how you could wrap it in a promise to handle the async part.
function find(folder) {
  return new Promise((resolve, reject) => {
    const finder = findit(folder);
    const files = [];

    finder.on("file", (file) => {
      //do stuff like
      //files.push(valueOrPromise);
      //or
      //files.push( find(anotherPath) );
    });
    finder.on("error", reject);
    finder.on("end", () => {
      //this line finally "returns" the result.
      //to this point you can modify files as you wish
      resolve( Promise.all(files) );

      //hint, if you're working with recursive stuff, you may have nested Arrays, so you should flatten the result
      //resolve( Promise.all(files).then(values => values.reduce((a,b) => a.concat(b), [])) )
    });
  })
}
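A short usage sketch of the wrapper above (assuming the same findit module as in your question):
find("folder/")
  .then((neededFiles) => {
    console.log(neededFiles); // populated once the 'end' event has fired
  })
  .catch((error) => console.error(error));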
Usually people ask at this point: why do I need promises? Because they implement state management of async tasks; so why would you want to implement it yourself?
And why not use the 'end' event inside the 'file' event? Something like:
finder.on("file", (file) => {
// async operation here to store only the files that i want
// neededFiles = [...neededFiles, file]; // GENERATE A PROMISE SO U CAN CHAIN THEN FUNCTION
neededFilesPromise.then(function(neededFiles){
finder.on("end", () => {
console.log(neededFiles); // empty array
});
}).catch(function() {
finder.on("error", ((error) => {});;
})
});
I would say this is the perfect use case for Promise.all(), so something like this. You might want to 'promisify' your file operations so it is even cleaner (if you are using something like bluebird to replace the native promises). I am not sure if your lib returns the content of the file or a file name.
const fs = require('fs');
require('bluebird').promisifyAll(fs);

const finder = findit("folder/");
let neededFiles = [];

finder.on("file", (file) => {
  // if file is the file content
  neededFiles.push(/* result of your op above */);
  // if file is a file name (promisifyAll exposes the promise-returning readFileAsync)
  neededFiles.push(fs.readFileAsync(/* file name */));
});
finder.on("error", (error) => {});
finder.on("end", () => {
  Promise.all(neededFiles)
    .then((nf) => {
      console.log(nf); // you should now have something here...
    });
});
I'm trying to extend some existing code with additional promises, but they are a new topic for me at the moment and I'm obviously missing something. This is running as part of a build script for npm.
All I am currently trying to make happen is for the final then to be called after the pack operation has happened for each architecture. I have tried wrapping it in a
return new Promise
But at the moment I am not returning anything from that function, so I'm not sure what I should include in the resolve call at the end. If I just call resolve with true, nothing happens, and wrapping it in a promise seems to cause the function to not actually run, with no errors caught anywhere.
I'm guessing I am going about this completely wrong; all I want to achieve is to run another function once the previous one has completed.
Here's the code as it stands with the additional .then that i can't get to be called.
function build(cfg) {
  return new Promise((resolve, reject) => {
    webpack(cfg, (err, stats) => {
      if (err) return reject(err);
      resolve(stats);
    });
  });
}
function startPack() {
  console.log('start pack...');
  build(electronCfg)
    .then(() => build(cfg))
    .then(() => del('release'))
    .then(paths => {
      if (shouldBuildAll) {
        // build for all platforms
        const archs = ['ia32', 'x64'];
        const platforms = ['linux', 'win32', 'darwin'];
        platforms.forEach(plat => {
          archs.forEach(arch => {
            pack(plat, arch, log(plat, arch));
          });
        });
      } else {
        // build for current platform only
        pack(os.platform(), os.arch(), log(os.platform(), os.arch()));
      }
    })
    .then(() => {
      console.log('then!');
    })
    .catch(err => {
      console.error(err);
    });
}
function pack(plat, arch, cb) {
  // there is no darwin ia32 electron
  if (plat === 'darwin' && arch === 'ia32') return;

  const iconObj = {
    icon: DEFAULT_OPTS.icon + (() => {
      let extension = '.png';
      if (plat === 'darwin') {
        extension = '.icns';
      } else if (plat === 'win32') {
        extension = '.ico';
      }
      return extension;
    })()
  };

  const opts = Object.assign({}, DEFAULT_OPTS, iconObj, {
    platform: plat,
    arch,
    prune: true,
    'app-version': pkg.version || DEFAULT_OPTS.version,
    out: `release/${plat}-${arch}`,
    'osx-sign': true
  });

  packager(opts, cb);
}
You didn't say what log is, but if it's a plain logging function, then it looks like you're passing in undefined (the result from calling log(...)) as the cb argument to pack. Perhaps you meant:
pack(plat, arch, () => log(plat, arch));
In any case, this won't do anything to wait for packing to finish. I don't know why you're not seeing any console output, but if you're looking for this output to happen after all the packing has finished, then you need to wrap packager in a promise. Something like:
var pack = (plat, arch) => new Promise(resolve => {
  // ...
  packager(opts, resolve);
});
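If you also want a packaging failure to surface in the final .catch, the wrapper can reject on error (a sketch, assuming packager follows the usual Node (err, result) callback convention):
var pack = (plat, arch) => new Promise((resolve, reject) => {
  // ... build opts as before ...
  packager(opts, (err, result) => err ? reject(err) : resolve(result));
});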
And then use Promise.all instead of forEach to do all the packaging (in parallel if that's OK):
.then(paths => {
  if (!shouldBuildAll) {
    return pack(os.platform(), os.arch());
  }
  return Promise.all(['linux', 'win32', 'darwin'].map(plat =>
    Promise.all(['ia32', 'x64'].map(arch => pack(plat, arch)))
  ));
})
.then(() => console.log('then!'))
.catch(err => console.error(err));