How can I post an array of objects with axios (JavaScript)?

I am using react-dropzone to upload mp3 files and a metadata npm package to grab all the contents of each file. When sending the result to axios.post(), I get the error "Body exceeded 1mb limit".
This is where I'm sending the new data:
async function insertArtists(uploadedArtists, doneCallback) {
  // console.log(uploadedArtists); // [5]
  // console.log(artistsData); // []
  const originalArtists = [...artistsData];
  const newArtists = [...uploadedArtists, ...artistsData];
  try {
    setArtistsData(newArtists);
    const results = await axios.post(`${restUrl}/multi`, newArtists);
    console.log(results);
    if (doneCallback) {
      doneCallback();
    }
  } catch (error) {
    console.log("error thrown inside insertArtists", error);
    setArtistsData(originalArtists);
  }
}
I found this issue thread: https://github.com/vercel/next.js/issues/19684, but it didn't explain how to add other params.
My Dropzone:
function Dropzone() {
  const { insertArtists } = useReqRest();
  const { getRootProps, getInputProps } = useDropzone({
    accept: "audio/*",
    onDrop: useCallback(async (acceptedFiles) => {
      const filesData = await Promise.all(
        acceptedFiles.map(async (file) => {
          let fileContents = {
            _id: uuidv4(),
            path: file.path,
            fileName: file.name,
            fileSize: file.size,
            fileType: file.type,
          };
          const meta = await musicMetadata.parseBlob(file);
          return { ...fileContents, ...meta.common };
        })
      );
      const fullDb = [...artists, ...filesData];
      insertArtists(fullDb);
    }),
  });
}

If your issue is just the "Body exceeded 1mb limit" error, you can add this at the end of your API route file and raise the limit above the 1 MB default:
export const config = {
  api: {
    bodyParser: {
      sizeLimit: '4mb', // raise this above the default '1mb' as needed
    },
  },
}
If you want to include all the file details, consider using FormData(), appending the files to it, and sending it as the body of the request.
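For example, a minimal sketch of that approach (the /api/upload endpoint and the field names are placeholders, not from the question):
// Hedged sketch: send the raw files plus their parsed metadata as multipart form data.
async function uploadWithFormData(acceptedFiles, metadataList) {
  const formData = new FormData();
  acceptedFiles.forEach((file) => {
    formData.append("files", file, file.name);
  });
  // Extra params (e.g. the parsed metadata array) can ride along as a JSON string.
  formData.append("metadata", JSON.stringify(metadataList));
  // Let the browser set the multipart Content-Type header (including the boundary).
  const response = await axios.post("/api/upload", formData);
  return response.data;
}
A Next.js API route receiving this would then typically set bodyParser: false in its exported config so a multipart parser (e.g. formidable or busboy) can read the raw request stream.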

You need to set a custom config in order to send more than 1 MB of data:
export const config = {
  api: {
    bodyParser: {
      sizeLimit: '1mb' // change this to a larger value, e.g. '4mb'
    },
  },
}
For more info check this out: https://nextjs.org/docs/api-routes/api-middlewares#custom-config

Related

How to solve "Firebase Storage: Invalid URL" when trying to upload an image in React Native

I am trying to upload an image to Firebase Storage; the problem is that I get "Firebase Storage: Invalid URL" when I try to upload it.
First I get the uri from react-native-image-picker, and then I use it to make the reference.
This is my code:
export async function uploadImage() {
  const options = {
    storageOptions: {
      path: "images",
      mediaType: "photo"
    },
    includeBase64: true
  }
  const result = await launchImageLibrary(options);
  const imagePath = result.assets[0].uri
  console.log(imagePath)
  //firebase
  const imageRef = ref(storage, imagePath);
  const snapshot = await uploadBytes(imageRef, file, {
    contentType: "image/jpeg",
  });
  console.log("uploaded!")
}
This is the uri printed by console.log:
file:///data/user/0/com.chatapp/cache/rn_image_picker_lib_temp_f85b1089-267f-4271-9ccb-2f1487d83619.jpg
While uploading anything to Firebase Storage, you need permission to upload the file.
What do you want to upload: the file itself or its Base64 content?
const uploadImage = async () => {
  const options: ImageLibraryOptions = {
    storageOptions: {
      path: 'images',
      mediaType: 'photo',
    },
    includeBase64: true,
  };
  const result = await launchImageLibrary(options);
  if (result) {
    const {assets} = result;
    if (assets && assets.length > 0) {
      try {
        const imagePath = result.assets[0].uri;
        console.log(imagePath);
        //firebase
        const reference = storage().ref('black-t-shirt-sm.png');
        const imageRef = await reference.putFile(imagePath, {
          contentType: 'image/jpeg',
        });
        console.log('imageRef', imageRef);
        // const snapshot = await uploadBytes(imageRef, file, {
        //   contentType: 'image/jpeg',
        // });
        console.log('uploaded!');
      } catch (error) {
        console.log('error', error);
      }
    }
  }
};
To upload the file you need to follow its guidelines: RN Firebase.
Storage upload requires platform-specific code. I have a code snippet at my GitHub playground with storage upload and react-native-image-picker: Snippet.
Essentially:
const uri = Platform.OS === 'android' ? uploadUri : uploadUri.replace('file://', '');
const task = storage().ref(ref);
return task.putFile(uri);
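As an aside (not from either answer): if you want to stay with the modular web SDK used in the question (ref/uploadBytes) instead of @react-native-firebase, one approach is to fetch the picker's file:// URI, convert it to a Blob, and give the storage reference a bucket path rather than the device URI. A rough sketch, where the images/ path and file name are placeholders:
// Hedged sketch using the Firebase web SDK from the question.
// `storage` is assumed to be the initialized instance from getStorage().
import { ref, uploadBytes } from "firebase/storage";

export async function uploadImageFromUri(storage, uri) {
  // fetch() can read a local file:// URI in React Native and expose it as a Blob.
  const response = await fetch(uri);
  const blob = await response.blob();

  // Use a Storage path for the reference, not the device file URI.
  const imageRef = ref(storage, `images/${Date.now()}.jpg`);
  const snapshot = await uploadBytes(imageRef, blob, { contentType: "image/jpeg" });
  console.log("uploaded!", snapshot.metadata.fullPath);
  return snapshot;
}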

Incorrect response MIME type (Expected 'application/wasm') with PSPDFKit, and "the reserved size is not enough to contain the signature" issue in Oracle APEX

I followed the PSPDFKit Standalone Integration steps, and all files are in the public directory (the "i" folder in Apache Tomcat).
I get the "PSPDFKit for Web successfully loaded!" message, but it hangs and the console gives me a single error:
Failed to execute 'compile' on 'WebAssembly': Incorrect response MIME type. Expected 'application/wasm'.
Also, when I try to add a digital signature I face this issue:
Cannot add the container structure because the reserved size is not enough to contain the signature. Available size 8193, actual size 89694
I tried that with the code below:
PSPDFKit.load({
  container: "#pspdfkit",
  document: 'f?p=&APP_ID.:0:&APP_SESSION.:APPLICATION_PROCESS=PREVIEW_FILE:::FILE_ID:' + apex.item('P6_ID').getValue(),
  licenseKey: "",
  initialViewState: new PSPDFKit.ViewState({
    showSignatureValidationStatus:
      PSPDFKit.ShowSignatureValidationStatusMode.IF_SIGNED
  }),
  async trustedCAsCallback() {
    const response = await fetch("http://localhost:90/i/certs/ca.pem");
    const cert = await response.text();
    return [cert];
  }
})
  .then(function(instance) {
    var item = {
      type: "custom",
      id: "my-button",
      title: "digital sign",
      onPress: function(event) {
        instance
          .signDocument(null, generatePKCS7)
          .then(() => {
            console.log("document signed.");
          })
          .catch(error => {
            console.error("The document could not be signed.", error);
          });
      }
    };
    if (app_user == "aaaa") {
      instance.setToolbarItems(function(items) {
        items.push(item);
        return items;
      });
    }
    function generatePKCS7({ fileContents }) {
      const certificatePromise = fetch("http://localhost:90/i/certs/certificate.pem").then(response =>
        response.text()
      );
      const privateKeyPromise = fetch("http://localhost:90/i/certs/private-key.pem").then(response =>
        response.text()
      );
      return new Promise((resolve, reject) => {
        Promise.all([certificatePromise, privateKeyPromise])
          .then(([certificatePem, privateKeyPem]) => {
            const certificate = forge.pki.certificateFromPem(certificatePem);
            const privateKey = forge.pki.privateKeyFromPem(privateKeyPem);
            const p7 = forge.pkcs7.createSignedData();
            p7.content = new forge.util.ByteBuffer(fileContents);
            p7.addCertificate(certificate);
            p7.addSigner({
              key: privateKey,
              certificate: certificate,
              digestAlgorithm: forge.pki.oids.sha256,
              authenticatedAttributes: [
                {
                  type: forge.pki.oids.contentType,
                  value: forge.pki.oids.data
                },
                {
                  type: forge.pki.oids.messageDigest
                },
                {
                  type: forge.pki.oids.signingTime,
                  value: new Date()
                }
              ]
            });
            p7.sign({ detached: true });
            const result = stringToArrayBuffer(
              forge.asn1.toDer(p7.toAsn1()).getBytes()
            );
            resolve(result);
          })
          .catch(reject);
      });
    }
    function stringToArrayBuffer(binaryString) {
      const buffer = new ArrayBuffer(binaryString.length);
      let bufferView = new Uint8Array(buffer);
      for (let i = 0, len = binaryString.length; i < len; i++) {
        bufferView[i] = binaryString.charCodeAt(i);
      }
      return buffer;
    }
  })
  .catch(function(error) {
    console.error(error.message);
  });
APEX version 19.2, Tomcat 8.5, ORDS.
It looks like the WASM file is not served with the correct content type. The solution is either to fix your server to return the correct content type, or to disable streaming instantiation for WebAssembly when loading PSPDFKit, as described here: https://pspdfkit.com/guides/web/current/troubleshooting/common-issues/#response-has-unsupported-mime-type-error
In the future, please reach out to https://pspdfkit.com/support/request and our support team will help you there.
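For the first option on Tomcat, serving .wasm files with the application/wasm MIME type (for example via a mime-mapping in web.xml) is usually enough. For the second option, a rough sketch is below; the disableWebAssemblyStreaming option name is my assumption and should be checked against the linked troubleshooting guide:
// Hedged sketch, not from the answer: load PSPDFKit without WebAssembly
// streaming so the .wasm file's MIME type no longer matters at compile time.
// The option name disableWebAssemblyStreaming is an assumption to verify.
PSPDFKit.load({
  container: "#pspdfkit",
  document: documentUrl, // same APEX preview URL as in the question
  licenseKey: "",
  disableWebAssemblyStreaming: true
})
  .then(function(instance) {
    console.log("PSPDFKit loaded", instance);
  })
  .catch(function(error) {
    console.error(error.message);
  });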

Upload Image from form-data to S3 using a Lambda

So I am writing a Lambda that will take in some form data via a straight POST through API Gateway (testing with Postman for now) and then send that image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, but I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg. The code below does not reflect my attempts with Base64 encoding/decoding, since they all failed. Here is what I have so far:
Sample Request in postman
{
  image: (uploaded .jpg/.png),
  metadata: { tag: 'iPhone' }
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')
const s3 = new AWS.S3();

exports.handler = async (event) => {
  const form = multipart.parse(event, false)
  const s3_response = await upload_s3(form)
  return {
    statusCode: '200',
    body: JSON.stringify({ data: s3_response })
  }
};

const upload_s3 = async (form) => {
  const uniqueId = Math.random().toString(36).substr(2, 9);
  const key = `${uniqueId}_${form.image.filename}`
  const request = {
    Bucket: 'bucket-name',
    Key: key,
    Body: form.image.content,
    ContentType: form.image.contentType,
  }
  try {
    const data = await s3.putObject(request).promise()
    return data
  } catch (e) {
    console.log('Error uploading to S3: ', e)
    return e
  }
}
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload it to S3. Here is the code for that.
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')
const s3 = new AWS.S3()

module.exports = {
  upload: (file) => {
    return new Promise((resolve, reject) => {
      const key = `${Date.now()}.${file.extension}`
      const bodyStream = fs.createReadStream(file.path)
      const params = {
        Bucket: process.env.S3_BucketName,
        Key: key,
        Body: bodyStream,
        ContentType: file.type
      }
      s3.upload(params, (err, data) => {
        if (err) {
          return reject(err)
        }
        return resolve(data)
      })
    })
  }
}
form parser function
const busboy = require('busboy')

module.exports = {
  parse: (req, temp) => {
    const ctype = req.headers['Content-Type'] || req.headers['content-type']
    let parsed_file = {}
    return new Promise((resolve) => {
      try {
        const bb = new busboy({
          headers: { 'content-type': ctype },
          limits: {
            fileSize: 31457280,
            files: 1,
          }
        })
        bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
          const stream = temp.createWriteStream()
          const ext = filename.split('.')[1]
          console.log('parser -- ext ', ext)
          parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
          file.pipe(stream)
        }).on('finish', () => {
          resolve(parsed_file)
        }).on('error', err => {
          console.error(err)
          resolve({ err: 'Form data is invalid: parsing error' })
        })
        if (req.end) {
          req.pipe(bb)
        } else {
          bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
        }
        return bb.end()
      } catch (e) {
        console.error(e)
        return resolve({ err: 'Form data is invalid: parsing error' })
      }
    })
  }
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')

exports.handler = async (event, context) => {
  temp.track()
  const parsed_file = await form_parser(event, temp)
  console.log('index -- parsed form', parsed_file)
  const result = await s3_upload(parsed_file)
  console.log('index -- s3 result', result)
  temp.cleanup()
  return {
    statusCode: '200',
    body: JSON.stringify(result)
  }
}
The above edited code is a combination of other code and a GitHub repo I found that is trying to achieve the same result. Even with this solution the file is still corrupted.
Figured out this issue. The code works perfectly fine; it was a problem with API Gateway. You need to go into the API Gateway settings, set the Binary Media Type to multipart/form-data, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to send images via form data to a Lambda.
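One related detail (my addition, not part of the answer): with a binary media type configured, API Gateway hands the Lambda a base64-encoded body and sets event.isBase64Encoded, so the parser must account for that. A minimal sketch:
// Hedged sketch: decode the API Gateway body before passing it to the multipart
// parser, so the binary image bytes are not mangled on the way to S3.
exports.handler = async (event) => {
  const bodyBuffer = event.isBase64Encoded
    ? Buffer.from(event.body, 'base64') // binary media type configured
    : Buffer.from(event.body, 'binary') // plain passthrough body
  // ...hand bodyBuffer (or the event as-is) to busboy / aws-lambda-multipart-parser here...
  return { statusCode: '200', body: JSON.stringify({ received: bodyBuffer.length }) }
}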

Firebase cloud function storage trigger: first thumbnail URLs are fine, then the next ones all get the same thumbnail URLs as the first

I am trying to upload an image to Firebase and then produce two thumbnails. I am able to do this with no problems. My current roadblock is that when I write the URLs to the Realtime Database, I always get the same URLs as the initial upload.
For example:
1st upload: I get my uploaded image with the two proper thumbnails for the image
2nd upload: I get my uploaded image with the two previous thumbnails (first image)
3rd upload: I get my uploaded image with the first image's thumbnails...
...this continues to reproduce the URLs for the first upload
In my storage the correct thumbnails are being generated, but the URLs are always for the first upload.
I don't know if this is a problem with getSignedUrl() or not; I'm really not sure what's going on here.
Here is my cloud function:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket); // The Storage object.
    // console.log(object);
    console.log(object.name);
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);
    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');
    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }
    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);
    // 2. Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });
    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 256];
    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb#${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);
      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);
      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName),
        metadata: {
          contentType: 'image/jpeg'
        }
      }).then((data) => {
        const file = data[0]
        // console.log(data)
        file.getSignedUrl({
          action: 'read',
          expires: '03-17-2100'
        }).then((response) => {
          const url = response[0];
          if (size === 64) {
            // console.log('generated 64');
            return admin.database().ref('profileThumbs').child(fileName).set({ thumb: url });
          } else {
            // console.log('generated 128');
            return admin.database().ref('categories').child(fileName).child('thumb').set(url);
          }
        })
          .catch(function (error) {
            console.error(error);
            return;
          });
      })
    });
    // 4. Run the upload operations
    await Promise.all(uploadPromises);
    // 5. Cleanup: remove tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  })
Cleaned up my code and solved my problem. Here is how I generated the URLs and wrote them to the proper database paths by reading the user's UID and postId from the file path:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [64, 256];
    const bucketDir = dirname(filePath);
    console.log(userUid);
    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }
    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);
    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      sizes.map(size => {
        const newFileName = `thumb#${size}_${fileName}.png`
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`
        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp, () => {
            return bucket.upload(newFileTemp, {
              destination: join(bucketDir, newFilePath),
              metadata: {
                contentType: 'image/jpeg'
              }
            }).then((data) => {
              const file = data[0]
              console.log(data)
              file.getSignedUrl({
                action: 'read',
                expires: '03-17-2100'
              }, function(err, url) {
                console.log(url);
                if (err) {
                  console.error(err);
                  return;
                }
                if (size === 64) {
                  return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
                } else {
                  return admin.database().ref('categories').child(fileName).child('thumb').set(url);
                }
              })
            })
          })
      })
    }).catch(error => {
      console.log(error);
    });
  })
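As a side note (my sketch, not the poster's code): the same per-size step can be written with async/await so the signed-URL fetch and the database write stay inside the promise returned to the trigger; it assumes the same bucket, sharp, admin, and path variables used above.
// Hedged sketch: one awaited helper per thumbnail size.
const processSize = async (size, tempFilePath, bucketDir, fileName, userUid) => {
  const thumbName = `thumb#${size}_${fileName}.png`;
  const thumbTemp = path.join(tmpdir(), thumbName);

  // Resize, upload, then fetch the signed URL for this specific upload.
  await sharp(tempFilePath).resize(size, size).toFile(thumbTemp);
  const [file] = await bucket.upload(thumbTemp, {
    destination: join(bucketDir, `thumbs/${thumbName}`),
    metadata: { contentType: 'image/jpeg' }
  });
  const [url] = await file.getSignedUrl({ action: 'read', expires: '03-17-2100' });

  // Write to the same database paths as the answer above.
  if (size === 64) {
    return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
  } else {
    return admin.database().ref('categories').child(fileName).child('thumb').set(url);
  }
};

// Inside the trigger, after downloading the source file:
// await Promise.all(sizes.map(size => processSize(size, tempFilePath, bucketDir, fileName, userUid)));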

how to pipe an archive (zip) to an S3 bucket

I'm a bit confused about how to proceed. I am using archiver (a Node.js module) to write data to a zip file. Currently, my code works when writing to a file (local storage).
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  zlib: { level: 9 }
});
archive.pipe(output);
archive.append(mybuffer, { name: 'msg001.txt' });
I’d like to modify the code so that the archive target file is an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I create the bucket object as in:
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream };
s3.upload(params, function(err, data) {
  …
});
Or
s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myMsgArchive.zip' } });
s3.upload({ Body: myStream })
  .send(function(err, data) {
    …
  });
With regard to my S3 example(s), myStream appears to be a readable stream, and I am confused as to how to make this work, since archive.pipe requires a writable stream. Is this something where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but the example is too terse to gain a proper understanding from. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help someone can give me would greatly be appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
exports.downloadFromS3AndZipToS3 = () => {
  // These are my input files I'm willing to read from S3 to ZIP them
  const files = [
    `${s3Folder}/myFile.pdf`,
    `${s3Folder}/anotherFile.xml`
  ]

  // Just in case you like to rename them as they have a different name in the final ZIP
  const fileNames = [
    'finalPDFName.pdf',
    'finalXMLName.xml'
  ]

  // Use promises to get them all
  const promises = []
  files.map((file) => {
    promises.push(s3client.getObject({
      Bucket: yourBucket,
      Key: file
    }).promise())
  })

  // Define the ZIP target archive
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  })

  // Pipe!
  archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))

  archive.on('warning', function(err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err;
    }
  })

  // Good practice to catch this error explicitly
  archive.on('error', function(err) {
    throw err;
  })

  // The actual archive is populated here
  return Promise
    .all(promises)
    .then((data) => {
      data.map((thisFile, index) => {
        archive.append(thisFile.Body, { name: fileNames[index] })
      })
      archive.finalize()
    })
}
2 - The helper method
const uploadFromStream = (s3client, someFolder, aFilename) => {
  const pass = new stream.PassThrough()
  const s3params = {
    Bucket: yourBucket,
    Key: `${someFolder}/${aFilename}`,
    Body: pass,
    ContentType: 'application/zip'
  }
  s3client.upload(s3params, (err, data) => {
    if (err)
      console.log(err)
    if (data)
      console.log('Success')
  })
  return pass
}
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")

const zipAndUpload = async () => {
  const files = [`test1.txt`, `test2.txt`]
  const fileNames = [`test1target.txt`, `test2target.txt`]
  const archive = archiver("zip", {
    zlib: { level: 9 } // Sets the compression level.
  })
  files.map((thisFile, index) => {
    archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
  })
  const uploadStream = new stream.PassThrough()
  archive.pipe(uploadStream)
  archive.finalize()

  archive.on("warning", function (err) {
    if (err.code === "ENOENT") {
      console.log(err)
    } else {
      throw err
    }
  })
  archive.on("error", function (err) {
    throw err
  })
  archive.on("end", function () {
    console.log("archive end")
  })

  await uploadFromStream(uploadStream)
  console.log("all done")
}

const uploadFromStream = async pass => {
  const s3params = {
    Bucket: "bucket-name",
    Key: `streamtest.zip`,
    Body: pass,
    ContentType: "application/zip"
  }
  return s3.upload(s3params).promise()
}

zipAndUpload()
