Moodle corrupted uploaded files - JavaScript

I am using the Moodle web service function core_files_upload from Node.js with this script:
const { CallService } = require("../MoodleWS")

var Upload = async function (userid, file) {
    var base64 = await toBase64(file)
        .then(r => {
            return r;
        })
        .catch((e) => {
            console.log(e)
        });
    console.log(base64);
    var param = {
        itemid: 0,
        instanceid: userid,
        filearea: 'draft',
        filecontent: base64,
        component: 'user',
        filepath: '/',
        filename: file.name,
        contextlevel: 'user'
    }
    // return promise calling web service, basically returned axios
    return CallService('POST', 'core_files_upload', false, null, param);
}

const toBase64 = file => new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => resolve(reader.result);
    reader.onerror = () => reject(reader.error);
})

module.exports = { Upload }
The function returns success and the files are uploaded; I have checked that the uploaded file size matches the original. Unfortunately, I can't open the files, they keep showing an error, and images can't be displayed either.
The uploaded base64 also includes the MIME-type header, e.g. data:application/pdf;base64,JVBERi0xLjQNJeL... I don't really know what went wrong. When I upload files through Moodle's native web interface, the files upload correctly. Can anyone help? Thanks

It turned out that I don't need to include the MIME-type prefix in the base64 payload. So I just remove the data:application/pdf;base64, prefix and modify my param a little, so it becomes:
var base64 = await toBase64(file)
    .then(r => {
        return r;
    })
    .catch((e) => {
        console.log(e)
    });
var param = {
    itemid: 0,
    instanceid: userid,
    filearea: 'draft',
    filecontent: base64.split(',')[1],
    component: 'user',
    filepath: '/',
    filename: file.name,
    contextlevel: 'user'
}
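Note that FileReader and readAsDataURL are browser APIs. If the upload really runs in plain Node.js, a minimal sketch for producing the same prefix-free base64 payload (assuming the file is available on disk at a known path) would be:
const fs = require('fs');

// Read the file from disk and return base64 without any "data:...;base64," prefix,
// which is the form core_files_upload expects in filecontent.
function fileToBase64(filePath) {
    return fs.readFileSync(filePath).toString('base64');
}
In that case the filename can be derived with path.basename(filePath) instead of file.name.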

Related

How can I post an array of objects in axios-http

I am using react-dropzone to upload MP3 files and a metadata npm package to grab the contents of each file. When sending the data to axios.post(), I get the error "Body exceeded 1mb limit".
This is where I'm sending the new data:
async function insertArtists(uploadedArtists, doneCallback) {
    // console.log(uploadedArtists); // [5]
    // console.log(artistsData); // []
    const originalArtists = [...artistsData];
    const newArtists = [...uploadedArtists, ...artistsData];
    try {
        setArtistsData(newArtists);
        const results = await axios.post(`${restUrl}/multi`, newArtists);
        console.log(results);
        if (doneCallback) {
            doneCallback();
        }
    } catch (error) {
        console.log("error thrown inside insertArtists", error);
        setArtistsData(originalArtists);
    }
}
I found this issue: https://github.com/vercel/next.js/issues/19684, but it didn't explain how to add other params.
My Dropzone:
function Dropzone() {
    const { insertArtists } = useReqRest();
    const { getRootProps, getInputProps } = useDropzone({
        accept: "audio/*",
        onDrop: useCallback(async (acceptedFiles) => {
            const filesData = await Promise.all(
                acceptedFiles.map(async (file) => {
                    let fileContents = {
                        _id: uuidv4(),
                        path: file.path,
                        fileName: file.name,
                        fileSize: file.size,
                        fileType: file.type,
                    };
                    const meta = await musicMetadata.parseBlob(file);
                    return { ...fileContents, ...meta.common };
                })
            );
            const fullDb = [...artists, ...filesData];
            insertArtists(fullDb);
        }),
    });
}
If your issue is just the "Body exceeded 1mb limit" error, you can add this at the end of your API route file and raise the limit:
export const config = {
    api: {
        bodyParser: {
            sizeLimit: '4mb', // anything above the default '1mb'
        },
    },
}
If you want to include all the file details, consider using FormData(), appending the files to it, and sending that as the body of the request, as in the sketch below.
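A rough sketch of that FormData route (the endpoint and field names are assumptions, not taken from the question):
async function uploadAsFormData(acceptedFiles) {
    const formData = new FormData();
    acceptedFiles.forEach((file, index) => {
        // each file becomes its own multipart part
        formData.append(`files[${index}]`, file, file.name);
    });
    // extra params can be appended as plain fields
    formData.append('source', 'react-dropzone');
    // axios sets the multipart boundary automatically for FormData bodies
    return axios.post(`${restUrl}/multi`, formData);
}
Note that the Next.js API route receiving this would then need its bodyParser disabled and a multipart parser (e.g. formidable) on the server side.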
You need to set a custom config in order to send more than 1 MB of data:
export const config = {
    api: {
        bodyParser: {
            sizeLimit: '1mb' // change this
        },
    },
}
For more info check this out: https://nextjs.org/docs/api-routes/api-middlewares#custom-config

Upload Image from form-data to S3 using a Lambda

So I am writing a Lambda that will take in some form data via a straight POST through API Gateway (testing with Postman for now) and then send that image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, and I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg. The code below does not reflect my attempts at Base64 encoding/decoding, since they all failed. Here is what I have so far:
Sample Request in postman
{
    image: (uploaded .jpg/.png),
    metadata: { tag: 'iPhone' }
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')
const s3 = new AWS.S3();

exports.handler = async (event) => {
    const form = multipart.parse(event, false)
    const s3_response = await upload_s3(form)
    return {
        statusCode: '200',
        body: JSON.stringify({ data: s3_response })
    }
};
const upload_s3 = async (form) => {
    const uniqueId = Math.random().toString(36).substr(2, 9);
    const key = `${uniqueId}_${form.image.filename}`
    const request = {
        Bucket: 'bucket-name',
        Key: key,
        Body: form.image.content,
        ContentType: form.image.contentType,
    }
    try {
        const data = await s3.putObject(request).promise()
        return data
    } catch (e) {
        console.log('Error uploading to S3: ', e)
        return e
    }
}
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload it to S3. Here is the code for that.
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')
const s3 = new AWS.S3()

module.exports = {
    upload: (file) => {
        return new Promise((resolve, reject) => {
            const key = `${Date.now()}.${file.extension}`
            const bodyStream = fs.createReadStream(file.path)
            const params = {
                Bucket: process.env.S3_BucketName,
                Key: key,
                Body: bodyStream,
                ContentType: file.type
            }
            s3.upload(params, (err, data) => {
                if (err) {
                    return reject(err)
                }
                return resolve(data)
            })
        })
    }
}
form parser function
const busboy = require('busboy')

module.exports = {
    parse: (req, temp) => {
        const ctype = req.headers['Content-Type'] || req.headers['content-type']
        let parsed_file = {}
        return new Promise((resolve) => {
            try {
                const bb = new busboy({
                    headers: { 'content-type': ctype },
                    limits: {
                        fileSize: 31457280,
                        files: 1,
                    }
                })
                bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
                    const stream = temp.createWriteStream()
                    const ext = filename.split('.')[1]
                    console.log('parser -- ext ', ext)
                    parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
                    file.pipe(stream)
                }).on('finish', () => {
                    resolve(parsed_file)
                }).on('error', err => {
                    console.error(err)
                    resolve({ err: 'Form data is invalid: parsing error' })
                })
                if (req.end) {
                    req.pipe(bb)
                } else {
                    bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
                }
                return bb.end()
            } catch (e) {
                console.error(e)
                return resolve({ err: 'Form data is invalid: parsing error' })
            }
        })
    }
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')

exports.handler = async (event, context) => {
    temp.track()
    const parsed_file = await form_parser(event, temp)
    console.log('index -- parsed form', parsed_file)
    const result = await s3_upload(parsed_file)
    console.log('index -- s3 result', result)
    temp.cleanup()
    return {
        statusCode: '200',
        body: JSON.stringify(result)
    }
}
The edited code above is a combination of my own code and a GitHub repo I found that tries to achieve the same result. Even with this approach the file is still corrupted.
Figured out this issue. The code works perfectly fine; it was an issue with API Gateway. You need to go into the API Gateway settings, add multipart/form-data as a Binary Media Type, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to send images via form data to a Lambda.
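As an additional sanity check (a sketch, not part of the answer above): once multipart/form-data is registered as a binary media type, API Gateway delivers the payload base64-encoded with event.isBase64Encoded set, so the handler can confirm what it actually receives before handing it to the parser.
exports.handler = async (event) => {
    // When API Gateway treats the payload as binary, the body arrives base64-encoded.
    const rawBody = event.isBase64Encoded
        ? Buffer.from(event.body, 'base64')
        : Buffer.from(event.body || '', 'binary');
    console.log('body bytes received:', rawBody.length);
    // ...hand the event/body off to the multipart parser as before
};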

Creating a read stream to a directory and piping entire directory to MongoDB Bucket

I am trying to save an entire directory to a MongoDB Bucket.
It works when I zip the file, but I need the files to be unzipped in the bucket.
When I try to stream the directory an error reads "Illegal operation on a directory".
I've been trying to find a solution but cannot find one for a directory.
let conn = mongoose.connection;
let gfb;

conn.once('open', () => {
    // init the stream
    console.log('- Connection open -');
    gfb = new mongoose.mongo.GridFSBucket(conn.db, {
        chunkSizeBytes: 1024,
        bucketName: 'uploads'
    })
})

const uploaddb = (req, res, file) => {
    conn.once('open', () => {
        console.log('- Connection open -');
        gfb = new mongoose.mongo.GridFSBucket(conn.db, {
            chunkSizeBytes: 1024,
            bucketName: 'uploads'
        })
        try {
            uploadDir('/Users/nancycollins/virtuload-beta/backend/uploads/unzipped/')
        } catch (e) {
            console.error(e);
        }
    });
}
async function uploadDir (folder, subFolder = '') {
    const actualFolder = path.join(folder, subFolder)
    const contents = await fs.readdir(actualFolder, { withFileTypes: true })
    await Promise.all(contents.map((fileOrDirectory) =>
        fileOrDirectory.isDirectory()
            ? uploadDir(folder, path.join(subFolder, fileOrDirectory.name))
            : uploadFile(folder, path.join(actualFolder, fileOrDirectory.name))
    ))
}

async function uploadFile (baseFolder, filePath) {
    return new Promise((resolve, reject) => {
        fs.createReadStream(filePath)
            .pipe(gfb.openUploadStream(filePath, { metadata: path.relative(baseFolder, filePath) }))
            .on('error', function (error) {
                reject(error);
            })
            .on('finish', () => {
                resolve()
            })
    })
}
I have also tried changing the try-catch to this:
try {
    const folder = '/Users/nancycollins/virtuload-beta/backend/uploads/unzipped/';
    uploadDir(folder)
} catch (e) {
    console.error(e);
}
EDIT:
This is the updated version.
The error currently reads:
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type object
at validateString (internal/validators.js:125:11)
at Object.join (path.js:1147:7)
etc...
Workaround:
Upload the files one by one and keep their relative folder structure in GridFS's metadata. Here uploadDir is a recursive function that walks the given folder and its subfolders and calls uploadFile for every file found.
I assume you are using a promisified version of fs (see the import sketch after the code).
async function uploadDir (folder, subFolder = '') {
    const actualFolder = path.join(folder, subFolder)
    const contents = await fs.readdir(actualFolder, { withFileTypes: true })
    await Promise.all(contents.map((fileOrDirectory) =>
        fileOrDirectory.isDirectory()
            ? uploadDir(folder, path.join(subFolder, fileOrDirectory.name))
            : uploadFile(folder, path.join(actualFolder, fileOrDirectory.name))
    ))
}

async function uploadFile (baseFolder, filePath) {
    return new Promise((resolve, reject) => {
        fs.createReadStream(filePath)
            .pipe(gfb.openUploadStream(filePath, { metadata: path.relative(baseFolder, filePath) }))
            .on('error', function (error) {
                reject(error);
            })
            .on('finish', () => {
                resolve()
            })
    })
}
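For concreteness, here is one way those imports could look (my sketch; the names are mine). fs.promises provides an awaitable readdir, while createReadStream only exists on the callback-based module, so the wrapper below keeps the code above working as written:
const path = require('path');
const rawFs = require('fs');

// A promisified fs that still exposes createReadStream:
const fs = {
    ...rawFs.promises,                        // awaitable readdir, etc.
    createReadStream: rawFs.createReadStream  // streams stay on the callback API
};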
Edit:
If you don't need to store extra info, you can just set the GridFS file's name to the relative path.
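A sketch of that variation (reusing gfb, baseFolder, and filePath from the snippet above):
fs.createReadStream(filePath)
    // the GridFS filename is now the path relative to the uploaded folder
    .pipe(gfb.openUploadStream(path.relative(baseFolder, filePath)))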

How to upload an image to Firebase Storage from Ionic?

I need help sending an image to Firebase Storage from Ionic. I take a picture with this function:
async takePicture() {
    const options: CameraOptions = {
        quality: 100,
        destinationType: this.camera.DestinationType.FILE_URI,
        encodingType: this.camera.EncodingType.JPEG,
        mediaType: this.camera.MediaType.PICTURE
    }
    try {
        let imageURI = await this.camera.getPicture(options);
        let newImageURI = await this.cropImage(imageURI);
        let imageSanitized = await this.encodeFile(newImageURI);
        this.imgSrc = imageSanitized;
    } catch (e) {
        console.error(JSON.stringify(e));
    }
}
And I crop with this function:
cropImage(imgURI): Promise<string> {
    return new Promise((resolve, reject) => {
        this.cropService.crop(imgURI, { quality: 100 }).then((newImageURI: string) => {
            resolve(newImageURI);
        }, (err) => {
            reject(err);
        })
    })
}
Finally, I encode it with this function:
encodeFile(ImageURI: string): Promise<any> {
    return new Promise((resolve, reject) => {
        this.base64.encodeFile(ImageURI).then((base64File: string) => {
            this.imgUri = base64File;
            resolve(this.domSanitizer.bypassSecurityTrustResourceUrl(base64File));
        }, (err) => {
            reject(err);
        })
    })
}
this.imgSrc is my sanitized image, and it displays fine in my HTML file. However, I need to send this image to Firebase Storage. For that, I created this function:
uploadToStorage(imgString) {
    console.log(imgString);
    this.storage.child('exemplo.JPEG').putString(imgString, 'data_url').then((res) => {
        console.log(res);
    }, (error) => {
        console.error(error);
    });
}
imgString receives either the value of this.domSanitizer.bypassSecurityTrustResourceUrl(base64File) or the base64File from encodeFile.
I don't get an error from my upload function, but I don't get success either; nothing shows up.
How can I send the image to the server correctly?
I think you can capture the image as base64 by using:
destinationType: this.camera.DestinationType.DATA_URL
instead of FILE_URI.
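For example, a rough sketch of that option (assuming the same Ionic Native camera plugin used in the question; the method name is mine):
async takePictureAsBase64() {
    const options = {
        quality: 100,
        destinationType: this.camera.DestinationType.DATA_URL, // returns base64 instead of a file URI
        encodingType: this.camera.EncodingType.JPEG,
        mediaType: this.camera.MediaType.PICTURE
    };
    const base64Data = await this.camera.getPicture(options);
    // putString(..., 'data_url') expects the full data-URL prefix:
    return 'data:image/jpeg;base64,' + base64Data;
}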
Anyway, to store an image I used the following code with AngularFire2, and it works. this.data.image is where I saved the base64 of the image. It may get tricky if your encoder adds "data:image/jpeg;base64," to the beginning of your base64 string; try with or without that prefix if this code doesn't work as expected.
import { AngularFireStorage, AngularFireStorageReference } from 'angularfire2/storage/public_api';
// ...
const storageRef: AngularFireStorageReference = this.afStorage.ref(`images/${this.userId}/profilePic/`);
storageRef.putString(this.data.image, 'data_url', {
    contentType: 'image/jpeg'
}).then(() => {
    storageRef.getDownloadURL().subscribe(url => {
        console.log('download url: ' + url);
        // function to save the download URL of the saved image in the database
    });
})
I did this:
encodeFile(ImageURI: string): Promise<any> {
    return new Promise((resolve, reject) => {
        this.base64.encodeFile(ImageURI).then((base64File: string) => {
            this.imgUri = base64File;
            ...
        }, (err) => {
            ...
        })
    })
}
this.imgUri stores my base64-encoded image, but its beginning needs to be modified. So split off the prefix:
let messageSplit = this.imgUri.split('data:image/*;charset=utf-8;base64,')[1];
and put this in its place:
let message64 = 'data:image/jpg;base64,' + messageSplit;
Then upload to Firebase Storage:
const filePath = `my-pet-crocodile_${ new Date().getTime() }.jpg`;
let task = this.afStorage.ref(filePath).putString(message64, 'data_url');
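If you also need the public URL afterwards, one possible follow-up (a sketch, reusing the AngularFire reference API shown in the earlier answer):
task.then(() => {
    // read back the download URL once the upload task has completed
    this.afStorage.ref(filePath).getDownloadURL().subscribe(url => {
        console.log('download url: ' + url);
    });
});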

How to pipe an archive (zip) to an S3 bucket

I'm a bit confused about how to proceed. I am using archiver (a Node.js module) to write data to a zip file. Currently, I have my code working when I write to a local file:
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
    zlib: { level: 9 }
});
archive.pipe(output);
archive.append(mybuffer, { name: 'msg001.txt' });
I’d like to modify the code so that the archive target file is an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I create the bucket object as in:
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream };
s3.upload(params, function (err, data) {
    // …
});
Or
s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myMsgArchive.zip' } });
s3.upload({ Body: myStream })
    .send(function (err, data) {
        // …
    });
With regard to my S3 examples, myStream appears to be a readable stream, and I am confused about how to make this work, since archive.pipe requires a writable stream. Is this a case where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but the example is too terse to gain a proper understanding from. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help would be greatly appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
// Assumes archiver is required and that s3client (an AWS.S3 instance), s3Folder,
// and yourBucket are defined elsewhere in the module.
exports.downloadFromS3AndZipToS3 = () => {
    // These are my input files I'm willing to read from S3 to ZIP them
    const files = [
        `${s3Folder}/myFile.pdf`,
        `${s3Folder}/anotherFile.xml`
    ]
    // Just in case you like to rename them as they have a different name in the final ZIP
    const fileNames = [
        'finalPDFName.pdf',
        'finalXMLName.xml'
    ]
    // Use promises to get them all
    const promises = []
    files.map((file) => {
        promises.push(s3client.getObject({
            Bucket: yourBucket,
            Key: file
        }).promise())
    })
    // Define the ZIP target archive
    let archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    })
    // Pipe!
    archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))
    archive.on('warning', function (err) {
        if (err.code === 'ENOENT') {
            // log warning
        } else {
            // throw error
            throw err;
        }
    })
    // Good practice to catch this error explicitly
    archive.on('error', function (err) {
        throw err;
    })
    // The actual archive is populated here
    return Promise
        .all(promises)
        .then((data) => {
            data.map((thisFile, index) => {
                archive.append(thisFile.Body, { name: fileNames[index] })
            })
            archive.finalize()
        })
}
2 - The helper method
// The destination folder and filename passed by archive.pipe(...) above are
// accepted here so the S3 key matches the call site. Assumes stream is required
// and yourBucket is defined elsewhere.
const uploadFromStream = (s3client, destinationFolder, fileName) => {
    const pass = new stream.PassThrough()
    const s3params = {
        Bucket: yourBucket,
        Key: `${destinationFolder}/${fileName}`,
        Body: pass,
        ContentType: 'application/zip'
    }
    s3client.upload(s3params, (err, data) => {
        if (err)
            console.log(err)
        if (data)
            console.log('Success')
    })
    return pass
}
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")

const zipAndUpload = async () => {
    const files = [`test1.txt`, `test2.txt`]
    const fileNames = [`test1target.txt`, `test2target.txt`]
    const archive = archiver("zip", {
        zlib: { level: 9 } // Sets the compression level.
    })
    files.map((thisFile, index) => {
        archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
    })
    const uploadStream = new stream.PassThrough()
    archive.pipe(uploadStream)
    archive.finalize()
    archive.on("warning", function (err) {
        if (err.code === "ENOENT") {
            console.log(err)
        } else {
            throw err
        }
    })
    archive.on("error", function (err) {
        throw err
    })
    archive.on("end", function () {
        console.log("archive end")
    })
    await uploadFromStream(uploadStream)
    console.log("all done")
}

const uploadFromStream = async pass => {
    const s3params = {
        Bucket: "bucket-name",
        Key: `streamtest.zip`,
        Body: pass,
        ContentType: "application/zip"
    }
    return s3.upload(s3params).promise()
}

zipAndUpload()
