I was using this code to upload files to Dropbox with JavaScript (Vue.js); however, I have not been able to get files larger than 350 MB to upload. I have been trying to upload the file in chunks. The code doesn't throw, but I get a 400 error when the Dropbox API returns the result:
Dropbox-sdk.min.js?0032:1 POST https://content.dropboxapi.com/2/files/upload_session/append_v2 400 (Bad Request)
I am wondering if there is something wrong with the code, or whether something needs to be changed in the Dropbox settings. I have been using this code as a guide: https://github.com/dropbox/dropbox-sdk-js/blob/master/examples/javascript/upload/index.html#L2
uploadToDropbox: function (path, file) {
var dbx = this.dropbox()
console.log("File upload .. " + path)
console.log("File upload .. " + file)
console.log(UPLOAD_FILE_SIZE_LIMIT)
if (file.size < UPLOAD_FILE_SIZE_LIMIT) {
this.dropbox().filesUpload({ path: path, contents: file })
//__PIPELINE\assets\test
.then(response => {
console.log(response)
//this.structure = response.result.entries;
console.log("This was successful")
})
.catch(error => {
console.log(error)
console.log("This is an error")
});
}
else {
// File is bigger than 150 MB - use filesUploadSession* API
const maxBlob = 8 * 1000 * 1000; // 8 MB - Dropbox JavaScript API suggested max file / chunk size
var workItems = [];
var offset = 0;
while (offset < file.size) {
var chunkSize = Math.min(maxBlob, file.size - offset);
workItems.push(file.slice(offset, offset + chunkSize));
offset += chunkSize;
}
console.log ("Work Items : ")
console.log (workItems)
const task = workItems.reduce((acc, blob, idx, items) => {
if (idx == 0) {
// Starting multipart upload of file
console.log("idx is 0")
return acc.then(function () {
return dbx.filesUploadSessionStart({ close: false, contents: blob })
.then(response => response.session_id)
});
} else if (idx < items.length - 1) {
console.log("idx is less than items.length")
// Append part to the upload session
return acc.then(function (sessionId) {
var cursor = { session_id: sessionId, offset: idx * maxBlob };
return dbx.filesUploadSessionAppendV2({ cursor: cursor, close: false, contents: blob }).then(() => sessionId);
});
} else {
// Last chunk of data, close session
console.log("finishing session")
return acc.then(function (sessionId) {
var cursor = { session_id: sessionId, offset: file.size - blob.size };
var commit = { path: '/' + file.name, mode: 'add', autorename: true, mute: false };
return dbx.filesUploadSessionFinish({ cursor: cursor, commit: commit, contents: blob });
});
}
}, Promise.resolve());
task.then(function (result) {
console.log(result)
//var results = document.getElementById('results');
//results.appendChild(document.createTextNode('File uploaded!'));
}).catch(function (error) {
console.error(error);
});
}
},
The session id was coming back undefined in the code.
The response object was changed in a newer version of the SDK, so the example code no longer works:
https://github.com/dropbox/dropbox-sdk-js/blob/master/UPGRADING.md#4-updating-the-response-object
The fix is changing the session-start handler to this line:
.then(response => response.result.session_id)
Here is a link to a thread on github with the same issue:
https://github.com/dropbox/dropbox-sdk-js/issues/351
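In context, the session-start step of the reducer would then read something like this (a sketch against the newer SDK, where every response is wrapped in { status, headers, result }; the append and finish steps keep building their cursors from the same session id):
return acc.then(function () {
return dbx.filesUploadSessionStart({ close: false, contents: blob })
// note .result: newer SDK versions wrap the API payload in response.result
.then(response => response.result.session_id);
});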
I am trying to download around 20,000 images from a website by scraping it.
(I am authorized by the owner.)
The path of each image is composed like this:
VolumeId/ChapterId/PageId.jpg
There are around 100 volumes, every volume has x chapters, and every chapter has y pages.
In the database I have stored the number of chapters for every volume, but I don't have the number of pages, so I have to navigate to the image URL and check whether it exists.
I know for sure that there are fewer than 30 pages per chapter, so I did something like this:
let exists = true;
for (let i = 0; i < 30 && exists; i++) {
fetch(`imgUrl/${i}.jpg`)
.then(data => {
if (data.ok)
return data.arrayBuffer();
else
exists = false;
})
.then(arrayBuffer => {
if (exists) {
let buffer = Buffer.from(arrayBuffer);
if (!fs.existsSync(path.join(__dirname, imgPath))) {
fs.mkdirSync(path.join(__dirname, imgPath), {
recursive: true,
});
}
fs.writeFile(
path.join(__dirname, imgPath + "/" + img + ".jpg"),
buffer,
(err) => {
if (err) throw err;
}
);
}
});
}
The problem:
The problem is that the loop does not wait for the image to be fetched and saved locally.
I have tried with async/await and promises (I think I have implemented them wrong)
Is there a better way to download a large quantity of data? Maybe with streams?
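For reference, the behavior I'm after is basically this, if each fetch could be awaited inside the loop (a sketch to run inside an async function, with the same unspecified imgPath as above):
for (let i = 0; i < 30; i++) {
const res = await fetch(`imgUrl/${i}.jpg`);
// the page doesn't exist: stop scanning this chapter
if (!res.ok) break;
const buffer = Buffer.from(await res.arrayBuffer());
fs.mkdirSync(path.join(__dirname, imgPath), { recursive: true });
fs.writeFileSync(path.join(__dirname, imgPath, `${i}.jpg`), buffer);
}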
It can be a little complicated to implement your code with just async/await while preserving the "exists" condition between iterations, so I suggest using a class that implements an async iterator; refer to the official documentation for more details. The following code achieves what you are looking for (note: the snippet you provided doesn't show where "imgPath" comes from, so adjust my code accordingly):
// Assumed imports (not shown in the original answer):
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch'); // or Node 18+'s built-in fetch
class FetchImages {
constructor(urls) {
this.urls = urls;
}
[Symbol.asyncIterator]() {
const urlsIterator = this.urls[Symbol.iterator]();
let index = 0; // local counter: `this` inside next() is not the class instance
return {
async next() {
if (++index == 30) {
return {
done: true
};
}
const iteratorResult = urlsIterator.next();
if (iteratorResult.done) {
return { done: true };
}
const url = iteratorResult.value;
try {
let response = await fetch(url);
let data;
if (response.status == 200) {
data = await response.arrayBuffer();
} else {
// equivalent to exists == false, exit condition of the iterator
return {
done: true
};
}
let buffer = Buffer.from(data);
if (!fs.existsSync(path.join(__dirname, imgPath))) {
fs.mkdirSync(path.join(__dirname, imgPath), {
recursive: true,
});
}
fs.writeFileSync(
path.join(__dirname, imgPath),
buffer,
);
return {
done: false,
value: imgPath
};
} catch (err) {
return {
done: false,
value: err.message
};
}
}
}
}
}
(async function () {
const fetchImages = new FetchImages(urls);
for await (const fetchImage of fetchImages) {
// check status of each fetch
console.log(fetchImage);
}
})();
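Note that the iterator above downloads strictly one image at a time. If throughput matters, a common variation is to batch the URLs and fetch each batch concurrently; a sketch, where downloadOne is a hypothetical helper wrapping the fetch-and-write logic above:
for (let i = 0; i < urls.length; i += 5) {
// download 5 images in parallel, then move on to the next batch
await Promise.all(urls.slice(i, i + 5).map(downloadOne));
}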
I am implementing a multi-file upload component that shows the upload progress percentage of each file, using axios in Vue.js.
But "onUploadProgress" in axios seems to report the overall progress, not the progress of each file, so the progress of the first file is never displayed.
The total value of the ProgressEvent object matches the sum of the sizes of the two files (shown in a screenshot that is not reproduced here).
[Code]
let formData = new FormData();
payload.data.files.forEach((data) => {
formData.append('files[]', data);
});
...
let response = await axios.post(url, formData, {
headers: {
'Content-Type': 'multipart/form-data'
},
onUploadProgress: (progressEvent) => {
let percentage = parseInt(Math.round((progressEvent.loaded / progressEvent.total) * 100));
let files = store.state.uploadFiles.map((value, index, row) => {
if(index === row.length - 1) {
value.uploadPercentage = percentage;
}
return value;
});
store.commit(Constant.UPLOAD_FILES, { files: files });
}
});
if (response.data.success) {
let files = store.state.uploadFiles.map((value, index, row) => {
if(index === row.length - 1) {
value.uploadPercentage = 100;
}
return value;
});
store.commit(Constant.UPLOAD_FILES, { files: files });
}
Looping through single-file upload requests is the solution I've been looking into, but I wonder if there is any other way at all.
Please help. Any ideas would be greatly appreciated.
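For reference, the per-file variant described above would look roughly like this (a sketch reusing the url, store, and Constant names from the snippet; UPDATE_FILE_PROGRESS is a hypothetical mutation that records one file's percentage by index):
await Promise.all(payload.data.files.map((file, index) => {
let formData = new FormData();
formData.append('files[]', file);
return axios.post(url, formData, {
headers: { 'Content-Type': 'multipart/form-data' },
// each request now reports progress for its own file only
onUploadProgress: (progressEvent) => {
let percentage = Math.round((progressEvent.loaded / progressEvent.total) * 100);
store.commit(Constant.UPDATE_FILE_PROGRESS, { index: index, percentage: percentage });
}
});
}));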
I am trying to upload an image to Firebase and then produce two thumbnails, which I am able to do with no problems. My current roadblock is that when I write the URLs to the Realtime Database, I always get the same URL as the initial upload.
For example:
1st upload: I get my uploaded image with the two proper thumbnails for the image.
2nd upload: I get my uploaded image with the two previous thumbnails (the first image's).
3rd upload: I get my uploaded image with the first image's thumbnails...
...and this continues to reproduce the URLs of the first upload.
In my storage the correct thumbnails are being generated, but the URLs always belong to the first upload.
I don't know whether this is a problem with getSignedUrl() or not; I'm really not sure what's going on here.
Here is my cloud function:
export const generateThumbs = functions.storage
.object()
.onFinalize(async object => {
const bucket = gcs.bucket(object.bucket); // The Storage bucket that contains the file.
// console.log(object);
console.log(object.name);
const filePath = object.name; // File path in the bucket.
const fileName = filePath.split('/').pop();
const bucketDir = dirname(filePath);
const workingDir = join(tmpdir(), 'thumbs');
const tmpFilePath = join(workingDir, 'source.png');
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
// 1. ensure thumbnail dir exists
await fs.ensureDir(workingDir);
// 2. Download the source file
await bucket.file(filePath).download({
destination: tmpFilePath
});
//3. resize the images and define an array of upload promises
const sizes = [64, 256];
const uploadPromises = sizes.map(async size => {
const thumbName = `thumb#${size}_${fileName}`;
const thumbPath = join(workingDir, thumbName);
//Resize source image
await sharp(tmpFilePath)
.resize(size, size)
.toFile(thumbPath);
//upload to gcs
return bucket.upload(thumbPath, {
destination: join(bucketDir, thumbName),
metadata: {
contentType: 'image/jpeg'
}
}).then((data) => {
const file = data[0]
// console.log(data)
file.getSignedUrl({
action: 'read',
expires: '03-17-2100'
}).then((response) => {
const url = response[0];
if (size === 64) {
// console.log('generated 64');
return admin.database().ref('profileThumbs').child(fileName).set({ thumb: url });
} else {
// console.log('generated 128');
return admin.database().ref('categories').child(fileName).child('thumb').set(url);
}
})
.catch(function (error) {
console.error(error);
return;
});
})
});
//4. Run the upload operations
await Promise.all(uploadPromises);
//5. Cleanup remove the tmp/thumbs from the filesystem
return fs.remove(workingDir);
})
I cleaned up my code and solved my problem. Here is how I generated the URLs and wrote them to the proper database paths, by accessing the user's UID and postId in the file path:
export const generateThumbs = functions.storage
.object()
.onFinalize(async object => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const fileName = filePath.split('/').pop();
const userUid = filePath.split('/')[2];
const sizes = [64, 256];
const bucketDir = dirname(filePath);
console.log(userUid);
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
const bucket = gcs.bucket(fileBucket);
const tempFilePath = path.join(tmpdir(), fileName);
return bucket.file(filePath).download({
destination: tempFilePath
}).then(() => {
sizes.map(size => {
const newFileName = `thumb#${size}_${fileName}.png`
const newFileTemp = path.join(tmpdir(), newFileName);
const newFilePath = `thumbs/${newFileName}`
return sharp(tempFilePath)
.resize(size, size)
.toFile(newFileTemp, () => {
return bucket.upload(newFileTemp, {
destination: join(bucketDir, newFilePath),
metadata: {
contentType: 'image/jpeg'
}
}).then((data) => {
const file = data[0]
console.log(data)
file.getSignedUrl({
action: 'read',
expires: '03-17-2100'
}, function(err, url) {
console.log(url);
if (err) {
console.error(err);
return;
}
if (size === 64) {
return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
} else {
return admin.database().ref('categories').child(fileName).child('thumb').set(url);
}
})
})
})
})
}).catch(error =>{
console.log(error);
});
})
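As a side note on getSignedUrl: the callback style used here and the promise style from the first snippet are interchangeable, and the promise resolves with a one-element array. A sketch of the promise form for the same step:
// promise form: getSignedUrl resolves with [url]
return file.getSignedUrl({ action: 'read', expires: '03-17-2100' })
.then(([url]) => admin.database().ref('categories').child(fileName).child('thumb').set(url))
.catch(err => console.error(err));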
I am trying to resize the width and height of an input stream-image sent from the user to the server with sharp, but nothing happens to the image; it keeps its original size. How should I use sharp so that I can make the image smaller or bigger?
Please help me.
This is how my code looks:
'use strict';
const builder = require('botbuilder');
const restify = require('restify');
const utils = require('./utils.js');
const request = require('request');
const sharp = require('sharp');
const fs = require('fs');
const resizeImage = require('resize-image');
// Create chat connector for communicating with the Bot Framework Service
const connector = new builder.ChatConnector({
appId: process.env.MICROSOFT_APP_ID,
appPassword: process.env.MICROSOFT_APP_PASSWORD
});
// Setup Restify Server
const server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, () => {
console.log(`${server.name} listening to ${server.url}`);
});
// Listen for messages from users
server.post('/api/messages', connector.listen());
const bot = new builder.UniversalBot(connector);
// default dialog
//resize the images
//Sends greeting message when the bot is first added to a conversation
bot.on('conversationUpdate', function (message) {
if (message.membersAdded) {
message.membersAdded.forEach(function (identity) {
if (identity.id === message.address.bot.id) {
var reply = new builder.Message()
.address(message.address)
.text('Hi, please send a screenshot for the error');
bot.send(reply);
}
});
}
}
);
bot.dialog('/', function(session) {
if(utils.hasImageAttachment(session)){
//--others
var stream = utils.getImageStreamFromMessage(session.message);
//resize the image stream
sharp('stream')
.resize(100, 100)
.toFile('stream', function(err) {
// output.jpg is a 200 pixels wide and 200 pixels high image
// containing a scaled and cropped version of input.jpg
});
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: uriBase,
qs: params,
body: stream ,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key' : subscriptionKey
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
const obj = JSON.parse(body);
console.log(obj);
//------------ get the texts from json as string
if(obj.regions =="" ){
session.send('OOOOPS I CANNOT READ ANYTHING IN THIS IMAGE :(');
}else{
let buf = ''
if(obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if(a && a.lines) {
a.lines.forEach((p, q, r) => {
if(p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
Thanks
According to the docs for Sharp's toFile() function, it returns a promise when no callback is provided.
So there is no I/O block while toFile executes, and the following code (the request.post in your snippet) keeps running. At that point, the image may not have been modified yet.
You can try to either use promise style code flow, like:
sharp('stream')
.resize(100, 100)
.toFile('stream')
.then(info => {
//do request post
})
or put the request code inside the callback function of toFile(), like:
sharp('stream')
.resize(100, 100)
.toFile('stream', function(err, info) {
//do request post
})
Your use of sharp('stream') doesn't work because the function is looking for a string (a file path) as its input, and you are trying to feed it a stream. Per the docs, you need to read from the readableStream and then process the image.
I tested the example below locally and it runs. As is, it will save the image file on the server in the location of the app.js file. The commented-out ".pipe(stream)" creates a writeableStream you could then access at a later point if that is what you need; in that case, you wouldn't use .toFile().
Hope this helps!
bot.dialog('/', function (session) {
if (utils.hasImageAttachment(session)) {
//--others
var stream = utils.getImageStreamFromMessage(session.message);
var transformer = sharp()
.resize(100)
.jpeg()
.toFile('image.jpg', function (err) {
if (err)
console.log(err);
})
.on('info', function (info) {
session.send('Image height is ' + info.height);
});
stream.pipe(transformer); //.pipe(stream);
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: "https://smba.trafficmanager.net/apis",
qs: params,
body: stream,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key': ""
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
console.log(body);
const obj = JSON.parse(body);
console.log(obj);
//------------ get the texts from json as string
if (obj.regions == "") {
session.send('OOOOPS I CANNOT READ ANYTHING IN THIS IMAGE :(');
} else {
let buf = ''
if (obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if (a && a.lines) {
a.lines.forEach((p, q, r) => {
if (p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
I have used Sharp in the way below in my case, and it works perfectly fine.
sharp('stream')
.png()
.resize(100, 100)
.toBuffer((err, buffer, info) => {
if (err)
console.log(err);
if (buffer) {
return buffer;
}
});
Sharp's toFile() saves the output to a file, so you give it a file name as an argument, while toBuffer() returns the image as a buffered object.
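Tying that back to the question's flow: since the bot hands you a stream, one way to get a buffer into sharp and then into the OCR request is to collect the stream first (a sketch, assuming the screenshot fits comfortably in memory; handleOcrResponse is a hypothetical response handler):
// collect the incoming stream into a buffer, resize it, then POST the result
const chunks = [];
stream.on('data', chunk => chunks.push(chunk));
stream.on('end', () => {
sharp(Buffer.concat(chunks))
.resize(100, 100)
.toBuffer((err, buffer) => {
if (err) return console.log(err);
options.body = buffer; // send the resized image instead of the original
request.post(options, handleOcrResponse);
});
});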
Hope it helps!
I can successfully upload a single image converted to a blob to Firebase 3. However, when I try to upload multiple images from my Ionic 1 app, it fails.
The console logs that the data was uploaded successfully, but I cannot see it in the Firebase Storage UI; I only see the first image I selected.
This is the code that gets the images:
$scope.getImages = function () {
var options = {
maximumImagesCount: 10,
width: 1000,
height: 1000,
quality: 100
};
$cordovaImagePicker.getPictures(options)
.then(function (results) {
for (var i = 0; i < results.length; i++) {
$scope.selectedImages.push(results[i]);
var fileName = results[i].replace(/^.*[\\\/]/, '');
// the image storage path is different on android
var path = '';
if ($ionicPlatform.is("android")) {
path = cordova.file.cacheDirectory;
}
else {
path = cordova.file.tempDirectory;
} // end of android image rectification
$cordovaFile.readAsArrayBuffer(path, fileName)
.then(function (realImage) {
var imageBlob = new Blob([realImage], { type: "image/jpeg" });
imgUpload(imageBlob, fileName)
})
}
}, function (error) {
// error getting photos
console.log(error.name);
})
}; // end of $scope.getImages
Below is the code for the Firebase service:
function imgUpload(_imgBlob, _filename) {
var uploadsMetadata = {
cacheControl: "max-age=" + (60 * 60 * 24 * 365) // One year of seconds
};
//create the storage reference and use it to access
var storeRef = firebase.storage().ref();
var uploadTask = storeRef.child('images/' + _filename).put(_imgBlob, uploadsMetadata);
return new Promise(function (resolve, reject) {
uploadTask.on('state_changed', function (snap) {
console.log('Progress: ', snap.bytesTransferred, '/', snap.totalBytes, ' bytes');
}, function (err) {
console.log('upload error', err);
reject(err);
}, function () {
var metadata = uploadTask.snapshot.metadata;
var key = metadata.md5Hash.replace(/\//g, ':');
var fileRecord = {
downloadURL: uploadTask.snapshot.downloadURL,
key: key,
metadata: {
fullPath: metadata.fullPath,
md5Hash: metadata.md5Hash,
name: metadata.name
}
};
// uploadsRef.child(key).set(fileRecord).then(resolve, reject);
});
}); // end of Promise
// return snapshot;
} // end of function imgUpload
[Edited 2/15/2017]
Padrian, without knowing what your specific error(s) were in the code above, I can only assume that your issue is the same one I was dealing with, namely that metadata.md5Hash was failing since the metadata wasn't defined. Barring the UI framework differences, my code and your code are nearly identical.
My first refactoring to remove the error was to drop the event listening and just use a callback on .put(), like so:
storageRef.child(uploadFile.name).put(uploadFile).then(snap => cb(snap)).catch(err => errCB(err))
I further refactored my code, and just as mysteriously as the issue had appeared, it went away. Below is my full code for processing the upload file. I placed the code inside an async.queue so I could limit the uploads to 4 files at a time.
const q = async.queue(function (file, callback) {
let reader = new window.FileReader()
reader.onload = function (e) {
const tags = ExifReader.load(e.target.result)
if (tags['Orientation'].description === 'left-bottom') {
file.rotation = 'rotate(-90deg)'
}
}
reader.readAsArrayBuffer(file.file.slice(0, 128 * 1024))
let uploadTask = storageRef.child(file.file.name).put(file.file, uploadsMetadata)
file.uploadSuccess = false
file.uploadError = false
file.active = 'active'
uploadTask.on('state_changed',
function (snap) {
file.progress = snap.bytesTransferred / snap.totalBytes * 100
},
function (err) {
file.uploadError = true
file.errorMessage = err
callback(err)
},
function () {
let metadata = uploadTask.snapshot.metadata
let key = metadata.md5Hash.replace(/\//g, ':')
let pendingInventoryRecord = {
downloadURL: uploadTask.snapshot.downloadURL,
key: key,
metadata: {
fullPath: metadata.fullPath,
md5Hash: metadata.md5Hash,
name: metadata.name
},
style: file.invStyle,
size: file.invSize,
count: file.invCount,
rotate: file.rotation || ''
}
uploadRef.child(key).set(pendingInventoryRecord)
.then(function () {
pendingInventoryCountRef.child('counter').transaction(function (currentVal) {
return (currentVal || 0) + 1
})
callback(null, file)
})
.catch(function (err) { console.log(err) })
})
}, 4)
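For completeness, a queue like this is fed with q.push, and each task gets its own completion callback (a sketch; files is a hypothetical array of the wrapper objects used above):
files.forEach(function (file) {
q.push(file, function (err, processed) {
if (err) return console.log('upload failed', err)
console.log('uploaded', processed.file.name)
})
})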