I am using the CollectionFS package with the S3 adapter and I've looked at a few different solutions but cannot get this to work right.
The problem: Even though the file/image is being uploaded to S3 successfully, the callback for a successful upload is triggered before it is safe to display the image. This causes a broken image to be displayed sometimes.
I found out about the fileObj.once("uploaded", function(){}) callback, but it seems "uploaded" only means the image has been sent to the app server; the S3 upload has not happened by then. A temporary workaround I found is to just use a setTimeout of 3-4 seconds, but this is not reliable.
Here is my upload code:
FS.Utility.eachFile(event, function (file) {
  Session.set('profilePhotoUploaded', false);
  var newFile = new FS.File(file);
  newFile.metadata = {owner: Meteor.userId()};
  ProfileImages.insert(newFile, function (err, fileObj) {
    if (err) {
      console.log("error! - " + err);
    } else {
      // handle success depending on what you need to do
      var userId = Meteor.userId();
      // This does NOT run when the image is stored in S3. I think it runs when the image reaches the app server.
      fileObj.once("uploaded", function () {
        // timeout of 3 seconds to make sure the image is ready to be displayed
        // --- This is not a good solution and the image is not always ready
        setTimeout(function () {
          var uploadedImage = {
            "profile.image.url": "/cfs/files/profileImages/" + fileObj._id
          };
          Meteor.users.update(userId, {$set: uploadedImage});
          Session.set('profilePhotoUploaded', true);
        }, 3000);
        console.log("Done uploading!");
      });
    }
  });
});
Is there a different callback to check if the image has actually been stored in S3? I tried fileObj.once("stored", function(){}) but that does not work.
The issue is that the stored hook will fire when the original image is saved on the server, so if you're creating multiple copies (thumbnails) this hook will fire before your thumbnails are stored. You can check which version was stored by inspecting the storeName argument. In the server-side file where you define the ProfileImages collection, add the following code, replacing 'profilePhotoLarge' with the name assigned to your FS.Store.S3 store:
ProfileImages.on('stored', Meteor.bindEnvironment(function (fileObj, storeName) {
  if (storeName === 'profilePhotoLarge') {
    Meteor.users.update({_id: fileObj.metadata.owner}, {
      $set: {
        'profile.image.url': 'https://your AWS region domain/your bucket name/your folder path/' + fileObj._id + '-' + fileObj.name()
      }
    });
  }
}, function () { console.log('Failed to bind environment'); }));
For profile photos I created an S3 bucket and set the permissions to allow anyone to read the files, so I'm storing the URL to the image on S3, which may not be correct in your case. Since the user object is reactive on the client side, this update will cause the profile photo to update automatically.
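As a rough sketch of how that client-side reactivity can be used (the template name profilePhoto and the helper name are assumptions, not from the original code):
Template.profilePhoto.helpers({
  profileImageUrl: function () {
    // Meteor.user() is reactive, so this helper re-runs once profile.image.url is set
    var user = Meteor.user();
    return user && user.profile && user.profile.image && user.profile.image.url;
  }
});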
I found that fileObj.hasStored("profileImages") indicates exactly when the image has been stored on S3. So after starting the upload, I start an interval that checks every second whether it has been saved. This might not be the best solution, but it is what worked for me.
FS.Utility.eachFile(event, function (file) {
  Session.set('profilePhotoUploaded', false);
  var newFile = new FS.File(file);
  newFile.metadata = {owner: Meteor.userId()}; // TODO: check in deny that id is of the same user
  ProfileImages.insert(newFile, function (err, fileObj) {
    if (err) {
      console.log("error! - " + err);
    } else {
      // handle success depending on what you need to do
      var userId = Meteor.userId();
      // Timer every 1 second
      var intervalHandle = Meteor.setInterval(function () {
        console.log("Inside interval");
        if (fileObj.hasStored("profileImages")) {
          // File has been uploaded and stored. Can safely display it on the page.
          var uploadedImage = {
            "profile.image.url": "/cfs/files/profileImages/" + fileObj._id
          };
          Meteor.users.update(userId, {$set: uploadedImage});
          Session.set('profilePhotoUploaded', true);
          // file has been stored, close out the interval
          Meteor.clearInterval(intervalHandle);
        }
      }, 1000);
    }
  });
});
When I hit upload, the picture gets uploaded to Firebase Storage, but I can't seem to get the image URL. I can go to the Storage tab on Firebase, click on the image, copy its link, and paste it into the database where I need it for display, and that works. But I don't see an image URL or a download URL when I console.log the snapshot of the const task.
I fear that the method:
const task = uploadBytesResumable(storeref, ImSRC, metdata)
that I am using to upload the image does not produce an image URL. Could this be so?
Here is all the code for the upload button:
Upload.addEventListener('click', (e) => {
  let ImSRC = files[0];
  if (ImSRC == null) {
    alert('no picture selected');
  } else {
    const metdata = {
      contentType: ImSRC.type
    };
    const storeref = sRef(storage, "UsersProPic/" + cUserID);
    const task = uploadBytesResumable(storeref, ImSRC, metdata).then((snapshot) => {
      console.log(snapshot);
      function getData() {
        snapshot.getDownloadURL().then(function (url) {
          ProPicUrl = url;
        });
      }
      console.log(ProPicUrl);
    });
  }
});
Getting the download URL from Firebase Storage is (like uploading the data itself) an asynchronous operation. Just as any code that needs to run after the upload has completed must be inside the then() block for that task, any code that needs to run after the download URL has been determined must be inside the then() block for that call.
So:
const storeref = sRef(storage, "UsersProPic/" + cUserID);
const task = uploadBytesResumable(storeref, ImSRC, metdata).then((snapshot) => {
  console.log(snapshot);
  // with the modular SDK, getDownloadURL is imported from 'firebase/storage'
  // alongside ref and uploadBytesResumable
  return getDownloadURL(snapshot.ref);
}).then((url) => {
  ProPicUrl = url;
  console.log(ProPicUrl); // runs only after the URL has been determined
});
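The same flow can also be written with async/await, which keeps the ordering explicit. This is only a sketch assuming the same variables as in the question (Upload, files, storage, cUserID, ProPicUrl) and that getDownloadURL is imported from 'firebase/storage':
Upload.addEventListener('click', async () => {
  const ImSRC = files[0];
  if (ImSRC == null) {
    alert('no picture selected');
    return;
  }
  const storeref = sRef(storage, "UsersProPic/" + cUserID);
  // uploadBytesResumable returns a task that can be awaited like a promise
  const snapshot = await uploadBytesResumable(storeref, ImSRC, { contentType: ImSRC.type });
  const url = await getDownloadURL(snapshot.ref);
  ProPicUrl = url;          // assigned only after the URL has been resolved
  console.log(ProPicUrl);
});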
In my project I need to unzip a file and upload its contents to a Node.js server. I am able to unzip the file, e.g. I get the filenames of the contents etc.
After unzipping I want to upload the contents to my server. This is where the problem is. I am using FormData to transmit the files, but every time I try to upload them I get the following error in the Firefox console:
"TypeError: Argument 2 of FormData.append does not implement interface Blob."
So it seems that no file object is given to the FormData...
For unzipping I use JSZip [link to JSZip][1]
EDIT
I figured the first problem out myself. Now I am having trouble executing the code in the right order. My JS Code:
$("#file").on("change", function (evt) {
var formData = new FormData();
var counter = 0;
// remove content
$result.html("");
// be sure to show the results
$("#result_block").removeClass("hidden").addClass("show");
// Closure to capture the file information.
console.log('1');
function handleFile(f) {
var $fileContent = $("<ul>");
$result.append($fileContent);
JSZip.loadAsync(f)
.then(function (zip) {
zip.forEach(function (relativePath, zipEntry) {
console.log('2');
console.log(zipEntry);
zip.file(zipEntry.name).async("blob").then(function (u8) {
console.log('3');
console.log(u8);
formData.append('photo', u8, counter);
counter++;
$fileContent.append($("<li>", {
text: zipEntry.name
}));
})
});
}, function (e) {
$result.append($("<div>", {
"class": "alert alert-danger",
text: "Error reading " + f.name + ": " + e.message
}));
});
}
var files = evt.target.files;
handleFile(files[0]);
});
The console output is:
1
2
object
2
object
3
blob
3
blob
But what I want is:
1
2
object
3
blob
2
object
3
blob
But I don't know how to add another .then to make this work the way I want.
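One way to get the desired order is to collect the entries first and then chain the promises so each entry finishes before the next one starts. This is only a sketch under the same assumptions as the code above (jQuery, the $result element, and the formData/counter variables), not tested against the original project:
function handleFileSequentially(f) {
  var $fileContent = $("<ul>");
  $result.append($fileContent);
  return JSZip.loadAsync(f).then(function (zip) {
    // collect the entries so they can be processed one after another
    var entries = [];
    zip.forEach(function (relativePath, zipEntry) {
      entries.push(zipEntry);
    });
    // chain the promises: each entry is appended before the next one starts
    return entries.reduce(function (chain, zipEntry) {
      return chain.then(function () {
        console.log('2');
        console.log(zipEntry);
        return zip.file(zipEntry.name).async("blob").then(function (blob) {
          console.log('3');
          console.log(blob);
          formData.append('photo', blob, counter);
          counter++;
          $fileContent.append($("<li>", { text: zipEntry.name }));
        });
      });
    }, Promise.resolve());
  });
}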
I have written a function to upload a local file (using the Cordova File plugin) to my Firebase Storage. It looks like there is no API to iterate all the files in Storage (based on other SO threads), so I decided to write the download location to my Realtime DB.
At the moment, authentication is OFF in my Realtime DB and Storage for testing. The file is uploaded correctly and I can see it in my Storage, but I don't see any DB entry. Can someone help with what is going wrong?
// upload trip data to firebase. currently a public bucket
cloudUpload(prg) {
  console.log("cloud upload");
  //this.presentLoader("loading...");
  let storageRef = firebase.storage().ref();
  console.log("storage ref is " + storageRef);
  this.file.readAsArrayBuffer(this.file.dataDirectory, this.logFile)
    .then(succ => {
      console.log("File read");
      console.log(succ);
      let blob = new Blob([succ], {type: "text/plain"});
      console.log("Blob created");
      let name = "file-" + Date() + ".txt";
      let uploadUrl = storageRef.child(`tripdata/${name}`);
      let uploadTask = uploadUrl.put(blob);
      uploadTask.on(firebase.storage.TaskEvent.STATE_CHANGED,
        (snapshot) => {
          let progress = Math.round((snapshot.bytesTransferred / snapshot.totalBytes) * 100);
          prg.val = progress;
        },
        (error) => {
          console.log("Firebase put error " + error);
          setTimeout(() => { prg.val = -1; }, 500);
          this.presentToast("upload error", "error");
        },
        () => {
          prg.val = 100;
          setTimeout(() => { prg.val = -1; }, 500);
          // write download URL to realtime DB so we can iterate it later
          // there is no API in storage today to iterate
          let downloadURL = uploadTask.snapshot.downloadURL;
          console.log("Download url is " + downloadURL);
          let key = 'tripDataIndex/' + name;
          console.log("key=" + key);
          firebase.database().ref(key)
            .set({'url': downloadURL, 'uploadedon': Date()}) // nothing created
            .catch(err => { console.log("ERROR " + err); this.presentToast("error creating index", "error"); });
          this.presentToast("upload complete");
        }
      );
    })
    .catch(err => { console.log("Cordova Read Error " + err); });
}
It seems the problem was in the key value. My "name" was
let name = "file-"+Date()+".txt";
Date() includes spaces, parentheses, etc., which conflict with the key naming rules; using a different key name worked perfectly!
What is very odd, however, is that it did not throw an error. I added a .catch at the end like Frank suggested, but it never went to the catch handler.
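As a rough sketch of that fix (the exact replacement is an assumption, not the poster's code, and downloadURL is the same variable as in the question), a timestamp avoids the characters that Realtime Database keys do not allow:
// Date.now() yields only digits, and the ".txt" extension is kept out of the
// DB key, since ".", "#", "$", "[", "]" and "/" are not allowed in RTDB keys
let timestamp = Date.now();                      // e.g. 1464174184000
let name = "file-" + timestamp + ".txt";         // Storage object name
let key = "tripDataIndex/file-" + timestamp;     // Realtime DB key
firebase.database().ref(key)
  .set({ url: downloadURL, uploadedon: timestamp })
  .catch(err => console.log("ERROR " + err));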
I'm using the code below to store an image in PouchDB; result.base_64 contains the data. When I inspect it using PouchDB Inspector it shows the DB size as zero bytes, and clicking on the attachment shows "file not found".
var db = new PouchDB('xyz');
db.putAttachment('skul', 'skul', result.base_64, 'image/jpg').then(function () {
  return db.get('skul', {
    attachments: true
  });
}).then(function (doc) {
  console.log(doc);
});
I tried this for getting the attachment:
db.getAttachment('skul', 'skul', function (err, blob_buffer) {
  if (err) {
    return console.log(err);
  } else {
    // console.log(blob_buffer);
    var url = URL.createObjectURL(blob_buffer);
    var img = document.createElement('img');
    img.src = url;
    document.body.appendChild(img);
  }
});
This displays the image in the browser, but the URL of the image is
src="blob:http://localhost:8000/d1388aaa-f2c8-45ae-af39-e2b384e25c7c"
which seems to refer to the server, not the local machine.
Did you verify that result.base_64 isn't an empty string and that it's valid base64?
We have live working examples in the docs showing how to insert base64 data. You can run these examples in your browser and confirm that they work. :) https://pouchdb.com/guides/attachments.html
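As a rough sketch following that pattern (the document and attachment names here are placeholders; the base64 string is a 1x1 transparent GIF used purely for illustration):
var db = new PouchDB('xyz');
// a 1x1 transparent GIF as a plain base64 string (no "data:..." prefix)
var base64Gif = 'R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7';

// note: if the 'skul' doc already exists, its current _rev must be passed
// as the third argument to putAttachment
db.putAttachment('skul', 'pic.gif', base64Gif, 'image/gif').then(function () {
  return db.getAttachment('skul', 'pic.gif');
}).then(function (blob) {
  // getAttachment resolves with a Blob in the browser
  var img = document.createElement('img');
  img.src = URL.createObjectURL(blob);
  document.body.appendChild(img);
}).catch(function (err) {
  console.log(err);
});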
I am running Node.js on Raspbian and trying to save/update a file every 2-3 seconds using the following code:
var saveFileSaving = false;

function loop() {
  mainLoop = setTimeout(function () {
    // update data
    saveSaveFile(data, function () {
      //console.log("Saved data to file");
      loop();
    });
  }, 1500);
}

function saveSaveFile(data, callback) {
  if (!saveFileSaving) {
    saveFileSaving = true;
    var wstream = fs.createWriteStream(path.join(__dirname, 'save.json'));
    wstream.on('finish', function () {
      saveFileSaving = false;
      callback(data);
    });
    wstream.on('error', function (error) {
      console.log(error);
      saveFileSaving = false;
      wstream.end();
      callback(null);
    });
    wstream.write(JSON.stringify(data));
    wstream.end();
  } else {
    callback(null);
  }
}
When I run this it works fine for an hour then starts spitting out:
[25/May/2016 11:3:4 am] { [Error: EROFS, open '<path to file>']
errno: 56,
code: 'EROFS',
path: '<path to file>' }
I have tried the jsonfile plugin, which gives a similar write error after an hour.
I have tried both fileSystem.writeFile and fileSystem.writeFileSync; both give the same error after an hour.
I was thinking it had to do with the handle not being released before a new save occurs, which is why I started using the saveFileSaving flag.
Resetting the system via hard reset fixes the issue (a soft reset does not work, as the system seems to be locked up).
Any suggestions? I have searched the web and only found one other slightly similar question from 4 years ago, which was left unresolved.
Note: I am using the callback function from the code to continue with the main loop.
I was able to get this working by unlinking the file and recreating it on every save. While it is not pretty, it works and shouldn't cause too much overhead.
I also added a backup solution that saves a copy every 5 minutes in case the save file has issues.
Thank you for everyone's help.
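As a rough sketch of that unlink-then-write approach (not the poster's actual code; the file name and callback shape are assumptions carried over from the question):
var fs = require('fs');
var path = require('path');

function saveSaveFile(data, callback) {
  var filePath = path.join(__dirname, 'save.json');
  // remove the old file first; the unlink error is ignored so the first run
  // (when the file does not exist yet) still proceeds to the write
  fs.unlink(filePath, function () {
    fs.writeFile(filePath, JSON.stringify(data), function (err) {
      if (err) {
        console.log(err);
        return callback(null);
      }
      callback(data);
    });
  });
}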
Here are my ideas:
1) Check free space when this problem happens by typing in the terminal:
df -h
2) Also check whether the file is editable when the problem occurs, e.g. with nano or vim.
3) Your code is too complicated for simply scheduling data manipulation and writing it to a file. Because whenever the file is busy (saveFileSaving) you will lose data until the next iteration, try this code instead:
var
  async = require('async'),
  fs = require('fs'),
  path = require('path');

async.forever(function (next) {
  // some data manipulation
  try {
    fs.writeFileSync(path.join(__dirname, 'save.json'), JSON.stringify(data));
  }
  catch (ex) {
    console.error('Error writing data to file:', ex);
  }
  setTimeout(next, 2000);
});
4) How about keeping the file descriptor open?
var
  async = require('async'),
  fs = require('fs'),
  path = require('path');

var file = fs.createWriteStream(path.join(__dirname, 'save.json'));

async.forever(function (next) {
  // some data manipulation
  file.write(JSON.stringify(data));
  setTimeout(next, 2000);
});

var handleSignal = function (exc) {
  // close file
  file.end();
  if (exc) {
    console.log('STOPPING PROCESS BECAUSE OF:', exc);
  }
  process.exit(-1);
};

process.on('uncaughtException', handleSignal);
process.on('SIGHUP', handleSignal);
5) Hardware or software problems (maybe because of OS drivers) with the Raspberry Pi's storage controller.