Right now I create a new url based on rowid and stream the picture back using fs. Below is my code that works right now.
This saves the picture from the blob pulled from the database.
exports.picFileSave = function (name, data) {
try{
// Query the entry
stats = fs.lstatSync(name);
// Is it a directory?
if (stats.isFile()) {
//do nothing;
}
} catch (e) {
var wstream = fs.createWriteStream(name);
wstream.write(data);
wstream.end();
}
};
This function gets the picture filename and sends it back
exports.getLocBg = function (rowid, callback) {
var filename = "tmp/loc/" + rowid + ".png";
try{
// Query the entry
stats = fs.lstatSync(filename);
// Is it a directory?
if (stats.isFile()) {
callback(filename);
}
} catch (e) {
locationdb.location
.findOne({where: {id: rowid}})
.then(function (locations) {
var wstream = fs.createWriteStream(filename);
wstream.write(locations.locationBackground);
wstream.end();
callback(filename);
});
}
};
This is simply the route that connects the client to the server
// Route: streams a location's background image back to the mobile client.
app.get('/mobile/locations/locbg/:rowid', function (req, res) {
  var sendOptions = {
    root: "./",
    dotfiles: 'deny',
    headers: {
      'x-timestamp': Date.now(),
      'x-sent': true
    }
  };
  location.getLocBg(req.params.rowid, function (filename) {
    // getLocBg yields the on-disk path of the (possibly freshly written) image.
    res.sendFile(filename, sendOptions, function (err) {
      if (err) {
        console.log(err);
        res.status(err.status).end();
        return;
      }
      console.log('Sent:', filename);
    });
  });
});
I want to be able to simply pull the blob from the database and send the picture back without writing a file and sending that back. How would I go about this?
Related
I'm working on node js app that is creating pdf by user id (using pdfkit), fills with user data and then uploads to Google Drive using Google Drive API. Client is getting back URL of this file. My problem is that when I upload it once, it works perfectly. But when I'm trying to do it again, the file uploads but it's empty. If I restart server it works again.
Pdf creation is also fine on the second attempt, the only problem is second create/upload. I have tried making the name of the file dynamic but that did not help. Only server restart works. Thank you.
Function for creating pdf
/**
 * Renders the user's missions into a PDF on disk at data/pdf/<userId>.pdf.
 * Resolves { success } only after the write stream has flushed, so a
 * subsequent read (e.g. the Google Drive upload) sees the complete file.
 * Returning before the stream finished is why the second upload used to
 * read an empty/partial file.
 */
const createPdf = async (req, res, missions) => {
  try {
    const { _id } = req.user;
    const pdfPath = path.join('data', 'pdf', _id + '.pdf');
    const doc = new PDFDocument({ margin: 50 });
    const stream = fs.createWriteStream(pdfPath);
    doc.pipe(stream);
    generateInnerPdf(doc, missions);
    doc.end();
    // Wait for the underlying file stream to emit 'finish' (or fail).
    await new Promise((resolve, reject) => {
      stream.on('finish', resolve);
      stream.on('error', reject);
    });
    return { success: true };
  } catch (err) {
    return { success: false };
  }
};
Function for upload on google drive and retrieve url
exports.uploads = (_id) => {
return new Promise((resolve) => {
const auth = new google.auth.JWT(
credentials.client_email,
null,
credentials.private_key,
scopes
);
const drive = google.drive({ version: 'v3', auth });
var folderId = 'xxxxxxxxxxx';
const pdfPath = path.join('data', 'pdf', _id + '.pdf');
var fileMetadata = {
name: `${_id}.pdf`,
parents: [folderId],
};
var media = {
mimeType: 'application/pdf',
body: fs.createReadStream(pdfPath),
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: 'id',
},
(error, result) => {
resolve({
fileUrl: `https://drive.google.com/file/d/${result.data.id}/view?usp=sharing`,
});
}
);
});
};
My pdf controller
exports.create = async (req, res) => {
try {
const { missions } = await getMissions(req.user._id);
const { success } = await createPdf(req, res, missions);
if (success) {
googleApi.uploads(req.user._id).then((result) => {
res.status(200).send(result);
});
} else {
res.status(422).send(err);
}
} catch (err) {
console.log(err);
res.status(422).send(err.message);
}
};
EDIT: Could the problem be that I'm resolving the promise again?
Fixed when setting timeout
if (success) {
setTimeout(function () {
googleApi.uploads(req.user._id).then((result) => {
res.status(200).send(result);
});
}, 500);
I am trying to get the name and creation date of the files. In the code below, an error is thrown when I call the API. It reads the directory and prints all the file names, but the result is never sent back to the callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');
var currentDate = new Date();
var objToReturn = [];
var logsDirectory = './logs'
// Collects { filename, fileDate } for every file in logs/<env> and passes
// the array to `callback`. Fixes in this version:
//  - the "final" handler is now the third argument of async.eachSeries()
//    (it was mistakenly passed to fs.readdir(), so callback never fired);
//  - the accumulator is function-local (a module-level array would be
//    shared across concurrent requests);
//  - done() is always invoked, so a failing stat no longer stalls the series.
// The callback keeps its original single-argument shape, callback(results),
// so existing callers are unaffected.
function readDirectory(env, callback) {
  var objToReturn = [];
  var dirPath = logsDirectory + '/' + env;
  fs.readdir(dirPath, function (err, files) {
    if (err) {
      // Directory unreadable: log and complete with whatever we have (none).
      console.info('error', err);
      return callback(objToReturn);
    }
    // loop through each file
    async.eachSeries(files, function (file, done) {
      var filePath = path.join(dirPath, file);
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
          // Skip this entry but keep the series moving.
          return done();
        }
        objToReturn.push({
          fileDate: stats.birthtime,
          filename: file
        });
        done();
      });
    }, function (err) {
      if (err) {
        console.info('error', err);
        return;
      }
      // when you're done reading all the files, do something...
      console.log('before Callback', objToReturn);
      callback(objToReturn);
    });
  });
}
exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');
// Route: returns name + creation date for every log file of the given env.
app.get('/getAllFiles', function (req, res) {
  var environment = req.query.env;
  console.log('printing', environment);
  stDirectory.readDirectory(environment, function (fileList) {
    res.json(fileList);
    console.log('Api files', fileList);
  });
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being errors (if there are any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
// Lists { filename, fileDate } for every file under logs/<env>.
// Uses the Node error-first callback convention: callback(err) on failure,
// callback(null, results) on success.
function readDirectory(env, callback) {
  // Per-call accumulator; module-level state would leak between requests.
  let results = [];
  const dirPath = logsDirectory + "/" + env;
  fs.readdir(dirPath, function (err, files) {
    if (err) return callback(err);
    // Stat each file in order, collecting its name and birth time.
    async.eachSeries(files, function (file, done) {
      const filePath = path.join(dirPath, file);
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
          return done(err);
        }
        results.push({
          fileDate: stats.birthtime,
          filename: file
        });
        done();
      });
    }, function (err) {
      if (err) {
        console.info("error", err);
        return callback(err);
      }
      // when you're done reading all the files, do something...
      console.log("before Callback", results);
      callback(null, results);
    });
  });
}
// To call it (error-first callback style):
stDirectory.readDirectory(env, function (err, files) {
  if (err) {
    return res.sendStatus(500);
  }
  res.json(files);
  console.log('Api files', files);
});
You should also consider using async.mapSeries() instead of async.eachSeries() and using a separate array (objToReturn).
I have a NodeJS script that calls the API for users, gets multiple data for each user and writes it all to local file. I am trying to upload that file to server once all of the data is written into the file. The problem is that the code that should upload the file gets executed before the file is entirely populated. The code is written below. I can't figure out how to make promise wait for first function to complete.
// Script: fetches data for each user from the API, writes rows to a local
// CSV file, then uploads that file to a server.
var fs = require('fs');
var server = require('some-server');
var service = require('./some-service.js');
var moment = require('moment-timezone');
var csvWriter = require('csv-write-stream');
// CSV writer configured to emit no header row.
var writer = csvWriter({
sendHeaders: false
});
var users = require('./some-users')
// Rows written via writer.write() stream into myFile.txt.
writer.pipe(fs.createWriteStream('myFile' + '.txt'))
// NOTE(review): the per-user promises started inside forEach below are
// never returned or awaited, so the final .then() (the upload) runs
// before the file is populated — this is the bug the answer below fixes
// with Promise.all + map.
service.login().then(function (response) {
users.forEach(function (user) {
service.getSpecificUser(user).then(function (response) {
var myUser = JSON.parse(response)
service.getDataForUser(user.Info).then(function (response) {
var userData = JSON.parse(response);
if (userData.IsValid) {
userData.AdditionalInfo.forEach(function (additionalInfo) {
service.getAdditionalInfo(myUser.Info, userData.data).then(function (response) {
//Collect additional info and combine final results to write into file
// write to output csv file
writer.write({
//write information that is of interest
})
}, function (error) {
console.log('error getting additional data', error);
})
}
)
}
}, function (error) {
console.log('error getting user data', error)
})
}, function (error) {
console.log('error', myUser, error)
})
});
}, function (error) {
console.log('not logged', response);
}).then(function () {
//perform uploading to server
// NOTE(review): this runs immediately after login resolves, not after the
// per-user writes above have completed.
var fpath = 'path of file that contains downloaded data'
console.log("Trying to upload to file: " +fpath)
service.UploadFile(fpath, function (error, result, response) {
if (!error) {
// NOTE(review): `name` is not defined in this scope — confirm the
// intended variable (perhaps fpath).
console.log("Uploaded " + name);
}
else {
console.log(error);
}
})
})
Any help would be appreciated.
You can substitute Promise.all() and Array.prototype.map() for .forEach(). The documentation for csv-write-stream shows calling .end() to complete the .write() calls, which you can do in the last .then().
// Fixed version: every nested promise is RETURNED, and forEach is replaced
// by Promise.all(users.map(...)), so the final .then() only runs once all
// per-user work has settled.
service.login().then(function(response) {
return Promise.all(users.map(function(user) {
return service.getSpecificUser(user).then(function(response) {
var myUser = JSON.parse(response)
return service.getDataForUser(user.Info).then(function(response) {
var userData = JSON.parse(response);
if (userData.IsValid) {
// One promise per AdditionalInfo entry; all must settle before this
// user's branch resolves.
return Promise.all(userData.AdditionalInfo.map(function(additionalInfo) {
return service.getAdditionalInfo(myUser.Info, userData.data).then(function(response) {
//Collect additional info and combine final results to write into file
// write to output csv file
writer.write({
//write information that is of interest
});
})
}))
}
})
})
}));
})
.then(function() {
// All rows written — close the CSV stream, then upload the file.
writer.end();
//perform uploading to server
var fpath = 'path of file that contains downloaded data'
console.log("Trying to upload to file: " + fpath)
service.UploadFile(fpath, function(error, result, response) {
if (!error) {
// NOTE(review): `name` is not defined in this scope — confirm the
// intended variable.
console.log("Uploaded " + name);
} else {
console.log(error);
}
})
})
.catch(function(e) {
console.log(e)
})
I'm trying to write a piece of my upload code as a service, because I need that function overall in my software. My project use sails.js - here the doc for a service.
In a controller I got this code, which uploads a file and after success it calls the function saveTheCampaign() and saves the file information in the DB.
// Uploads the 'logo' file from the request, then hands the stored file's
// basename to saveTheCampaign for persistence.
req.file('logo').upload({
  maxBytes: 10000000,
  saveAs: function (uploadFile, cb) {
    // Prefix with a timestamp to keep stored names unique.
    cb(null, Date.now() + uploadFile.filename);
  },
  dirname: sails.config.appPath + '/assets/images/campaign/'
}, function (err, uploadedFiles) {
  if (err) {
    return res.json(500, err);
  }
  if (uploadedFiles.length === 0) {
    // proceed without files
    return res.json({ error: "No image found for upload!" });
  }
  // Success: handle uploaded file — keep only the basename of the stored path.
  var pathParts = uploadedFiles[0].fd.split('\\');
  params["logo"] = pathParts[pathParts.length - 1];
  sails.controllers.campaign.saveTheCampaign(params, req, res);
});
saveTheCampaign: function (params, req, res) { //...}
Now I wanted to write this snippet as a service. My service is called UploadService and has a function called upload(), services can take two(2) arguments, option and a callback function. So I tried this to call the upload function of my service:
UploadService.upload(options, sails.controllers.campaign.saveTheCampaign(params, req, res));
The problem is, the params of the callback function (params, req, res) are not known at the time of the call, I get them AFTER the upload function is finished. How can I handle this?
One way to make this happen is by using the Q promise library. The snippet below is a working example of this. You'll need to set a value for sails.config.appPath.
Routes.js
'POST /upload' : 'CampaignController.upload'
UploadService.js
let q = require("q"); // https://github.com/kriskowal/q
module.exports = {
upload: function(options) {
let deferred = q.defer();
options['req'].file(options['fileFieldName']).upload({
maxBytes: 10,
saveAs: function(uploadedFile, cb) {
cb(null, Date.now() + uploadedFile.filename);
},
dirname: sails.config.appPath + '/assets/images/'
}, function(err, uploadedFiles) {
if (err) {
deferred.reject(err);
} else if (uploadedFiles.length === 0) {
// proceed without files
deferred.reject("No image found for upload!");
} else {
// Success: handle uploaded file
let params = [];
var fileName = uploadedFiles[0].fd.split('\\');
params["logo"] = fileName[fileName.length - 1];
deferred.resolve(params)
}
});
return deferred.promise;
}
}
CampaignController.js
module.exports = {
upload: function(req, res) {
let options = [];
options['fileFieldName'] = 'logo';
options['req'] = req;
UploadService.upload(options)
.then((params) => {
sails.controllers.campaign.saveTheCampaign(params);
res.send("Campaign Saved Successfully");
})
.catch((err) => res.send(err))
},
saveTheCampaign: function(params) {
console.log(`campaign ${params['logo']} saved`);
}
}
I'm trying to bulk upload attachments to CouchDB using node.js and nano.
First, the walk module is used to find all files in upload folder and create array from them.
Next, each file from the array is supposed to be inserted into CouchDB via pipe and nano module.
However, the final result is that only one attachment has been uploaded.
// Script: walks ./uploads and attaches each file to the CouchDB document
// 'rabbit' via nano. Only one attachment ends up stored — see note below.
var nano = require('nano')('http://localhost:5984')
var alice = nano.use('alice');
var fs = require('fs');
var walk = require('walk');
var files = [];
// Walker options
var walker = walk.walk('./uploads', {
followLinks: false
});
// find all files and add to array
walker.on('file', function (root, stat, next) {
files.push(root + '/' + stat.name);
next();
});
walker.on('end', function () {
// files array ["./uploads/2.jpg","./uploads/3.jpg","./uploads/1.jpg"]
// NOTE(review): forEach starts all inserts concurrently, each using the
// same _rev fetched below — every insert after the first conflicts, which
// is why only one attachment is uploaded (explained in the answer below).
files.forEach(function (file) {
//extract file name
// NOTE(review): fname is an implicit global shared by all iterations.
fname = file.split("/")[2]
alice.get('rabbit', {revs_info: true}, function (err, body) {
fs.createReadStream(file).pipe(
alice.attachment.insert('rabbit', fname, null, 'image/jpeg', {
rev: body._rev
}, function (err, body) {
if (!err) console.log(body);
})
)
});
});
});
This is because you are mixing an asynchronous api with assumptions of this being synchronous.
After the first request you will get conflicts, because the rabbit document has changed.
Can you confirm this using NANO_ENV=testing node yourapp.js?
I recommend using async if this is the problem
// Answer: process the files one at a time (a hand-rolled series pattern
// with a 100ms delay between items) instead of firing all inserts at once.
var nano = require('nano')('http://localhost:5984')
var alice = nano.use('alice');
var fs = require('fs');
var walk = require('walk');
var files = [];
// Walker options
var walker = walk.walk('./uploads', {
followLinks: false
});
walker.on('file', function (root, stat, next) {
files.push(root + '/' + stat.name);
next();
});
walker.on('end', function () {
// Kick off the series with the first collected file.
series(files.shift());
});
// Defers calling `callback(arg)` by 100ms to space out the requests.
function async(arg, callback) {
setTimeout(function () {callback(arg); }, 100);
}
// Called once the file queue is exhausted.
function final() {console.log('Done');}
// Processes one file, then recurses on the next queue item.
function series(item) {
if (item) {
async(item, function (result) {
// NOTE(review): fname is an implicit global.
fname = item.split("/")[2]
alice.get('rabbit', { revs_info: true }, function (err, body) {
if (!err) {
fs.createReadStream(item).pipe(
alice.attachment.insert('rabbit', fname, null, 'image/jpeg', {
rev: body._rev
}, function (err, body) {
if (!err) console.log(body);
})
)
}
});
// NOTE(review): this recursion runs right after alice.get is *initiated*,
// not after the insert completes — only the 100ms delay paces the
// requests, so rev conflicts are still possible; confirm.
return series(files.shift());
});
}
else {
return final();
}
}