I'm using the ng-file-upload directive to send an image to the server from my Angular application. Here is my code:
Upload.upload({
url: 'http://localhost:5000/upload',
data: { file: blobFile },
resumeChunkSize: 10000,
}).then(function (resp) { //upload function returns a promise
console.log('resp: ', resp);
});
The image is being transferred in chunks, but now I'm stuck: I don't know how to receive these chunks and merge them to create the full image. My server code is as follows:
handler: function (req, res) {
var size = 0;
req.on('data', function (data) {
size += data.length;
console.log('Got chunk: ' + data.length + ' total: ' + size);
});
req.on('end', function () {
console.log("total size = " + size);
res.send("response");
});
req.on('error', function (e) {
console.log("ERROR ERROR: " + e.message);
});
}
Every time I receive a chunk request, req.on('end', ...) triggers. I'm a newbie, so I'm confused here.
Yes... a chunked image isn't so simple to upload. Here is a solution I use (this example fetches an image in chunks over a GET request, but the same buffering trick applies on the receiving end):
var request = require('request');
var fs = require('fs');
function uploadChunkedImage(url, callback){
// request options
var options = {
url: url, // the url of the image
method: 'GET'
};
// array of chunks
var chunks = [];
// request
request(options, function (err, res, body) {
console.log('END')
// the body argument is corrupted for binary data, so you can't use it for images
// instead, we call back with the concatenated buffer of the collected chunks
chunks = Buffer.concat(chunks); // image buffer that you can use
// callback the result
callback(err, res, chunks);
}).on('response', function (response) {
response.on('data', function (chunk) {
// collect chunk buffer
chunks.push(chunk);
});
});
}
uploadChunkedImage('http://localhost:5000/upload', function(err, res, buffer){
if(!err){
// save
fs.writeFile('./myDirPath/image.jpg', buffer, function (writeErr) {
if (writeErr) console.log(writeErr);
});
}else{
console.log(err);
}
});
Don't forget to install request in your project with npm; fs is a native Node module and Buffer is a native global, so neither needs installing:
npm install request --save
For more information:
request repository
fs documentation
You could do the same trick with ng-file-upload; there is a good example right here. Otherwise, I suggest trying something like the code below (not tested):
handler: function (req, res) {
// array of chunks
var chunks= [];
req.on('data', function (data) {
// collect
chunks.push(data);
console.log('Got chunk: ' + data.length);
});
req.on('end', function () {
// concatenate
chunks = Buffer.concat(chunks);
console.log("Total size = " + chunks.length);
// note: res.send() is an Express helper; with the plain http module, use res.write/res.end
res.write(chunks, 'binary');
res.end(null, 'binary');
});
req.on('error', function (e) {
console.log("ERROR ERROR: " + e.message);
});
}
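Note that with resumeChunkSize set, ng-file-upload may send each chunk as its own request, which is why 'end' fires every time; in that case you would append chunks to the file across requests. For the single-request case, if the goal is to save the merged image rather than echo it back, here is a minimal sketch of the end handler (the fs require and the ./uploads path are assumptions for illustration):

var fs = require('fs');

req.on('end', function () {
    // merge all collected chunk buffers into the full image
    var image = Buffer.concat(chunks);
    // the file name and directory here are hypothetical
    fs.writeFile('./uploads/image.jpg', image, function (err) {
        if (err) {
            res.statusCode = 500;
            return res.end('Write failed');
        }
        res.end('Upload complete: ' + image.length + ' bytes');
    });
});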
Hope that helps.
I'm following an article about making HTTP requests to NASA's Picture of the Day API. I'm trying to display the JSON object in the browser from my server, but all the Node.js examples output the API results to the server's console. Is it possible to have my server save/forward the response to the browser? I'd like to understand the native http module before relying on any dependencies. I'm not sure if it makes a difference, but I'm using Express to create my server. Anything will help, even a high-level explanation, because I'm so confused.
const https = require('https');
app.get('/', (req, res) => {
var url = 'https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY';
var nasa_obj
var request = https.get(url, function (resp) {
var body = '';
resp.on('data', function (chunk) {
body += chunk;
});
resp.on('end', function () {
nasa_obj = JSON.parse(body);
console.log("Got a response: ", nasa_obj);
res.send(nasa_obj)
});
}).on('error', function (e) {
console.log("Got an error: ", e);
});
request.end()
})
UPDATE: the code above is correct.
You only want to send the response once it has been returned to you:
const https = require('https');
app.get('/', (req, res, next) => {
var url = 'https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY';
var nasa_obj
var request = https.get(url, function (response) {
var body = '';
response.on('data', function (chunk) {
body += chunk;
});
response.on('end', function () {
console.log("Got a response: ", body);
res.send(body);
});
}).on('error', function (e) {
console.log("Got an error: ", e);
next(e); // Pass error to error handling middleware
});
request.end(); // optional: https.get() calls req.end() automatically
})
Also make sure you are properly handling errors: either send back a response to the browser or, as the code above is doing, pass the error on to error-handling middleware.
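For completeness, a minimal sketch of the error-handling middleware that next(e) would land in; the status code and message here are assumptions:

// registered after all routes; Express recognizes error middleware
// by its four-argument signature
app.use(function (err, req, res, next) {
    console.error(err);
    res.status(500).json({ error: 'Upstream request failed' });
});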
In my React front-end, I am using the 'react-drop-to-upload' module to allow the user to drag a file in and upload it. I followed the example on the npm module page and created a handler called handleDrop. The code looks like:
handleDrop(files) {
var data = new FormData();
alert((files[0]) instanceof File);
files.forEach((file, index) => {
data.append('file' + index, file);
});
fetch('/file_upload', {
method: 'POST',
body: data
});
}
On my Express backend, I have the following code:
app.post('/file_upload', function(req , res){
var body = '';
req.on('data', function (data) {
body += data;
});
var post = "";
req.on('end', function () {
//post = qs.parse(body);
console.log(body);
// this won't create a buffer for me
//var fileBuffer = new Buffer(body);
//console.log(fileBuffer.toString('ascii'));
//pdfText(body, function(err, chunks) {
//console.log(chunks);
//});
});
//console.log(typeof post);
});
If I drop a .txt file and console.log the body, it gives me:
------WebKitFormBoundaryqlp9eomS0BxhFJkQ
Content-Disposition: form-data; name="file0"; filename="lec16.txt"
Content-Type: text/plain
The content of my data!
------WebKitFormBoundaryqlp9eomS0BxhFJkQ--
I am trying to use the pdfText module, which takes in a buffer or a pathname to the PDF file and extracts its text into an array of text 'chunks'. I want to convert the body object into a buffer using var fileBuffer = new Buffer(body); but that won't work. Can someone help me with this? Thanks!
You need a parser for multipart form data; you can look into multer for that.
Example code for you:
var multer = require('multer');
var fs = require('fs');

app.post('/file_upload', function(req, res){
var storage = multer.diskStorage({
destination: tmpUploadsPath // a directory path you define for temporary uploads
});
var upload = multer({
storage: storage
}).any();
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end('Error');
} else {
console.log(req.body);
req.files.forEach(function(item) {
// console.log(item);
// do something with the item,
const data = fs.readFileSync(item.path);
console.log(data);
});
res.end('File uploaded');
}
});
});
To understand the example code in depth, head here. Remember, you will get the file data as a buffer, not as the parsed content.
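Since the original goal was to feed the upload into pdfText, which the question says accepts a buffer or a pathname, here is a minimal sketch of what could go inside the req.files.forEach loop (assuming pdfText behaves as the question describes, and that the npm package name is pdf-text):

var pdfText = require('pdf-text'); // package name is an assumption

// item.path is where multer stored the uploaded file on disk;
// pdfText should also accept a buffer such as fs.readFileSync(item.path)
pdfText(item.path, function (err, chunks) {
    if (err) return console.log(err);
    // chunks is an array of text strings extracted from the PDF
    console.log(chunks);
});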
I want to do a very simple task, yet I am stuck!
The scenario is like this:
After a get request to my api, I want to http.get from some external site, and then send back the response from this external site to the original api request.
Obviously, the calls are asynchronous, so the string loremParagraph doesn't load before it is sent back in the API response.
I also get the error: Error: Can't set headers after they are sent.
Here is my code:
module.exports = function(app, express) {
var myLoremRouter = express.Router();
var loremParagraph = '';
//HTTP GET accessed at localhost:8081/mylorem
myLoremRouter.get('/', function(req, res) {
// Fetch one paragraph of lorem ipsum text from http://www.faux-texte.com/text-random-1.html
http.get("http://www.faux-texte.com/text-random-1.html", function(resp) {
resp.on('data', function(chunk) {
// console.log('BODY: ' + chunk);
var $ = cheerio.load(chunk);
loremParagraph = $('div.Texte').text();
console.log(loremParagraph);
// console.log(resp.status);
});
})
// If any error has occured, log error to console
.on('error', function(e) {
console.log("Got error: " + e.message);
});
//Finally send the result back to the api call
res.json({ message: loremParagraph });
});
return myLoremRouter;
};
Try this. Here, the chunks are accumulated until the complete body is ready to use.
https://stackoverflow.com/a/21953201/6219247
myLoremRouter.get('/', function(req, res) {
var body = '';
http.get({
host: 'www.faux-texte.com',
port: 80,
path: '/text-random-1.html'
}, function(resp) {
resp.on('data', function(chunk) {
body += chunk;
});
resp.on('end', function() {
var $ = cheerio.load(body);
var loremParagraph = $('div.Texte').text();
res.json({ message: loremParagraph });
});
})
.on('error', function(e) {
// handle/send error
res.send(/*...*/);
});
});
I would like to download (not clone) an archive from my GitLab repository, but I get this error:
incorrect header check (Zlib._handle.onerror)
This is my function:
var fs = require('fs');
var url = require('url');
var https = require('https');
var path = require('path');
var targz = require('tar.gz');
function downloadFile(source, destination, name) {
var options = {
host: url.parse(source).host,
port: 443,
path: url.parse(source).pathname
};
var file = fs.createWriteStream(destination + path.sep + name);
https.get(options, function(res) {
res.on('data', function(data) {
file.write(data);
}).on('end', function() {
file.end();
console.log('File ' + name + ' downloaded to ' + destination);
targz().extract(destination + '/' + name, destination)
.then(function(){
console.log('Job done!');
})
.catch(function(err){
console.log('Something is wrong ', err.stack);
});
});
});
}
The downloaded file is of type tar.gz. I tried setting some headers, but without success. The source param looks like: https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ
Any help please?
The issue is that your file is not correctly downloaded by the https module, which results in an extraction error from the tar.gz module.
You can use the request module together with tar.gz's createWriteStream to pipe the download and extraction directly to the destination folder:
var request = require('request');
var targz = require('tar.gz');
function downloadFile(source, destination, cb) {
var read = request.get(source);
var write = targz().createWriteStream(destination);
read.pipe(write);
write.on('finish', function() {
cb(null);
});
write.on('error', function(err) {
cb(err);
});
}
var source = "https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ";
var destination = "/home/user/some/dir";
downloadFile(source, destination, function(err) {
if (err) {
console.log('Something is wrong ', err.stack);
} else {
console.log('Job done!');
}
});
Note that, for the finish event to be dispatched, you will need version 1.0.2 of tar.gz (see this issue):
npm install tar.gz#1.0.2
I am using Node.js to interact with an API that returns gzipped data. I browsed through the package manager and the wiki for a good compression library, but couldn't find one that hadn't been abandoned or that actually worked. Any idea how I can decompress the data using JavaScript or Node? (Or how to avoid the compressed data altogether?)
Here is what I have with comments:
app.get('/', function(req, res){
// rest is a restler instance
rest.get('http://api.stackoverflow.com/1.1/questions?type=jsontext', {
headers: {"Accept-Encoding": 'deflate'},
//tried deflate, gzip, etc. No changes
}).on('complete', function(data) {
// If I do: sys.puts(data); I get an exception
// Maybe I could do something like this:
/*
var child = exec("gunzip " + data,
function(error, stdout, stderr) {
console.log('stdout: ' + stdout);
console.log('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
});
*/
});
});
I used this one with success:
https://github.com/waveto/node-compress
this.get = function(options, cb){
http.get({host: 'api.stackoverflow.com', path:'/1.1/questions?' + querystring.stringify(vectorize(options || {}))}, function(res){
var body = [];
var gunzip = new compress.Gunzip();
gunzip.init();
res.setEncoding('binary');
res
.on('data', function(chunk){
body.push(gunzip.inflate(chunk, 'binary'));
})
.on('end', function(){
console.log(res.headers);
gunzip.end();
cb(null, JSON.parse(body.join('')));
});
}).on('error', function(e){
cb(e);
})
}
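As an alternative to node-compress: Node's built-in zlib module can decompress gzip streams directly, so no third-party library is needed. A minimal sketch against the same Stack Overflow endpoint:

var http = require('http');
var zlib = require('zlib');

http.get({
    host: 'api.stackoverflow.com',
    path: '/1.1/questions?type=jsontext',
    headers: { 'Accept-Encoding': 'gzip' }
}, function (res) {
    var body = '';
    // pipe the gzipped response through a gunzip transform stream
    var gunzip = zlib.createGunzip();
    res.pipe(gunzip);
    gunzip.on('data', function (chunk) {
        body += chunk;
    });
    gunzip.on('end', function () {
        console.log(JSON.parse(body));
    });
    gunzip.on('error', function (e) {
        console.log(e);
    });
}).on('error', function (e) {
    console.log(e);
});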