Gunzip an API stream using JavaScript / Node?

I am using node.js to do some interactions with an API that returns gzipped data. I browsed through the package manager and the wiki for a good compression library, but couldn't find one that wasn't either abandoned or broken. Any idea how I can decompress the data using JavaScript or Node? (Or how to avoid receiving compressed data altogether?)
Here is what I have with comments:
app.get('/', function(req, res){
  // rest is a restler instance
  rest.get('http://api.stackoverflow.com/1.1/questions?type=jsontext', {
    headers: {"Accept-Encoding": 'deflate'},
    // tried deflate, gzip, etc. No changes
  }).on('complete', function(data) {
    // If I do: sys.puts(data); I get an exception
    // Maybe I could do something like this:
    /*
    var child = exec("gunzip " + data,
      function(error, stdout, stderr) {
        console.log('stdout: ' + stdout);
        console.log('stderr: ' + stderr);
        if (error !== null) {
          console.log('exec error: ' + error);
        }
      });
    */
  });
});

I used this one with success:
https://github.com/waveto/node-compress
this.get = function(options, cb){
  http.get({
    host: 'api.stackoverflow.com',
    path: '/1.1/questions?' + querystring.stringify(vectorize(options || {}))
  }, function(res){
    var body = [];
    var gunzip = new compress.Gunzip();
    gunzip.init();
    res.setEncoding('binary');
    res
      .on('data', function(chunk){
        body.push(gunzip.inflate(chunk, 'binary'));
      })
      .on('end', function(){
        console.log(res.headers);
        gunzip.end();
        cb(null, JSON.parse(body.join('')));
      });
  }).on('error', function(e){
    cb(e);
  });
};
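
For what it's worth, newer versions of Node bundle a zlib module, so the decompression can also be done without a third-party library. A minimal sketch (it assumes the API really does return gzip-compressed JSON for this endpoint):

var http = require('http');
var zlib = require('zlib');

http.get({
  host: 'api.stackoverflow.com',
  path: '/1.1/questions?type=jsontext',
  headers: { 'Accept-Encoding': 'gzip' }
}, function (res) {
  // pipe the compressed response through a gunzip stream
  var gunzip = zlib.createGunzip();
  var body = '';

  res.pipe(gunzip);
  gunzip.setEncoding('utf8');
  gunzip.on('data', function (chunk) {
    body += chunk;
  });
  gunzip.on('end', function () {
    console.log(JSON.parse(body));
  });
  gunzip.on('error', function (err) {
    console.log('gunzip error: ' + err.message);
  });
});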

Related

Issue calling a https REST service in Node

I am trying to call an internally hosted REST web service from Node.js, but I am getting "problem with request: unable to verify the first certificate".
The web service is HTTPS only; there is no HTTP version.
Here is my code:
var https = require('https');
const fs = require('fs');
var options = {
  host: 'path.to.application.rest.service',
  port: 443,
  path: '/Function',
  method: 'GET',
  cert: fs.readFileSync('<Path to Cert>')
};
var req = https.request(options, function(res) {
  console.log('STATUS: ' + res.statusCode);
  console.log('HEADERS: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    console.log('BODY: ' + chunk);
  });
});
req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
req.end();
This is the certificate chain of the service
I have tried using each of the certificates in the chain as the cert option, but they all fail with the same error. What am I doing wrong or missing?
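
Not an answer from the original thread, but the usual cause of this error is that Node cannot build the full chain up to a trusted root. In that case the missing intermediate/root certificates are typically supplied through the ca option rather than cert (cert is for client authentication). A sketch with hypothetical certificate paths:

var https = require('https');
var fs = require('fs');

var options = {
  host: 'path.to.application.rest.service',
  port: 443,
  path: '/Function',
  method: 'GET',
  // hypothetical paths: the intermediate/root certificates of the service's chain
  ca: [
    fs.readFileSync('/path/to/intermediate.pem'),
    fs.readFileSync('/path/to/root.pem')
  ]
};

var req = https.request(options, function (res) {
  console.log('STATUS: ' + res.statusCode);
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    console.log('BODY: ' + chunk);
  });
});
req.on('error', function (e) {
  console.log('problem with request: ' + e.message);
});
req.end();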

Upload a chunked image file issue

I'm using the ng-file-upload directive to send an image to the server from my Angular application. Here is my code:
Upload.upload({
  url: 'http://localhost:5000/upload',
  data: { file: blobFile },
  resumeChunkSize: 10000,
}).then(function (resp) { // upload function returns a promise
  console.log('resp: ', resp);
});
The image is being transferred in chunks, but now I'm stuck: I don't know how to receive these chunks and merge them to create the full image. My server code is as follows:
handler: function (req, res) {
  var size = 0;
  req.on('data', function (data) {
    size += data.length;
    console.log('Got chunk: ' + data.length + ' total: ' + size);
  });
  req.on('end', function () {
    console.log("total size = " + size);
    res.send("response");
  });
  req.on('error', function (e) {
    console.log("ERROR ERROR: " + e.message);
  });
}
Every time I receive a chunk request, req.on('end', ...) fires. I'm a newbie, so I'm confused here.
Yes, a chunked image isn't so simple to upload.
Here is a solution I use:
var request = require('request');
var fs = require('fs');

function uploadChunkedImage(url, callback){
  // request options
  var options = {
    url: url, // the url of the image
    method: 'GET'
  };
  // array of chunk buffers
  var chunks = [];
  // request
  request(options, function (err, res, body) {
    console.log('END');
    // body is corrupted, you can't use it for images... :-(
    // so we call back with the concatenated buffer instead
    chunks = Buffer.concat(chunks); // image buffer that you can use
    // callback the result
    callback(err, res, chunks);
  }).on('response', function (response) {
    response.on('data', function (chunk) {
      // collect chunk buffer
      chunks.push(chunk);
    });
  });
}

uploadChunkedImage('http://localhost:5000/upload', function(err, res, buffer){
  if(!err){
    // save
    fs.writeFile('./myDirPath/image.jpg', buffer);
  } else {
    console.log(err);
  }
});
Don't forget to install request in your project with npm (fs and Buffer ship with Node, so they don't need to be installed):
npm install request --save
For more information:
request repository
fs documentation
You could do the same trick with ng-file-upload; there is a good example right here. Otherwise, I suggest trying something like the code below (not tested):
handler: function (req, res) {
  // array of chunks
  var chunks = [];
  req.on('data', function (data) {
    // collect
    chunks.push(data);
    console.log('Got chunk: ' + data.length);
  });
  req.on('end', function () {
    // concatenate
    chunks = Buffer.concat(chunks);
    console.log("Total size = " + chunks.length);
    // res.send() is deprecated, use res.write instead
    res.write(chunks, 'binary');
    res.end(null, 'binary');
  });
  req.on('error', function (e) {
    console.log("ERROR ERROR: " + e.message);
  });
}
Hope that will help
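
One caveat: if resumeChunkSize really splits the upload into several separate requests, the concat above only ever sees one request's worth of data. A rough, untested sketch of the append-as-chunks-arrive idea (it assumes chunks arrive in order as raw request bodies; ng-file-upload normally posts multipart/form-data, so in practice you would put a parser such as multiparty in front of this):

var fs = require('fs');

function chunkHandler(req, res) {
  var pieces = [];
  req.on('data', function (data) {
    pieces.push(data);
  });
  req.on('end', function () {
    // append this request's payload to the file being assembled
    fs.appendFile('./upload-in-progress.jpg', Buffer.concat(pieces), function (err) {
      if (err) {
        res.statusCode = 500;
        return res.end('append failed');
      }
      res.end('chunk stored');
    });
  });
}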

Download GitLab repository archive using GitLab API and Node.js

I would like to download (not clone) an archive from my GitLab repository, but I get this error:
incorrect header check (Zlib._handle.onerror)
This is my function:
var fs = require('fs');
var url = require('url');
var https = require('https');
var path = require('path');
var targz = require('tar.gz');

function downloadFile(source, destination, name) {
  var options = {
    host: url.parse(source).host,
    port: 443,
    path: url.parse(source).pathname
  };
  var file = fs.createWriteStream(destination + path.sep + name);
  https.get(options, function(res) {
    res.on('data', function(data) {
      file.write(data);
    }).on('end', function() {
      file.end();
      console.log('File ' + name + ' downloaded to ' + destination);
      targz().extract(destination + '/' + name, destination)
        .then(function(){
          console.log('Job done!');
        })
        .catch(function(err){
          console.log('Something is wrong ', err.stack);
        });
    });
  });
}
The downloaded file is a tar.gz. I tried setting some headers, but without success. The source param looks like: https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ
Any help please?
The issue is that your file is not correctly downloaded by the https module, which results in an extraction error from the tar.gz module.
You can use the request module together with tar.gz's createWriteStream to pipe the extraction directly to the destination folder:
var request = require('request');
var targz = require('tar.gz');

function downloadFile(source, destination, cb) {
  var read = request.get(source);
  var write = targz().createWriteStream(destination);
  read.pipe(write);
  write.on('finish', function() {
    cb(null);
  });
  write.on('error', function(err) {
    cb(err);
  });
}

var source = "https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ";
var destination = "/home/user/some/dir";

downloadFile(source, destination, function(err) {
  if (err) {
    console.log('Something is wrong ', err.stack);
  } else {
    console.log('Job done!');
  }
});
Note that, for the finish event to be dispatched, you will need version 1.0.2 of tar.gz (see this issue):
npm install tar.gz#1.0.2
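
For reference, the original https approach can also be made to work by piping the response into the write stream and extracting only once it has finished. Note too that url.parse(source).pathname drops the ?token=... query string, while .path keeps it. A sketch reusing the same tar.gz extract call from the question:

var fs = require('fs');
var url = require('url');
var https = require('https');
var path = require('path');
var targz = require('tar.gz');

function downloadAndExtract(source, destination, name, cb) {
  var options = {
    host: url.parse(source).host,
    port: 443,
    path: url.parse(source).path // .path keeps the ?token=... query string
  };
  var file = fs.createWriteStream(destination + path.sep + name);

  https.get(options, function (res) {
    res.pipe(file); // let the stream handle flushing and backpressure
    file.on('finish', function () {
      // only extract once the archive is fully written to disk
      targz().extract(destination + path.sep + name, destination)
        .then(function () { cb(null); })
        .catch(cb);
    });
  }).on('error', cb);
}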

Why does node.js display same content multiple times?

I have code that serves an HTML document (which includes a .jpg image) and code that creates a new .txt file, saves data into it, and displays it in the console.
But every time I run this code in Google Chrome, the data gets displayed three times...
Code
var http = require('http');
var fs = require('fs');
var path = require('path');

http.createServer(function (request, response) {
  // read the HTML file
  var file_path = '.' + request.url;
  if (file_path == './')
    file_path = './func.html';
  var extname = path.extname(file_path);
  var contentType = 'text/html';
  switch (extname) {
    case '.jpg':
      contentType = 'image/jpg';
      break;
  }
  fs.readFile(file_path, function (error, content) {
    if (error) {
      if (error.code == 'ENOENT') {
        fs.readFile('./404.html', function (error, content) {
          response.writeHead(200, { 'Content-Type': contentType });
          response.end(content, 'utf-8');
        });
      }
      else {
        response.writeHead(500);
        response.end('Server error!: ' + error.code + ' ..\n');
      }
    }
    else {
      response.writeHead(200, { 'Content-Type': contentType });
      response.end(content, 'utf-8');
    }
  });
  fs.writeFile("external-data.txt", "- TEXTFILE-CONTENT\n", function (err) {
    if (err) {
      return console.log(err);
    }
    console.log("Data saved!\n");
  });
  fs.readFile(__dirname + '//external-data.txt', function (err, data) {
    console.log(data.toString());
    response.end(data);
  });
Result
Any suggestions? What's wrong with the code, and why does it display the same text multiple times instead of once? Thank you!
Open the developer tools in Chrome. Look at the Network tab. Make the request.
You will see Chrome make multiple requests. For example, for /favicon.ico.
Each request triggers your code.
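
If you want the file write/read to run only for the real page request, one common trick is to short-circuit favicon (and other stray) requests at the top of the handler. A minimal sketch (the port is an assumption; keep whatever the original server listens on):

var http = require('http');

http.createServer(function (request, response) {
  if (request.url === '/favicon.ico') {
    response.writeHead(204); // No Content: skip the logging/writing for this request
    return response.end();
  }
  // ... the existing readFile / writeFile / readFile logic goes here ...
  response.writeHead(200, { 'Content-Type': 'text/plain' });
  response.end('page request: ' + request.url);
}).listen(8080); // port assumed; use whatever the original server uses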

communicate between a server and a client with node.js

I have a Node.js server:
// *********** Server that receives orders ************ //
// to use features of the http protocol. //
var http = require('http');
// initialize to empty string. //
var req = "";
// create the server that will receive an order Request. //
var server = http.createServer(function(req, res) {
  res.writeHead(200, {'content-type': 'text/plain'});
  // when data is successfully received, a success message is displayed. //
  res.on('data', function(data){
    req += data; // received data is appended. //
    console.log("We have received your request successfully.");
  });
});
// An error message is displayed - error event. //
server.on('error', function(e){
  console.log("There is a problem with the request:\n" + e.message);
});
// server listens at the following port and localhost (IP). //
server.listen(8000, '127.0.0.1');
and then I have a Node.js client:
var http = require("http");
var querystring = require("querystring");
var postOrder = querystring.stringify({
'msg': 'Hello World!'
});
var options = {
hostname: '127.0.0.1',
port: 8000,
path:'/order',
method:'POST',
headers:{
'Content-Type' :'application/x-www-form-urlencoded',
'Content-Length' : postOrder.length
}
};
var req = http.request(options, function(res) {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('BODY: ' + chunk);
});
});
req.on('error', function(e) {
console.log('problem with request: ' + e.message);
});
// write data to request body
req.write(postOrder);
req.end();
I am trying to figure out how to make the client post its order to the server and get a response back (either a success message or an error message) from the command line.
Currently I run the server with $ node server.js and then run the client with $ node client.js, but I get no response.
I think the problem comes from the server.
The server must be:
http.createServer(function(req, res) {
  if (req.method == 'GET') {
  } else if (req.method == 'POST') {
    var body = '';
    req.on('data', function(data) {
      body += data;
    });
    req.on('end', function() {
      console.log("We have received your request successfully.");
    });
  }
  res.end("ok");
})
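
Putting it together, a minimal runnable version of that server which also echoes the received body back to the client (the port matches the client above; the response text is just an example):

var http = require('http');

http.createServer(function (req, res) {
  if (req.method === 'POST') {
    var body = '';
    req.on('data', function (data) {
      body += data;
    });
    req.on('end', function () {
      console.log('We have received your request successfully.');
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      res.end('Received: ' + body); // the client logs this as BODY: Received: ...
    });
  } else {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('ok');
  }
}).listen(8000, '127.0.0.1');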
