I would like to download (not clone) an archive from my GitLab repository, but I get this error:
incorrect header check (Zlib._handle.onerror)
This is my function:
var fs = require('fs');
var url = require('url');
var https = require('https');
var path = require('path');
var targz = require('tar.gz');

function downloadFile(source, destination, name) {
    var options = {
        host: url.parse(source).host,
        port: 443,
        path: url.parse(source).pathname
    };
    var file = fs.createWriteStream(destination + path.sep + name);

    https.get(options, function(res) {
        res.on('data', function(data) {
            file.write(data);
        }).on('end', function() {
            file.end();
            console.log('File ' + name + ' downloaded to ' + destination);
            targz().extract(destination + '/' + name, destination)
                .then(function() {
                    console.log('Job done!');
                })
                .catch(function(err) {
                    console.log('Something is wrong ', err.stack);
                });
        });
    });
}
The downloaded file is a tar.gz archive. I tried setting some headers, but without success. The source param looks like: https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ
Any help please?
The issue is that your file is not being downloaded correctly by the https module, which results in the extraction error from the tar.gz module.
You can use the request module together with tar.gz's createWriteStream to pipe the download straight into extraction in the destination folder:
var request = require('request');
var targz = require('tar.gz');

function downloadFile(source, destination, cb) {
    var read = request.get(source);
    var write = targz().createWriteStream(destination);

    read.pipe(write);

    write.on('finish', function() {
        cb(null);
    });

    write.on('error', function(err) {
        cb(err);
    });
}
var source = "https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ";
var destination = "/home/user/some/dir";

downloadFile(source, destination, function(err) {
    if (err) {
        console.log('Something is wrong ', err.stack);
    } else {
        console.log('Job done!');
    }
});
Note that for the finish event to be dispatched you will need version 1.0.2 of tar.gz (see this issue):
npm install tar.gz@1.0.2
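For reference, the same download-and-pipe-into-extraction idea also works with Node's built-in zlib and a plain tar extractor. The snippet below is only a sketch, not part of the original answer; it assumes the request and tar-fs packages and that the archive really is gzip-compressed:

var request = require('request');
var zlib = require('zlib');      // built into Node, nothing to install
var tarfs = require('tar-fs');   // extracts a plain tar stream into a directory

var source = "https://gitlab.com/api/v3/projects/:ID/repository/archive?token=XXYYZZ";
var destination = "/home/user/some/dir";

// Stream the download through gunzip and straight into the extractor,
// so nothing has to be written to disk as a .tar.gz first.
request.get(source)
    .on('error', function (err) { console.log('Download failed', err); })
    .pipe(zlib.createGunzip())
    .pipe(tarfs.extract(destination));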
Related
I'm learning server-side JavaScript and am trying to test a GET request using Postman, where the server (server.js) receives a request for products.html (products.js) and returns the products JSON.
My files are packaged via npm, products.js is held in node_modules, and when I run server.js from the command line and then open localhost:3000 in the browser, I can see that it's connecting. But the browser returns a 404 and the command line shows a 400.
I feel like this is likely a syntax or file path error (or possibly I just don't know how to use Postman), but I've been running myself in circles trying to fix it. Does anything stand out as wrong / any advice on how to correct it?
//server.js
var fs = require('fs');
var http = require('http');
var url = require('url');
var product_mgr = require('product_manager'),
    path = require('path');

//create server that listens on port 3000
http.createServer(function(req, res) {
    var urlObj = url.parse(req.url, true, false);
    var filename = urlObj.pathname;

    fs.readFile(filename, function(err, data) {
        // if url not returned, show error code 404
        if (err) {
            res.writeHead(404, {'Content-Type': 'application/json'});
            return res.end("404 Not Found");
        } else {
            // if url returned, show success code 200
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.write(data);
            return res.end();
        }
    });
}).listen(3000, function() {
    console.log('Listening on port 3000.');
});
//products.js
//create class that represents a product
//include name, price, description and qty
class Product {
    constructor(name, price, description, qty) {
        this.name = name;
        this.price = price;
        this.description = description;
        this.qty = qty;
    }
}

var product_1 = new Product('Yo Yo', 2.99, 'Spinning Toy', 40);
var product_2 = new Product('Hot Wheel', 1.99, 'Tiny Toy Car', 30);
var product_3 = new Product('Glove', 23.49, 'Baseball Glove', 12);

var productArray = [product_1, product_2, product_3];

//create function called products which returns JSON array of product info
function products() {
    return JSON.stringify(productArray);
}

//export products function
exports.products = products;
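The 404 comes from fs.readFile: urlObj.pathname for the request is /products.html, so the server tries to read a file with that literal path from disk, which does not exist, and every request falls into the error branch. The product JSON is not a file at all; it is produced by the products() function exported from products.js. The fix is to route on the request URL and answer with that function's output instead of reading from disk (assuming products.js resolves as the product_manager module, as in the original require):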
var fs = require('fs');
var http = require('http');
var url = require('url');
var product_mgr = require('product_manager');

http.createServer(function(req, res) {
    var urlObj = url.parse(req.url, true, false);

    if (req.method == "GET" && urlObj.pathname == "/products.html") {
        res.writeHead(200, {
            'Content-Type': 'application/json'
        });
        // products() already returns a JSON string of the product array
        res.end(product_mgr.products());
    } else {
        res.writeHead(404, {
            'Content-Type': 'application/json'
        });
        res.end(JSON.stringify({
            error: "Invalid Request"
        }));
    }
}).listen(3000, function() {
    console.log('Listening on port 3000.');
});
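With that routing in place, a GET to http://localhost:3000/products.html from Postman or the browser should return the stringified product array with a 200, and any other path should get the 404 JSON.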
I have an Express server route which receives an XML file, then parses it and returns it as JSON.
When a user sends a file, it is saved in the './upload' directory and parsed with a Python script, which outputs JSON into './json-output'; that JSON is then served back.
When I first upload a file, the response comes back empty. But when I repeat the same upload (so a JSON from the previous upload already exists in './json-output'), it serves the JSON. It looks like an asynchronous issue, but I couldn't fix it.
app.post('/upload', function(req, res) {
    upload(req, res, async function(err) {
        if (err) {
            res.json({ error_code: 1, err_desc: err });
            return;
        }
        if (!req.file) {
            res.json({ error_code: 1, err_desc: 'No file passed' });
            return;
        }

        let fileName = req.file.originalname;
        const options = {
            args: [ fileName ]
        };
        const parserPath = path.join(__dirname + '/parser/parser.py');
        const readFile = promisify(PythonShell.run);
        await readFile(parserPath, options);

        fileName = fileName.split('.')[0];
        res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
    });
});
I'm running it inside a Docker image.
This is quite a "dirty fix" in my eyes, but you could do a while loop, e.g.:
fileName = fileName.split('.')[0];

// Busy-wait until the parser has written the file (note: this blocks the event loop)
while (!fs.existsSync(path.join(__dirname + `/json-output/${fileName}.json`))) {
    console.log('File does not exist!');
}

// Be careful: you should delete the file once the res.sendFile is done
res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
Decided to read the python-shell docs; here is an idea:
https://www.npmjs.com/package/python-shell#exchanging-data-between-node-and-python
So, in theory, you can start a new PythonShell for the parser and only send the response from its end callback:
let pyshell = new PythonShell(path.join(__dirname + '/parser/parser.py'), options);

pyshell.end(function (err, code, signal) {
    if (err) throw err;
    fileName = fileName.split('.')[0];
    res.sendFile(path.join(__dirname + `/json-output/${fileName}.json`));
});
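If you do end up waiting for the output file instead, a non-blocking variant of the polling idea is also possible. This is only a sketch (the json-output path, retry interval and retry limit are my assumptions); it retries with setTimeout rather than spinning the event loop:

const fs = require('fs');
const path = require('path');

// Poll for the parser's output file without blocking the event loop.
function waitForJson(fileName, res, attempt = 0) {
    const jsonPath = path.join(__dirname, 'json-output', `${fileName}.json`);
    if (fs.existsSync(jsonPath)) {
        res.sendFile(jsonPath);
    } else if (attempt < 50) { // give up after roughly 5 seconds
        setTimeout(() => waitForJson(fileName, res, attempt + 1), 100);
    } else {
        res.status(500).json({ error_code: 1, err_desc: 'Parser output not found' });
    }
}

// Usage inside the route, after the parser has been started:
// waitForJson(fileName.split('.')[0], res);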
I'm using the ng-file-upload directive to send an image to the server from my Angular application. Here is my code:
Upload.upload({
    url: 'http://localhost:5000/upload',
    data: { file: blobFile },
    resumeChunkSize: 10000,
}).then(function (resp) { // upload function returns a promise
    console.log('resp: ', resp);
});
The image is being transferred in chunks. But now I'm stuck: I don't know how to receive these chunks and merge them to create the full image. My server code is as follows:
handler: function (req, res) {
    var size = 0;

    req.on('data', function (data) {
        size += data.length;
        console.log('Got chunk: ' + data.length + ' total: ' + size);
    });

    req.on('end', function () {
        console.log("total size = " + size);
        res.send("response");
    });

    req.on('error', function (e) {
        console.log("ERROR ERROR: " + e.message);
    });
}
Every time I receive a chunk request, req.on('end', ...) fires. I'm a newbie, so I'm confused here.
Yes, a chunked image isn't so simple to handle...
Here is a solution I use:
var request = require('request');
var fs = require('fs');

function uploadChunkedImage(url, callback) {
    // request options
    var options = {
        url: url, // the url of the image
        method: 'GET'
    };

    // array of chunks
    var chunks = [];

    // request
    request(options, function (err, res, body) {
        console.log('END');
        // body is corrupted, you can't use it for images... :-(
        // so we will callback the concatenated buffer instead

        // concatenate chunk buffers
        chunks = Buffer.concat(chunks); // image buffer that you can use

        // callback the result
        callback(err, res, chunks);
    }).on('response', function (response) {
        response.on('data', function (chunk) {
            // collect chunk buffer
            chunks.push(chunk);
        });
    });
}
uploadChunkedImage('http://localhost:5000/upload', function (err, res, buffer) {
    if (!err) {
        // save (fs.writeFile expects a completion callback)
        fs.writeFile('./myDirPath/image.jpg', buffer, function (writeErr) {
            if (writeErr) console.log(writeErr);
        });
    } else {
        console.log(err);
    }
});
Don't forget to install request in your project with npm; fs and Buffer are built into Node, so they don't need to be installed:
npm install request --save
For more information:
request repository
fs documentation
You could do the same trick with ng-file-upload; there is a good example right here. Otherwise, I suggest trying something like the code below (not tested):
handler: function (req, res) {
    // array of chunks
    var chunks = [];

    req.on('data', function (data) {
        // collect
        chunks.push(data);
        console.log('Got chunk: ' + data.length);
    });

    req.on('end', function () {
        // concatenate
        chunks = Buffer.concat(chunks);
        console.log("Total size = " + chunks.length);

        // write the reassembled buffer back as the response body
        res.write(chunks, 'binary');
        res.end(null, 'binary');
    });

    req.on('error', function (e) {
        console.log("ERROR ERROR: " + e.message);
    });
}
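If the goal is to keep the uploaded image on the server rather than echo it back, the same reassembled buffer can simply be written to disk. Again only a sketch (not tested); the uploads directory and file name are assumptions:

var fs = require('fs');
var path = require('path');

// Same handler shape as above, but persisting the image instead of echoing it back.
function handler(req, res) {
    var chunks = [];

    req.on('data', function (data) {
        chunks.push(data);
    });

    req.on('end', function () {
        var image = Buffer.concat(chunks);
        var target = path.join(__dirname, 'uploads', 'image.jpg'); // assumed location

        fs.writeFile(target, image, function (err) {
            if (err) {
                console.log('Could not save image', err);
                return res.end('error');
            }
            res.end('saved');
        });
    });
}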
Hope that helps.
I am currently trying to upload the assets of a website to Amazon S3 using aws-sdk and gulp, but so far I have only managed to upload single files using this code:
gulp.task('publish', function() {
    var AWS = require('aws-sdk'),
        fs = require('fs');

    AWS.config.accessKeyId = 'access_id';
    AWS.config.secretAccessKey = 'secret_key';
    AWS.config.region = 'eu-central-1';

    var fileStream = fs.createReadStream('folder/filename');

    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    fileStream.on('open', function () {
        var s3 = new AWS.S3();

        s3.putObject({
            Bucket: 'bucket_name',
            Key: 'assets/filename',
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            if (err) { throw err; }
            else { console.log("Upload successful"); }
        });
    });
});
Since I am neither a Node.js nor a JS dev, I have no idea how to upload everything in the assets folder to S3.
Ideally, applying the action I use to upload one file, but for each file, would be neat. How would this be doable?
Found the solution to my problem. With this code, I finally managed to upload all my assets to my bucket with the right ACLs. Hope this can help people not to spend as much time as I did on such a stupid problem.
/*
 * Dependencies
 */
var gulp = require('gulp');
var AWS = require('aws-sdk');
var fs = require('fs');
var walk = require('walk');

/*
 * Declaration of global variables
 */
var isPaused = false;

/*
 * Bucket access information
 */
AWS.config.accessKeyId = 'access_keyid';
AWS.config.secretAccessKey = 'secret_access_key';
AWS.config.region = 'region';

/*
 * Publishing function: uses a stream to push the files to the AWS bucket
 */
function publishit(filename) {
    var file = filename.substring('./'.length);
    var key = file.substring('src/'.length);
    var fileStream = fs.createReadStream(file);
    isPaused = true;

    // Check if there is an error on the file
    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    // Action to do on opening of the file
    fileStream.on('open', function () {
        var s3 = new AWS.S3();

        // Uploading the stream to the bucket
        s3.putObject({
            Bucket: 'bucket_name',
            Key: key,
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            // Show the error if there is any
            if (err) { throw err; }
            // If everything went successfully, print which file is being uploaded
            else { console.log("Uploading asset " + file); }
            // Closing the stream to avoid leaks and socket timeouts
            fileStream.close();
            // Changing the status of 'isPaused' to false to continue uploading the other assets
            isPaused = false;
        });
    });
}
gulp.task('assets', function() {
    var files = [];

    // Walker options (first arg is the folder you want to upload)
    var walker = walk.walk('./assets', { followLinks: false });

    walker.on('file', function(root, stat, next) {
        // Add this file to the list of files
        files.push(root + '/' + stat.name);
        next();
    });

    // Action after every file has been added to 'files'
    walker.on('end', function() {
        for (var filename in files) {
            // Publish every file added to 'files'
            publishit(files[filename]);

            // Wait for one push on the server to be done before calling the next one
            function waitForIt() {
                if (isPaused) {
                    setTimeout(function() { waitForIt(); }, 100);
                }
            }
        }
    });
});
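One remark on the task above: waitForIt is declared but never actually called, so in practice the end handler starts all uploads concurrently, which usually works fine. If you really want one upload at a time, a sequential variant could look like the sketch below. It is not part of the original answer; it reuses the files array, the aws-sdk client already configured above, and a simplified key:

// Upload the collected files strictly one after another.
function publishSequentially(files, index) {
    if (index >= files.length) {
        console.log('All assets uploaded');
        return;
    }

    var file = files[index].substring('./'.length);
    var s3 = new AWS.S3();

    s3.putObject({
        Bucket: 'bucket_name',
        Key: file, // adapt the key logic to your bucket layout
        Body: fs.createReadStream(file),
        ACL: 'public-read'
    }, function (err) {
        if (err) { throw err; }
        console.log('Uploaded asset ' + file);
        // Only start the next upload once this one has finished
        publishSequentially(files, index + 1);
    });
}

// Usage inside walker.on('end', ...):
// publishSequentially(files, 0);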
I am using node.js to do some interactions with an API that returns gzipped data. I browsed through the package manager and the wiki for a good compression library, but couldn't find one that hadn't been abandoned or that actually worked. Any idea how I can decompress the data using JavaScript or Node? (Or how to avoid receiving it compressed altogether?)
Here is what I have with comments:
app.get('/', function(req, res) {
    // rest is a restler instance
    rest.get('http://api.stackoverflow.com/1.1/questions?type=jsontext', {
        headers: {"Accept-Encoding": 'deflate'},
        //tried deflate, gzip, etc. No changes
    }).on('complete', function(data) {
        // If I do: sys.puts(data); I get an exception

        // Maybe I could do something like this:
        /*
        var child = exec("gunzip " + data,
            function(error, stdout, stderr) {
                console.log('stdout: ' + stdout);
                console.log('stderr: ' + stderr);
                if (error !== null) {
                    console.log('exec error: ' + error);
                }
            });
        */
    });
});
I used this one with success:
https://github.com/waveto/node-compress
// assumes: var http = require('http'), querystring = require('querystring'),
// compress = require('compress'), and a vectorize() helper defined elsewhere
this.get = function(options, cb) {
    http.get({
        host: 'api.stackoverflow.com',
        path: '/1.1/questions?' + querystring.stringify(vectorize(options || {}))
    }, function(res) {
        var body = [];
        var gunzip = new compress.Gunzip();
        gunzip.init();
        res.setEncoding('binary');
        res
            .on('data', function(chunk) {
                body.push(gunzip.inflate(chunk, 'binary'));
            })
            .on('end', function() {
                console.log(res.headers);
                gunzip.end();
                cb(null, JSON.parse(body.join('')));
            });
    }).on('error', function(e) {
        cb(e);
    });
};
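For completeness: current versions of Node ship zlib in core, so no third-party compression module is needed any more. A minimal sketch, assuming a reasonably modern Node and an endpoint that actually responds with gzip:

var https = require('https');
var zlib = require('zlib');

function getGzippedJson(url, cb) {
    https.get(url, { headers: { 'Accept-Encoding': 'gzip' } }, function (res) {
        // Pipe the compressed response through a gunzip stream and collect the output
        var gunzip = zlib.createGunzip();
        var body = '';

        res.pipe(gunzip);
        gunzip.on('data', function (chunk) { body += chunk.toString(); });
        gunzip.on('end', function () {
            try {
                cb(null, JSON.parse(body));
            } catch (parseErr) {
                cb(parseErr);
            }
        });
        gunzip.on('error', cb);
    }).on('error', cb);
}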