How to download a file with the okhttp library in Node.js - javascript

I need to download and save a file using only the 'okhttp' library in Node.js (not in Java), but I don't know how to save the file to my computer. I have to use only the okhttp library. Please help me.
okhttp: https://www.npmjs.com/package/okhttp
const okhttp = require('okhttp');

var MimeBuilder = okhttp.MimeBuilder;
var Request = okhttp.Request;
var RequestBody = okhttp.RequestBody;
var RequestBuilder = okhttp.RequestBuilder;
var FormEncodingBuilder = okhttp.FormEncodingBuilder;
var MultiPartBuilder = okhttp.MultiPartBuilder;

new RequestBuilder()
    .GET('http://maps.google.com/mapfiles/kml/pushpin/blue-pushpin.png')
    .bufferResponse()
    .buildAndExecute()
    .then(onComplete)
    .catch(onError);

function onComplete(msg) {
    console.log('success');
    // !!!!!!!
    // now I need to save blue-pushpin.png on my computer!
    // !!!!!!!
}

function onError(err) {
    console.log('error');
}

First require fs (filesystem). The response data is available at msg.data; pass that to fs.writeFileSync along with the filename.
const fs = require('fs');

// ...

function onComplete(msg) {
    console.log('success');
    fs.writeFileSync('blue-pushpin.png', msg.data);
}
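As a side note (a small sketch, not part of the original answer): if you'd rather not block the event loop, the asynchronous fs.writeFile works the same way, just with a callback:

function onComplete(msg) {
    console.log('success');
    // async variant: the callback fires once the file is on disk
    fs.writeFile('blue-pushpin.png', msg.data, function (err) {
        if (err) {
            console.log(err);
        }
    });
}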

Related

How to save JSON data in a file using writeFileSync?

I'm working on a home-automation (IoT) project where my websocket server is a subscriber to an MQTT broker. It gets temperature and light-intensity data from a microcontroller. In a nutshell, the data arrives as JSON, and I have to save a log of all the received data in a file. I used .writeFileSync(), but it only produced [object Object], and I have to edit data.json manually before I can run my program, because otherwise it throws an error.
This is the script:
var config = require('./data.json');

function writeData() {
    fs.writeFileSync('data.json', config);
}
Then I tried changing it to:
var config = require('./data.json');
let data2 = JSON.stringify(config);

function writeData() {
    fs.writeFileSync('data-2.json', data2);
}
but I can't find the file named data-2.json.
Please help.
EDIT
I've called the writeData() function on this part:
s.on('dev:on', function (id) {
    if (id == 'lamp1') {
        config.lamp1 = true;
    } else if (id == 'fan1') {
        config.fan1 = true;
    }
    client.publish(id, "true");
    writeData();
    console.log('Device ON RECEIVED for ' + id);
});
The rest of the code that follows is similar to this.
This should work:
const fs = require('fs');
let config = require('./data.json');

function writeData() {
    fs.writeFileSync('data-2.json', JSON.stringify(config));
}
writeData();
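The original call produced [object Object] because the plain object was coerced to a string before being written; passing it through JSON.stringify first avoids that. As a small optional tweak (not required for the fix), JSON.stringify also takes an indent argument if you want the log file to stay human-readable:

function writeData() {
    // the third argument (2) pretty-prints with two-space indentation
    fs.writeFileSync('data-2.json', JSON.stringify(config, null, 2));
}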

Writing to file only writes last item, not all items, why?

I'm trying to write a feed to a file using Node.js. The problem is, it doesn't write all the feed items, only the last one.
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function(article) {
    console.log('title; ', article.title);
    fs.writeFile("articles.json", JSON.stringify(article.title), function(err) {
        if (err) {
            console.log(err);
        }
    });
});
Why?
Just change fs.writeFile( to fs.appendFile( and you're fine.
fs.writeFile overwrites your file each time you call it whereas fs.appendFile adds to a file.
As #Robert says, you should use appendFile, but also note that that change alone won't write out valid JSON. I'm not sure what output you're trying to achieve - if you just want the titles, you could write out a txt file with a title on each line, like so:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function(article) {
    console.log('title; ', article.title);
    fs.appendFile("articles.txt", article.title + "\n", function(err) {
        if (err) {
            console.log(err);
        }
    });
});
To write out valid JSON you can do:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

let titles = [];

feedParser.parseUrl(url)
    .on('article', function (article) {
        console.log('title; ', article.title);
        titles.push(article.title);
    })
    .on('end', function () {
        fs.writeFile('articles.json', JSON.stringify({ titles }), function (err) {
            if (err) {
                console.log(err);
            }
        });
    });
fs.writeFile comes with some options, like flag. The default value of flag is 'w' (write), so your data is replaced by the new data on every call.
Use 'a' (append) instead:
{ flag: 'a' }
and you'll be fine.
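Applied to the code from the question, the call would look like this (a minimal sketch; the rest of the snippet stays the same):

fs.writeFile("articles.json", JSON.stringify(article.title), { flag: 'a' }, function(err) {
    if (err) {
        console.log(err);
    }
});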
But don't forget that fs.writeFile and fs.appendFile are higher-level helpers in the fs library that open and close the file each time you need to add data.
Preferably, use fs.createWriteStream, which returns a writable stream (a writable file handle, in other languages). Then reuse that stream whenever you need to write data to your file.
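A minimal sketch of that approach, applied to the feed example from the question (assuming the same ortoo-feedparser usage; the stream is opened once in append mode and reused for every article):

var fs = require('fs');
var feedParser = require('ortoo-feedparser');

// open the file once and keep the stream around
var out = fs.createWriteStream("articles.txt", { flags: 'a' });

feedParser.parseUrl("http://iwnsvg.com/feed").on('article', function(article) {
    // each article is written through the same stream
    out.write(article.title + "\n");
});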

Node JS - How to write stream big json data into json array file?

I'm having difficulty writing JSON data to a JSON file using the stream module.
I learned about this from several blog tutorials; one of them is this page.
Let's say I'm working with big JSON data in a JSON file. I don't think it's possible to keep all the JSON objects in memory, so I decided to do it with the stream module.
Here is the code I have so far:
writeStream.js
var Writable = require('stream').Writable,
    util = require('util');

var WriteStream = function() {
    Writable.call(this, {
        objectMode: true
    });
};

util.inherits(WriteStream, Writable);

WriteStream.prototype._write = function(chunk, encoding, callback) {
    console.log('write : ' + JSON.stringify(chunk));
    callback();
};

module.exports = WriteStream;
readStream.js
var data = require('./test_data.json'),
    Readable = require('stream').Readable,
    util = require('util');

var ReadStream = function() {
    Readable.call(this, {
        objectMode: true
    });
    this.data = data;
    this.curIndex = 0;
};

util.inherits(ReadStream, Readable);

ReadStream.prototype._read = function() {
    if (this.curIndex === this.data.length) {
        return this.push(null);
    }
    var data = this.data[this.curIndex++];
    console.log('read : ' + JSON.stringify(data));
    this.push(data);
};

module.exports = ReadStream;
Called with this code:
var ReadStream = require('./readStream.js'),
    WriteStream = require('./writeStream.js');

var rs = new ReadStream();
var ws = new WriteStream();

rs.pipe(ws);
Problem: I want to write this into a different file. How can I do that?
Can you please help me?
If you are looking for a solution that just writes the data from your ReadStream into a different file, you can try fs.createWriteStream. It will return a writable stream that your ReadStream can be piped into directly.
You will have to make a minor change in your readStream.js. You are currently pushing objects, which makes it an object stream, while a write stream expects either a String or a Buffer unless it is started in object mode. So you can do one of the following:
Start the write stream in object mode (see the sketch after this list). More info here.
Push a String or Buffer from your read stream, since the writable stream internally calls writable.write, which expects either a String or a Buffer. More info here.
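A minimal sketch of the first option, assuming you keep a custom object-mode writable like the WriteStream above (the FileWriteStream name and the newline-delimited output format are just for illustration); its _write stringifies each chunk and appends it to a file:

// fileWriteStream.js
var Writable = require('stream').Writable,
    util = require('util'),
    fs = require('fs');

var FileWriteStream = function(filename) {
    Writable.call(this, {
        objectMode: true
    });
    this.filename = filename;
};

util.inherits(FileWriteStream, Writable);

FileWriteStream.prototype._write = function(chunk, encoding, callback) {
    // chunk is still a plain object here, so stringify it before writing
    fs.appendFile(this.filename, JSON.stringify(chunk) + '\n', callback);
};

module.exports = FileWriteStream;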
If we follow the second option as an example, then your readStream.js should look like this:
var data = require('./test_data.json'),
    Readable = require('stream').Readable,
    util = require('util');

var ReadStream = function() {
    Readable.call(this, {
        objectMode: true
    });
    this.data = data;
    this.curIndex = 0;
};

util.inherits(ReadStream, Readable);

ReadStream.prototype._read = function() {
    if (this.curIndex === this.data.length) {
        return this.push(null);
    }
    var data = this.data[this.curIndex++];
    console.log('read : ' + JSON.stringify(data));
    this.push(JSON.stringify(data));
};

module.exports = ReadStream;
You can call the above using the following code:
var ReadStream = require('./readStream.js');
const fs = require('fs');
var rs = new ReadStream();
const file = fs.createWriteStream('/path/to/output/file');
rs.pipe(file);
This will write the data from test_data.json to the output file.
Also as a good practice and to reliably detect write errors, add a listener for the 'error' event. For the above code, you can add the following:
file.on('error', function(err) {
    console.log("err:", err);
});
Hope this helps.

local PDF file scraping in node.js

I have uploaded a PDF via a MEAN-stack web application using fs. I want to extract certain fields from the PDF and display them on the web app. I have looked at a couple of npm packages, like pdf.js and pdf2json, but I can't figure out the documentation and JavaScript callbacks used in the examples available. Please help!
I hope I can help answer your question. pdf2json can be used to parse a PDF and extract the text. There are a couple of steps that need to be taken to get it working. I have adapted the example from https://github.com/modesty/pdf2json.
The setup is to install pdf2json in the Node app, along with underscore. The example page didn't explain the need to define your own callback functions; it also used self instead of this to register them. So, with the appropriate changes, the code to extract all the text from the PDF will look something like this:
// Get the dependencies that have already been installed
// to ./node_modules with `npm install <dep>` in the root directory
// of your app
var _ = require('underscore'),
    PDFParser = require('pdf2json');

var pdfParser = new PDFParser();

// Create a function to handle the pdf once it has been parsed.
// In this case we cycle through all the pages, extract
// all the text blocks and print them to console.
// If you do `console.log(JSON.stringify(pdf))` you will
// see how the parsed pdf is composed. Drill down into it
// to find the data you are looking for.
var _onPDFBinDataReady = function (pdf) {
    console.log('Loaded pdf:\n');
    for (var i in pdf.data.Pages) {
        var page = pdf.data.Pages[i];
        for (var j in page.Texts) {
            var text = page.Texts[j];
            console.log(text.R[0].T);
        }
    }
};

// Create an error handling function
var _onPDFBinDataError = function (error) {
    console.log(error);
};

// Use underscore to bind the data ready function to the pdfParser
// so that when the data ready event is emitted your function will
// be called. As opposed to the example, I have used `this` instead
// of `self` since self had no meaning in this context.
pdfParser.on('pdfParser_dataReady', _.bind(_onPDFBinDataReady, this));

// Register error handling function
pdfParser.on('pdfParser_dataError', _.bind(_onPDFBinDataError, this));

// Construct the file path of the pdf
var pdfFilePath = 'test3.pdf';

// Load the pdf. When it is loaded your data ready function will be called.
pdfParser.loadPDF(pdfFilePath);
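One follow-up note, based on the pdf2json documentation (treat it as an assumption if your version behaves differently): the T values in each text run are URI-encoded, so you may want to decode them before displaying:

console.log(decodeURIComponent(text.R[0].T));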
I am running the code out of my server side controller.
module.exports = (function() {
    return {
        add: function(req, res) {
            var tmp_path = req.files.pdf.path;
            var target_path = './uploads/' + req.files.pdf.name;
            fs.rename(tmp_path, target_path, function(err) {
                if (err) throw err;
                // delete the temporary file, so that the explicitly set temporary upload dir does not get filled with unwanted files
                fs.unlink(tmp_path, function() {
                    if (err) throw err;
                    // edit here pdf parser
                    res.redirect('#/');
                });
            });
        },
        show: function(req, res) {
            var pdfParser = new PDFParser();
            var _onPDFBinDataReady = function (pdf) {
                console.log('Loaded pdf:\n');
                for (var i in pdf.data.Pages) {
                    var page = pdf.data.Pages[i];
                    // console.log(page.Texts);
                    for (var j in page.Texts) {
                        var text = page.Texts[j];
                        // console.log(text.R[0].T);
                    }
                }
                console.log(JSON.stringify(pdf));
            };
            // Create an error handling function
            var _onPDFBinDataError = function (error) {
                console.log(error);
            };
            pdfParser.on('pdfParser_dataReady', _.bind(_onPDFBinDataReady, this));
            // Register error handling function
            pdfParser.on('pdfParser_dataError', _.bind(_onPDFBinDataError, this));
            // Construct the file path of the pdf
            var pdfFilePath = './uploads/Invoice_template.pdf';
            // Load the pdf. When it is loaded your data ready function will be called.
            pdfParser.loadPDF(pdfFilePath);
        }
        // end controller
    };
})();

Create plugin gulp with stream

I created a plugin that writes JSON data to a JSON file.
But I don't understand why I should send my JSON object down the pipe instead of writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('/build/js/'))
        .pipe(gulp.dest('./build/js/hash.json'));
});
And not like this:
gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');

var PluginError = gutil.PluginError;

// Consts
const PLUGIN_NAME = 'get-hash-file';

var json = {};

function getHashFile(filename, basename)
{
    if (!filename) {
        throw PluginError(PLUGIN_NAME, "Missing filename !");
    }

    // Creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, callback) {
        if (file.isNull()) {
            this.push(file); // Do nothing if no contents
            return callback();
        }

        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
            return callback();
        }

        if (file.isStream()) {
            this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
            return callback();
        }
    }).on('finish', function () {
        fs.writeFile(filename, JSON.stringify(json), function(err) {
            if (err) {
                throw err;
            }
        });
    });

    // returning the file stream
    return stream;
}

// Exporting the plugin main function
module.exports = getHashFile;
What are your ideas?
Nothing prevents you from doing this... besides not respecting the plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe it to other plugins.
If I read your code correctly, you're trying to generate a file that contains all the sha hashes of the inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people could do with it.
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, the gulp-uglify authors are wondering how they can add source maps without mixing js and source-map files downstream.
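For what it's worth, here is a minimal sketch of what the streaming version could look like (the reworked getHashFile and the hash.json output name are illustrative, not the only way to do it): instead of writing with fs on 'finish', the flush callback of through.obj pushes the collected hashes downstream as a new virtual file, which users can then send to gulp.dest() or any other plugin.

var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');

function getHashFile(basename)
{
    var json = {};

    return through.obj(function (file, enc, callback) {
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
        }
        callback();
    }, function (callback) {
        // flush: emit the collected hashes as a single hash.json file
        this.push(new gutil.File({
            path: 'hash.json',
            contents: Buffer.from(JSON.stringify(json))
        }));
        callback();
    });
}

// Usage: the generated file ends up wherever gulp.dest points
// gulp.src('./build/js/**/*.js')
//     .pipe(getHashFile('/build/js/'))
//     .pipe(gulp.dest('./build/js/'));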
