Node.js: decode base64 and save it to a file using streams - javascript

In my Node.js application I decode base64-encoded images using the following line of code:
const fileDataDecoded = Buffer.from(base64EncodedfileData,'base64');
So far I can write a file with the following piece of code:
const fs = require('fs');
....
const fileDataDecoded = Buffer.from(base64EncodedfileData,'base64');
fs.writeFile("/tmp/test.png", fileDataDecoded, function(err) {
//Handle Error
});
Now what I want is for the decoded data in the buffer to be written to a file via streams, in order to make the application more efficient.
In other words, I want the file data to be written at the same time as it is being base64-decoded, so that large files can be written efficiently. If it is not possible to stream base64-decoded data via a Buffer, then I would like to know how it is possible to decode base64 data in a streaming way.

You can create a readable stream and push the image buffer onto it, then pipe it to a writable stream like this:
var base64 = 'iVBORw0KGgoAAAANSUhEUgAAAJAAAACQCAYAAADnRuK4AAAMcklEQVR42u2deVBUVxbGGxHQBOPEZdxr3KJJcO+INGsDDXGJC8RWBKMGFIhRGVxYLDCgINAoq9J2I2jiGCPDKGClZvKXmaWmZq1U4lRiJslkU2dMTWapmiQVWzlzz+vXLCrQ0G/vc6q+egVF1Tvvfr8+5977mvd0OgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKisEFgE9RUdEwc0uLL0k+oQfohWqgMV4tGs4lTaGoQE/QG8XCxCXX6+czI+LbmqfFdzTOWH7ZOl1+nZFB8l4zjj16gF7055XsgaXSlVhch32bqaPxTaZPmP7HdIfpezkUd+W0YiTTGNzhPUAv3kRvXPC4PFMMPCy5KJbkB0zQpXa75Ipj51W65BiXHr58YLpsNSoCIlcCse32lO4E7Y7YdpvD1Ga/x37fKY0aO9m5VSfMW4rxcXphc/rCPGK/g8g36rbICpGrFCLNLnhYYncwOanUq9qpXJKNW5sd2xrEtDZAZFNZvDxzIn4mr7el+7FP0XWp4TFpDJ6eLV8qiPAYcfbYR1PNhpE9PZWy+vhEt1pTu9sWVRw1VSR2DofxQi2E1BTuQS+xGEi6p8AOvrFttp87L9bmELfiNDrlRQC5rlk8gOyOmEtWMDQcvsq8HG42m32lbF8+c3NSRzGAPuUu1jlhpqqjomqEnmHLDLWVfjlxRcR49FSSNuaqPsssBU+wBL7h+3YnwaM6iHAlCOHNFd8GZWxeiJ5KUoWMRdz8Z/jS8twgLINiAOR17WoQbU3AiTQHUMRZi+Pp9KRl6Kk+XYJ5EA+Qn75w13xceQkLEEHiXjVqFAyg8KYKx5NbEgycp1IAxJ8kYNGBHYuEBMhELUvaJX8PgGalrAlDT4PMZn/JAFq4N22xUAAREDLMjXoANHPT6nDVAkQQyASRzACNEAIgMl9GiB4EaISqACLTZYZIzQCR2QqASK0AkckKgUiNAJG5CoJIbQDRPo/C9onUBRAZqrgdazUBRGYqsJWpBSC6MarQG7CqAUjsT1iLFaKKiiBiXz5E7FWRWL6YN+YvSxVSA0BSfMKMtRUQan4JQterUCzvqMoSZqZNeoiUDpBUJTrKcpQzI2rTbtj5ShVkl55QvDBPzBfzjiguhKjXa6SHSMkASTnvcQG0IjUH/njtOnx+67bihXlivph3eFEhRJ6t5CGySzcfIoB6A/Tcjjx4/5Mv4Pa//qt4YZ6Yb0+AOIjO14i7X6YGgKReZbgAWptxEG599U/4/o5D8cI8Md/7AXJCVC36pisB9BCA1mUehK++/jfcu3dP8cI8Md+HASQFRIoFyEQACQKQq52JNidSLEAybJRpFaDuibU4qzPFASTXTquWARIVIucTTQggrQMkGkTKAqiRABIRIFEg4jw7rQyA+AcmEUAiAiQ4RAyguCsEkFcB1LU6E2LBohSA4pwJEUASASTYPhEPUESzhQDyNoAEgYgA8m6AXBANuZ0pASB8DjEBJB9A3TvWNg8BSiCAvBWgIa/OCCACyCOI5AcokwBSEECDhkjzALU3wsoLVkg8VQfrT/StxOIyWJuRDynZh+HSW2/DL375O8UL88R8Me91BUcg4ZilT62psoDplAgQaR2gVecaYOuhSkjLq/B6bS2ogPiGwU6s7d4N0Maqaq+AI/UgO+Z3/7ydKT23/IG/W19mGfTqrN99Iq0DlFxepXl4tpQeh9U/tcG6nzTAi4UWBk85NGZlw28ytoJlb36vv006UiHsZiMBpH4lnj0Jsb86C6a3z0ByVQ3szD0KH6eZ4cvURPh1xjZ4KbfMI4D6hYgAUr+Sj1fDs281wSq2YNhafAzSWQXq2JkBH27fAE17smFHXrnHAPU5JyKANCA293mRLRSwfbl+l8mqTlZOCQeTpy2s39UZAeRd8hSgByAigAggjyAigAggjyC6bCOACCCP7uJ34uqMACKAhqSIM5Wdxtdr+a+0EkAE0BAAinz1GITUF8v1hTICSAsALasudMw0LyeACCAPAEoggAggAogAIoAIIAKIACKACCACiAB6/nA5bMoqJvFKPFRCAA3qvy1+nK/Oh4eLpMhd+wkgAmjoiniZACKACCACiAAigAggAogAIoAEAsjwfCaEJOyAZeu2q06YN+bvpQClyQ6QITED9Ku2wKLlm2Dhs0mqE+aN+eN1EEAyAPTM6m2qBOd+4XUQQDIAtHjl5i4T9CtTYHVaNiSk71e8ME/M15U7XoesAJmXa+8Zie4AhOXfZUJ0Ugb8/t334ePPbypemCfm2wU/uw5ZAdLiQzbdASgkMR0Wr3B+kmOSMuFP167Dpzf+rnhhnpgvV31Y/ngd8gF0yHsBcq7CMiB4bSrEv5AF7374N/jH1/9RvDBPzBfzxvzlXIXJ+KV6cZ9UP9h9oJVpOfDeXz9TxSsvMU/MVwn7QKGnSgkgAmhoAEWdq4LwpnJtvq2HABIfoOiL9dp9XxgBJC5ArH11xl4+pWGA9h4kgHoCtPuAwP8bX6vtd6Yaj5ZAqHknAYRi4xBZWCBk9YGY1gaNv7W5xQpReQUQlrIbQpNeHlAr0/PgvY9UAhDLE/N157rCUnZBxL5ciLQL/HgXr3hvfJsNos5VQ2STZUCtOX8Crt24oQqAME/M153rimy2CNu6sPr8zOolAPHvOMdPzEADs+aNk+oCiOUr6KTYXYAu1HI+KRYgZyuTHiICyA14XjsOMWzl5X0AuQERATSwjC31XR4pGiBRWtkAEBFAbrzdkM0pVQOQWBDhA7MfBhEBNNDEuaG3P2oASDyIbA9AtPx8Hbz6599C21/eUbwwT8xXstZ1sf5Bb1QDUHujZBCR3GhdagNItPkQQTSwXjvWvepSM0CxIlWh/uZEXi8274luPdm3J+oCSODbHEPcbPQmRbfU9+uH6gASs5URRH3sNmsNIIJIAnj4l6loFiCCSMwVF/cSFfd8UDNAokPkhRNrXK73ueLSIkBiQ+RNqzOsPIOCRysAiQ+R9veJ+Hd/DX7stQIQQeThaqvNNrRx1xJAYu8TaQ4i3CS8WO/ReMsO0IID6UtMHXaHUADRjrW7tyeO97/DPEiAIporHDM2r42QDCCj0TicO9nu1IUMnm+EBUjcG7BqX+IPeqXlBkBh9rJvf7R+VSh6qtfr/UQHSGc2++LJxoUtnhx7yfoF13ra7PeEBEiuL6UpFhzWsrivZAx1vvOwsWWe4VgY6g7deuzpWbPRU52zOIgew6ZOnTqSHccaL9Rf5QG6KwZABBFfde7/MpgwVf5uTGsDLCnd/wfm5Tje02GSADQhfsGjCFBw5cFcXEKa2hpFA6irpbV7F0T4BXhjywlBq053++LeG383vLkCgvZsKUMvxxuDAqUCyEc3e3YAOz4eMP6xJ8KaLJ9xEHXYHGJCJEY1UuLEGtsVLs9jLp0SZxwRHrb4iblkBX1Zzk3/sYFPoZe66dNHcN5KApBO5zdqzuRx7DhtbkZSJlsK8okxiMT4xIi45FfMEh/BYXlw/
/Qn1tjx8OB1h9QcgplJq7PRQ95LPykBwslW4MgxY6ay45x5WamlodYj+P/WXGnkhBNrNtMXUyZ8rIwAYhW0M+p8TSc+8kRq4VMy8NzRrQ3ijRM3YXb6goAuqy6EuTuSKtE75uE09JL3VBKAdHyvxJI3NmD06FnsGDT7hbXZz1Tk3gxrLINo1rtNYt8w7aXTEHfFM2G+RtY6sBJIIXy4E7cZyJblUowRfrjDTpeDvmTfrVkbnstBz3jvxvJeDtNJGD58yUNyJ/qPGvUkO84fOWaUYW56cu3iV7LeWWrJux1youi7MFvpXYO1xCGV8JFtQ5WhocQRXFXgWFqRJ44s+Y7g4wWOkLoidq4joo9FmK3kruFk8XfBlfm3FzFP5mzfWMc8wj2f+bxnE3kP/aSsPj2rEE6mf8A0hZ+MzfPz81vEtCRwyoSYiSFLN0yKNCRPCtPLpOAhSJ88wcAULKAMvKS+/sjg5AkhSzeiF8wTPXqDHvFeTeG9C5C6+vSsQr58+cNEJusCAmb7Px44z9/ffwH7eQEPE0lmoRfoCXqDHnFeOT0bwXvoo5MpekL0GNN4JpxYz2CJzmVJP+UfyIAiySfmAfNiDueJ05vxvFeyw3M/RHgj7hE+OZyY/ZDvsZN4TSZJKte4T+S9QE9G8x75KwWenhC5QPLj++pIPtlHeQWSJJVr3B/hvQjgvfHt4ZfiwpXYMF6+JEXI5YdiwRkIKJK8oqCgoKCgoKCgoKCgoKCgoKCg8Cj+Dyqrhq0vA9X1AAAAAElFTkSuQmCC';
var fs = require('fs');
var Readable = require('stream').Readable;
// decode the base64 string into a Buffer
const imgBuffer = Buffer.from(base64, 'base64');
// wrap the buffer in a readable stream and pipe it into a file
var s = new Readable();
s._read = function () {}; // no-op: data is pushed manually below
s.push(imgBuffer);
s.push(null); // signal end of the stream
s.pipe(fs.createWriteStream("test.png"));
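Note that this still decodes the whole base64 string in memory before writing. If the base64 text itself arrives as a stream (for example over HTTP) and you want to decode while writing, a Transform stream can decode it incrementally. This is a minimal sketch, not a built-in API: Base64Decode is a hypothetical helper that buffers leftover characters so each decoded chunk is a whole number of 4-character base64 groups.
const { Transform } = require('stream');
const fs = require('fs');

class Base64Decode extends Transform {
  constructor(options) {
    super(options);
    this.leftover = '';
  }
  _transform(chunk, encoding, callback) {
    const data = this.leftover + chunk.toString('ascii');
    const usable = data.length - (data.length % 4); // only whole 4-char groups decode cleanly
    this.leftover = data.slice(usable);
    callback(null, Buffer.from(data.slice(0, usable), 'base64'));
  }
  _flush(callback) {
    callback(null, Buffer.from(this.leftover, 'base64'));
  }
}

// usage: pipe the base64 source (e.g. an incoming request or file stream) through the decoder
// sourceStream.pipe(new Base64Decode()).pipe(fs.createWriteStream('/tmp/test.png'));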

Just another solution using the base-64 and utf8 packages, in case you're using React Native, which doesn't have a global Buffer object:
const base64 = require('base-64');
const utf8 = require('utf8');

function decodeBase64(base64Str) {
  return utf8.decode(base64.decode(base64Str));
}
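For example, once both packages are installed:
console.log(decodeBase64('aGVsbG8gd29ybGQ=')); // "hello world"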

Related

How to load a protocol buffer binary file in javascript?

I have generated a binary file containing some data using protobuf in Python, and now I need to load the binary file in JavaScript and read its content. I tried to use FileReader() but couldn't get it to work. How can I do this?
You can make use of protobufjs.
Here's an example in JavaScript:
const fs = require('fs');
const protobuf = require('protobufjs');
// Load the binary data
const binaryData = fs.readFileSync('your-protocol-buffer-file.bin');
// Load the root of the protocol buffer definition
// (yourProtoJSON is the JSON descriptor for your .proto file, e.g. generated with pbjs -t json)
const root = protobuf.Root.fromJSON(yourProtoJSON);
// Look up the desired message type
const MyMessageType = root.lookupType('MyMessageType');
// Decode the binary data using the message type
const message = MyMessageType.decode(binaryData);
// The message object is a JavaScript object that represents the data in the binary file
console.log(message);
You will need to have the Protocol Buffer definition (.proto file) that was used to generate the binary file in order to use protobufjs to decode the data.
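If you have the .proto file itself rather than a JSON descriptor, protobufjs can also load it directly. This is a minimal sketch with placeholder file and message names:
const fs = require('fs');
const protobuf = require('protobufjs');

protobuf.load('your-definition.proto').then((root) => {
  // look up the message type by its (package-qualified) name
  const MyMessageType = root.lookupType('MyMessageType');
  const binaryData = fs.readFileSync('your-protocol-buffer-file.bin');
  const message = MyMessageType.decode(binaryData);
  console.log(MyMessageType.toObject(message));
});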
For more information on protobufjs, you can visit https://github.com/protobufjs/protobuf.js.

Convert Raw PNG data to base64 in node js

I have an API that gives me raw PNG data as the response. I want to convert the raw data to a file, or to a base64 string that can be used later. Below is a sample of the response that I get from the API. Can someone tell me how this can be achieved?
(sample omitted: raw binary PNG response data, starting with the PNG signature and IHDR chunk, which is not reproducible as text)
You can use the fs module:
let fs = require('fs');
let writer = fs.createWriteStream('xyz.png');
writer.write(<RAW data here>);
If you are getting the file directly in the response body, then:
let writer = fs.createWriteStream('xyz.png');
let fl = res.body.pipe(writer); // res is your response from the API
fl.on('finish', () => { /* ...things to do... */ });
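The question also asks for a base64 string; assuming the raw PNG bytes are available as a Buffer (for example the accumulated response body), a minimal sketch:
// rawData is assumed to be a Buffer holding the raw PNG bytes from the API
const base64String = rawData.toString('base64');
// ...and back again later, if you need to recreate the file from the stored string
require('fs').writeFileSync('xyz.png', Buffer.from(base64String, 'base64'));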

Javascript: how to parse base64 encoded csv to fetch the filename and actual csv content

I have a base64 string created by encoding a csv file,
const base64 = 'LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLTExNDc2MDgwNjM5MTM4ODk4MTc2NTYwNA0KQ29udGVudC1EaXNwb3NpdGlvbjogZm9ybS1kYXRhOyBuYW1lPSJmaWxlIjsgZmlsZW5hbWU9ImNoYXJ0T2ZBY2NvdW50LmNzdiINCkNvbnRlbnQtVHlwZTogdGV4dC9jc3YNCg0K77u/QWNjb3VudE51bWJlcixBY2NvdW50TmFtZSxEZWR1Y3RhYmlsaXR5DQoxMTExLHRlZWUsMTAwDQoyMjIyMix0ZXN0LDEwMA0KLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLTExNDc2MDgwNjM5MTM4ODk4MTc2NTYwNC0tDQo='
I want to get the name of the file and recreate the same file with that filename. What do I need to use? I am using Node.js here.
If I understand your question right, your base64 variable needs to be decoded. Buffer can do this:
const base64 = 'LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLTExNDc2MDgwNjM5MTM4ODk4MTc2NTYwNA0KQ29udGVudC1EaXNwb3NpdGlvbjogZm9ybS1kYXRhOyBuYW1lPSJmaWxlIjsgZmlsZW5hbWU9ImNoYXJ0T2ZBY2NvdW50LmNzdiINCkNvbnRlbnQtVHlwZTogdGV4dC9jc3YNCg0K77u/QWNjb3VudE51bWJlcixBY2NvdW50TmFtZSxEZWR1Y3RhYmlsaXR5DQoxMTExLHRlZWUsMTAwDQoyMjIyMix0ZXN0LDEwMA0KLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLTExNDc2MDgwNjM5MTM4ODk4MTc2NTYwNC0tDQo=';
// decode base64
const decodedBase64String = Buffer.from(base64, 'base64').toString();
// extract the filename ("chartOfAccount.csv") from the Content-Disposition line
const filename = decodedBase64String.match(/filename="(.*?)"/)[1];
console.log(decodedBase64String);
With some regex you can extract the filename ("chartOfAccount.csv") from decodedBase64String, and then use fs to write the content back out to a file with that name, as sketched below.
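A minimal sketch, assuming the decoded string is a multipart/form-data body with a single file part (part headers, a blank line, the CSV content, then the closing boundary):
const fs = require('fs');

const csvContent = decodedBase64String
  .split('\r\n\r\n')[1]   // everything after the part headers
  .split('\r\n--')[0];    // drop the closing boundary

fs.writeFile(filename, csvContent, (err) => {
  if (err) throw err;
  console.log('wrote ' + filename);
});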

Piping to same Writable stream twice via different Readable stream

I am trying to concatenate a string and a Readable stream (the readable stream points to a file which may hold its data in multiple chunks, i.e. the file may be large) into one writable stream, so that the result can finally be written to a destination.
I am encrypting the string and content of the file and then applying zlib compression on them, then finally I want to pipe them to the writable stream.
To achieve this, I can:
a) Convert the file content into a string, concatenate the two strings, then encrypt, compress and finally pipe the result into the writable stream. But this is not possible, because the file may be large, so I can't convert its content to a string.
b) First encrypt and compress the string, convert it into a stream, and pipe that into the writable stream; after that is completely done, pipe the file contents into the same writable stream.
To do so, I have written this:
var crypto = require('crypto'),
    algorithm = 'aes-256-ctr',
    password = 'd6FAjkdlfAjk';
var stream = require('stream');
var fs = require('fs');
var zlib = require('zlib');
// aes-256-ctr needs a 32-byte key and a 16-byte IV
var key = crypto.scryptSync(password, 'salt', 32);
var iv = crypto.randomBytes(16);
var hexiv = iv.toString('hex');
// input file
var r = fs.createReadStream('file.txt');
// zip content
var zip = zlib.createGzip();
// encrypt content
var encrypt = crypto.createCipheriv(algorithm, key, iv);
var w = fs.createWriteStream('file.out');
// the string contents are converted into a stream so that they can be piped
var Readable = stream.Readable;
var s = new Readable();
s._read = function noop() {};
s.push(hexiv + ':');
s.push(null);
s.pipe(zlib.createGzip()).pipe(w);
// start the second pipe when the 'end' event is emitted
s.on('end', function() {
    r.pipe(zip).pipe(encrypt).pipe(w);
});
What I observe is:
Only the first pipe completes successfully, i.e. the string is written to file.out. The second pipe makes no difference to the output destination. At first I thought the reason might be the asynchronous behavior of pipe, so I pipe the file content only after the first piping has finished. But I still don't get the desired output.
I want to know why this is happening and the appropriate way of doing this.
Found the reason why the second piping to the writable stream w wasn't working.
When the first .pipe(w) finishes, the writable at w is also automatically closed.
So instead of using s.pipe(zlib.createGzip()).pipe(w); I should have used:
s.pipe(zlib.createGzip()).pipe(w, {end: false});
The {end: false} is passed to pipe() as the second argument, telling it not to close the writable when the piping is done.
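Putting it together with the variables from the question, the corrected sequence looks roughly like this:
// keep w open once the string part has been written
s.pipe(zlib.createGzip()).pipe(w, { end: false });

s.on('end', function() {
    // the default end: true on the last pipe closes w when the file part is done
    r.pipe(zip).pipe(encrypt).pipe(w);
});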

Saving byteArray to pdf file in Titanium

I have a SOAP API that returns a file divided into chunks, encoded as several base64 strings.
I'm not able to save it to the file system without breaking it.
This is the pastebin of the whole encoded file, as is, once I download and chain the responses.
What is the way to save it correctly?
I tried many ways:
var f = Ti.FileSystem.getFile(Ti.FileSystem.tempDirectory, 'test.pdf');
...
var blobStream = Ti.Stream.createStream({ source: fileString, mode: Ti.Stream.MODE_READ });
var newBuffer = Ti.createBuffer({ length: fileString.length });
f.write(fileString);
or
var data = Ti.Utils.base64decode(fileString);
var blobStream = Ti.Stream.createStream({ source: data, mode: Ti.Stream.MODE_READ });
var newBuffer = Ti.createBuffer({ length: data.length });
var bytes = blobStream.read(newBuffer);
f.write(fileString);
or
var data = Ti.Utils.base64decode(fileString);
var blobStream = Ti.Stream.createStream({ source: data, mode: Ti.Stream.MODE_READ });
var newBuffer = Ti.createBuffer({ length: data.length });
var bytes = blobStream.read(newBuffer);
f.write(bytes);
but I don't understand which one is the right path.
Do I have to convert the string back to a byte array on my own?
What is the right way to save it?
Do I have to create a buffer from the string or ...?
I think the base64 encoding of the file is invalid or incomplete; I've tested it using bash and the base64 utils. You can perform these steps.
Copy and paste the base64 string into a file called pdf.base64, then run this command:
cat pdf.base64 | base64 --decode >> out.pdf
the output file is not a valid pdf.
You can try to encode and decode a valid pdf file to take a look at the generated binary:
cat validfile.pdf | base64 | base64 --decode >> anothervalidfile.pdf
Check that you are chaining the chunks correctly, or simply get on a phone call with the person who built the SOAP API.
Before you start downloading your file, you need to create the file stream to write to; writing to a blob is not the way to go:
// Step 1
var outFileStream = Ti.Filesystem.getFile('outfile.bin').open(Ti.Filesystem.MODE_WRITE);
After creating your HTTPClient or socket stream, when you receive a chunk of base64 data from the server, you need to put the decoded data into a Titanium.Buffer. This would probably go into your onload or onstream callback in an HTTPClient:
// Step 2
var rawDecodedFileChunk = Ti.Utils.base64decode(fileString);
var outBuffer = Ti.createBuffer({
    // byteOrder: ...  // may need to set this
    // type: ...       // may also need to set this to match the data
    value: rawDecodedFileChunk
});
Finally you can write the data out to the file stream:
// Step 3
var bytesWritten = outFileStream.write(outBuffer); // writes entire buffer to stream
Ti.API.info("Bytes written:" + bytesWritten); // should match data length
if(outBuffer.length !== bytesWritten) {
Ti.API.error("Not all bytes written!");
}
Generally errors come from having the wrong byte order or type of data, or writing in the wrong order. Obviously, this all depends on the server sending the data in the correct order and it being valid!
You may also want to consider the pump version of this (Ti.Stream.pump), which lets you transfer from the input stream to the output file stream, minimizing the load.
