Fake a Node.js Readable file stream from a JavaScript object - javascript

I want to create a "fake file" from a JavaScript object, as the library I am using expects a file as input, but I have an object in memory instead.
So it is expecting code something like this:
var file = fs.readFileSync('{/path/to/file}');
lib.addDocument(config, file);
I want to create a fake file from an object I have, called payload, and send that instead. My closest attempt so far looks like this:
var fake_file = new stream.Readable({ objectMode: true });
fake_file.push(msg.payload);
fake_file.push(null);
lib.addDocument(config, fake_file);
I feel I am close, but I can't quite get it to work. The current error is:
{ Error: Unexpected end of multipart data

For Node v10 and above:
var Readable = require('stream').Readable
var obj = { objectMode : true};
var rStream = new Readable
rStream.push(JSON.stringify(obj));
rStream.push(null); //EOF
lib.addDocument(config, rStream)
For older versions below v10 and above v4:
var Readable = require('stream').Readable
var obj = { objectMode : true};
var chars=JSON.stringify(obj).split('');
function read(n){this.push(chars.shift())}
var rStream = new Readable({read:read});
lib.addDocument(config, rStream)

Your code is missing an Object -> String conversion. You may use JSON.stringify for that:
lib.addDocument(config, JSON.stringify(msg.payload));
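Combining the two answers above into one sketch (msg.payload, lib.addDocument and config come from the question; whether this alone satisfies the library depends on what addDocument does with the stream, since the multipart error may also involve filename or length metadata that is not shown here):
var Readable = require('stream').Readable;

var fake_file = new Readable({ read: function () {} }); // no-op read; data is pushed up front
fake_file.push(JSON.stringify(msg.payload)); // push a string chunk, not a raw object
fake_file.push(null);                        // signal EOF
lib.addDocument(config, fake_file);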

Related

How do I write an LZ compressed string to a text file using JXA?

I am trying to write a JXA script in Apple's Script Editor that compresses a string using the LZ algorithm and writes it to a text (JSON) file:
var story = "Once upon a time in Silicon Valley..."
var storyC = LZString.compress(story)
var data_to_write = "{\x22test\x22\x20:\x20\x22"+storyC+"\x22}"
app.displayAlert(data_to_write)
var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`
writeTextToFile(data_to_write, file, true)
Everything works, except that the LZ compressed string is just transformed to a set of "?" by the time it reaches the output file, test.json.
It should look like:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
Instead it looks like:
{"test" : "????????????????????"}
I have a feeling the conversion is happening in the app.write command used by the writeTextToFile() function (which I pulled from an example in Apple's Mac Automation Scripting Guide):
var app = Application.currentApplication()
app.includeStandardAdditions = true
function writeTextToFile(text, file, overwriteExistingContent) {
    try {
        // Convert the file to a string
        var fileString = file.toString()
        // Open the file for writing
        var openedFile = app.openForAccess(Path(fileString), { writePermission: true })
        // Clear the file if content should be overwritten
        if (overwriteExistingContent) {
            app.setEof(openedFile, { to: 0 })
        }
        // Write the new content to the file
        app.write(text, { to: openedFile, startingAt: app.getEof(openedFile) })
        // Close the file
        app.closeAccess(openedFile)
        // Return a boolean indicating that writing was successful
        return true
    }
    catch(error) {
        try {
            // Close the file
            app.closeAccess(file)
        }
        catch(error) {
            // Report the error if closing failed
            console.log(`Couldn't close file: ${error}`)
        }
        // Return a boolean indicating that writing was unsuccessful
        return false
    }
}
Is there a substitute command for app.write that maintains the LZ compressed string / a better way to accomplish what I am trying to do?
In addition, I am using the readFile() function (also from the Scripting Guide) to load the LZ string back into the script:
function readFile(file) {
    // Convert the file to a string
    var fileString = file.toString()
    // Read the file and return its contents
    return app.read(Path(fileString))
}
But rather than returning:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
It is returning:
"{\"test\" : \"㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ\"}"
Does anybody know a fix for this too?
I know that it is possible to use Cocoa in JXA scripts, so maybe the solution lies therein?
I am just getting to grips with JavaScript so I'll admit trying to grasp Objective-C or Swift is way beyond me right now.
I look forward to any solutions and/or pointers that you might be able to provide me. Thanks in advance!
After some further Googling, I came across these two posts:
How can I write UTF-8 files using JavaScript for Mac Automation?
read file as class utf8
I have thus altered my script accordingly.
writeTextToFile() now looks like:
function writeTextToFile(text, file) {
    // source: https://stackoverflow.com/a/44293869/11616368
    var nsStr = $.NSString.alloc.initWithUTF8String(text)
    var nsPath = $(file).stringByStandardizingPath
    var successBool = nsStr.writeToFileAtomicallyEncodingError(nsPath, false, $.NSUTF8StringEncoding, null)
    if (!successBool) {
        throw new Error("function writeFile ERROR:\nWrite to File FAILED for:\n" + file)
    }
    return successBool
};
While readFile() looks like:
ObjC.import('Foundation')
const readFile = function (path, encoding) {
    // source: https://github.com/JXA-Cookbook/JXA-Cookbook/issues/25#issuecomment-271204038
    const pathString = path.toString()
    !encoding && (encoding = $.NSUTF8StringEncoding)
    const fm = $.NSFileManager.defaultManager
    const data = fm.contentsAtPath(pathString)
    const str = $.NSString.alloc.initWithDataEncoding(data, encoding)
    return ObjC.unwrap(str)
};
Both use Objective-C to overcome app.write and app.read's inability to handle UTF-8.
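For reference, here is a minimal sketch of the round trip using the two helpers above (it assumes the LZString library has already been loaded into the JXA script, which is not shown, and reuses the names from the original snippet):
var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`

// write the compressed story as UTF-8
var story = "Once upon a time in Silicon Valley..."
var data_to_write = JSON.stringify({ test: LZString.compress(story) })
writeTextToFile(data_to_write, file)

// read it back, parse the JSON, and decompress
var parsed = JSON.parse(readFile(file))
var original = LZString.decompress(parsed.test)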

NodeJS stream parse and write json line to line upon Promise result

I have a large JSON file that looks like this:
[
{"name": "item1"},
{"name": "item2"},
{"name": "item3"}
]
I want to stream this file (pretty easy so far), run an asynchronous function (that returns a promise) for each line, and edit that line when the promise resolves or rejects.
The resulting input file, part-way through processing, could look like:
[
{"name": "item1", "response": 200},
{"name": "item2", "response": 404},
{"name": "item3"} // not processed yet
]
I do not wish to create another file; I want to edit the SAME FILE on the fly (if possible!).
Thanks :)
I don't really answer the question, but I don't think it can be answered in a satisfactory way anyway, so here are my 2 cents.
I assume that you know how to stream line by line, and run the function, and that the only problem you have is editing the file that you are reading from.
Consequences of inserting
It is not possible to natively insert data into any file (which is what you want to do by changing the JSON live). A file can only grow at its end.
So inserting 10 bytes of data at the beginning of a 1GB file means that you need to write 1GB to the disk (to move all the data 10 bytes further).
Your filesystem does not understand JSON, and just sees that you are inserting bytes in the middle of a big file so this is going to be very slow.
So, yes, it is possible to do:
Write a wrapper over the file API in NodeJS with an insert() method.
Then write some more code to be able to know where to insert bytes into a JSON file, without loading the whole file and without producing invalid JSON at the end.
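To make the cost concrete, here is a hedged sketch of what a naive insert() helper ends up doing (hypothetical code for illustration only): everything after the insertion point has to be rewritten.
var fs = require('fs');

// Naive insert: rewrites every byte after `position`, so cost grows with file size.
function insertIntoFile(path, position, text) {
  var original = fs.readFileSync(path);      // whole file in memory
  var before = original.slice(0, position);
  var after = original.slice(position);      // everything that must be shifted
  fs.writeFileSync(path, Buffer.concat([before, Buffer.from(text), after]));
}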
Now I would not recommend it :)
=> Read this question: Is it possible to prepend data to a file without rewriting?
Why do it then?
I assume that you want to either:
Be able to kill your process at any time, and easily resume work by reading the file again.
Retry partially treated files to fill only the missing bits.
First solution: Use a database
Abstracting the work that needs to be done to live-edit files at arbitrary places is the whole reason databases exist.
They all exist only to abstract the magic that is behind UPDATE mytable SET name = 'a_longer_name_that_the_name_that_was_there_before' where name = 'short_name'.
Have a look at LevelUP/Down, sqlite, etc...
They will abstract all the magic that needs to be done in your JSON file!
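For example, a minimal sketch with the level package (this uses the older callback-style level API; the key/value layout is my own assumption for illustration):
var level = require('level');
var db = level('./items-db');

// store each item once, keyed by name
db.put('item1', JSON.stringify({ name: 'item1' }), function (err) {
  if (err) return console.error(err);
  // later, "editing in place" is just another put on the same key
  db.put('item1', JSON.stringify({ name: 'item1', response: 200 }), function (err) {
    if (err) console.error(err);
  });
});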
Second solution: Use multiple files
When you stream your file, write two new files!
One that contains the current position in the input file and the lines that need to be retried.
The other one contains the expected result.
You will also be able to kill your process at any time and restart.
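A rough sketch of that bookkeeping (the file names and progress format here are arbitrary assumptions):
var fs = require('fs');

// append each finished line to the result file and record how far we got
function saveProgress(lineNumber, processedLine) {
  fs.appendFileSync('./out.ldjson', processedLine + '\n');
  fs.writeFileSync('./progress.json', JSON.stringify({ lastProcessedLine: lineNumber }));
}

// on restart, read progress.json and skip the lines that were already processed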
According to this answer, writing to the same file while reading is not reliable. As a commenter there says, it is better to write to a temporary file, and then delete the original and rename the temp file over it.
To create a stream of lines you can use byline. Then for each line, apply some operation and pipe it out to the output file.
Something like this:
var fs = require('fs');
var stream = require('stream');
var util = require('util');
var LineStream = require('byline').LineStream;
function Modify(options) {
  stream.Transform.call(this, options);
}
util.inherits(Modify, stream.Transform);

Modify.prototype._transform = function(chunk, encoding, done) {
  var self = this;
  setTimeout(function() {
    // your modifications here, note that the exact regex depends on
    // your json format and is probably the most brittle part of this
    var modifiedChunk = chunk.toString();
    if (modifiedChunk.search('response:[^,}]+') === -1) {
      modifiedChunk = modifiedChunk
        .replace('}', ', response: ' + new Date().getTime() + '}');
    }
    self.push(modifiedChunk + '\n'); // re-add the newline stripped by the line stream
    done();
  }, Math.random() * 2000 + 1000); // to simulate an async modification
};

var inPath = './data.json';
var outPath = './out.txt';

fs.createReadStream(inPath)
  .pipe(new LineStream())
  .pipe(new Modify())
  .pipe(fs.createWriteStream(outPath))
  .on('close', function() {
    // replace input with output
    fs.unlink(inPath, function() {
      fs.rename(outPath, inPath, function(err) {
        if (err) console.error(err);
      });
    });
  });
Note that the above results in only one async operation happening at a time. You could also save the modifications to an array and, once all of them are done, write the lines from the array to a file, like this:
var fs = require('fs');
var stream = require('stream');
var LineStream = require('byline').LineStream;
var modifiedLines = [];
var modifiedCount = 0;
var inPath = './data.json';
var allModified = new Promise(function(resolve, reject) {
  fs.createReadStream(inPath).pipe(new LineStream()).on('data', function(chunk) {
    modifiedLines.length++;
    var index = modifiedLines.length - 1;
    setTimeout(function() {
      // your modifications here
      var modifiedChunk = chunk.toString();
      if (modifiedChunk.search('response:[^,}]+') === -1) {
        modifiedChunk = modifiedChunk
          .replace('}', ', response: ' + new Date().getTime() + '}');
      }
      modifiedLines[index] = modifiedChunk;
      modifiedCount++;
      if (modifiedCount === modifiedLines.length) {
        resolve();
      }
    }, Math.random() * 2000 + 1000);
  });
}).then(function() {
  fs.writeFile(inPath, modifiedLines.join('\n'), function(err) {
    if (err) console.error(err);
  });
}).catch(function(reason) {
  console.error(reason);
});
If instead of lines you wish to stream chunks of valid json which would be a more robust approach, take a look at JSONStream.
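For completeness, a hedged sketch with JSONStream (assuming the input really is a JSON array like the one in the question): it parses each array element, adds the response field, and writes a valid JSON array to a separate output file. The synchronous assignment stands in for your promise-based work.
var fs = require('fs');
var JSONStream = require('JSONStream');

var out = JSONStream.stringify();            // emits a valid JSON array
out.pipe(fs.createWriteStream('./out.json'));

fs.createReadStream('./data.json')
  .pipe(JSONStream.parse('*'))               // one 'data' event per array element
  .on('data', function (item) {
    item.response = 200;                     // your async work would go here
    out.write(item);
  })
  .on('end', function () {
    out.end();
  });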
As mentioned in the comment, the file you have is not proper JSON, although it is valid in JavaScript. In order to generate proper JSON, JSON.stringify() could be used. I also think it would make life difficult for others to parse nonstandard JSON, therefore I would recommend producing a new output file instead of keeping the original one.
However, it is still possible to parse the original file as JSON. This can be done via eval('(' + procline + ')');, however it is not secure to take external data into Node.js like this.
const fs = require('fs');
const readline = require('readline');
const fr = fs.createReadStream('file1');
const rl = readline.createInterface({
  input: fr
});

rl.on('line', function (line) {
  if (line.match(new RegExp("\{name"))) {
    var procline = "";
    if (line.trim().split('').pop() === ',') {
      procline = line.trim().substring(0, line.trim().length - 1);
    } else {
      procline = line.trim();
    }
    var lineObj = eval('(' + procline + ')');
    lineObj.response = 200;
    console.log(JSON.stringify(lineObj));
  }
});
The output would be like this:
{"name":"item1","response":200}
{"name":"item2","response":200}
{"name":"item3","response":200}
Which is line-delimited JSON (LDJSON) and could be useful for streaming stuff, without the need for leading and trailing [, ], or ,. There is an ldjson-stream package for it as well.
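For instance, reading such output back is one parsed object per record with ldjson-stream (the parse() API shown here is hedged; the ndjson package exposes an equivalent interface if you prefer it):
var fs = require('fs');
var ldj = require('ldjson-stream');

fs.createReadStream('./out.ldjson')
  .pipe(ldj.parse())
  .on('data', function (obj) {
    console.log(obj.name, obj.response); // each line arrives as a parsed object
  });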

Stream in NodeJS

I need some help to understand how streams work in NodeJS.
Let me explain: I need to write a module which calls a UNIX process (with spawn), and I want to redirect the stdout of this process to a Readable stream.
I want this module to export the Readable stream so that another module can read from it.
To do this, I have written a little piece of code:
var spawn = require('child_process').spawn
var Duplex = require('stream').Duplex;
var stream = new Duplex;
var start = function() {
  ps = spawn('mycmd', [/*... args ...*/]);
  ps.stdout.pipe(stream);
};
exports.stream = stream;
exports.start = start;
But if I use this module, I get an exception which says that the stream doesn't implement the _read method.
Can you help me with this problem ?
Thanks in advance.
[EDIT] I have tried the solution of creating a Stream object, but that doesn't work; here is the code:
var spawn = require('child_process').spawn;
var Stream = require('stream');
var ps = null;
var audio = new Stream;
audio.readable = audio.writable = true;
var start = function() {
  if (ps == null) {
    ps = spawn('mycmd', []);
    ps.stdout.pipe(stream);
  }
};

var stop = function() {
  if (ps) {
    ps.kill();
    ps = null;
  }
};
exports.stream = stream;
exports.start = start;
exports.stop = stop;
But when I try to listen to the stream, I encounter a new error:
_stream_readable.js:583
var written = dest.write(chunk);
^
TypeError: Object #<Stream> has no method 'write'
Most of Node's Stream classes aren't meant to be used directly, but as the base of a custom type:
Note that stream.Duplex is an abstract class designed to be extended with an underlying implementation of the _read(size) and _write(chunk, encoding, callback) methods as you would with a Readable or Writable stream class.
One notable exception is stream.PassThrough, which is a simple echo stream implementation.
var PassThrough = require('stream').PassThrough;
var stream = new PassThrough;
Also note that ps will be a global, making it directly accessible in all other modules.
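Putting that together, a sketch of the module built around PassThrough (mycmd is the placeholder command from the question):
var spawn = require('child_process').spawn;
var PassThrough = require('stream').PassThrough;

var stream = new PassThrough();
var ps = null;               // keep it local to the module, not an implicit global

var start = function() {
  if (ps === null) {
    ps = spawn('mycmd', []);
    ps.stdout.pipe(stream);  // PassThrough already implements _read and _write
  }
};

var stop = function() {
  if (ps) {
    ps.kill();
    ps = null;
  }
};

exports.stream = stream;
exports.start = start;
exports.stop = stop;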
If you simply want to use Stream then you should do:
var stream = new Stream;
stream.readable = stream.writable = true;
Duplex is meant for stream implementers; methods like _read and _write need to be implemented for it.
[Update]
OK, you have a data source from the stdout. You will need a write function; use this:
stream.write = function(data){this.emit('data', data);};

WinJS, display image from a byte array?

I'm looking for a way to display a JPEG image stored in a byte array. This is a Windows 8 Store App built in Javascript. The byte array is returned from a C# WinRT component. Its type in C# is byte[].
Basically, what I want is to get an object on which I can call:
URL.createObjectURL(bytearray, {oneTimeOnly: true});
Currently this generates a runtime error because the array interface is not supported.
Thanks in advance.
I discovered a far easier method.
var blob = new Blob([bytes], {type: "image/jpg"});
var url = URL.createObjectURL(blob, { oneTimeOnly: true });
Blob is actually supported directly by URL.createObjectURL. The only catch is that you have to specify a MIME type to identify the buffer format, which in my case is possible.
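A slightly fuller sketch of that approach (getImageBytes and the img element are hypothetical stand-ins for your WinRT call and markup):
// byte[] returned from the C# WinRT component arrives in JS as an array of numbers
var bytes = component.getImageBytes();                  // hypothetical WinRT call
var blob = new Blob([new Uint8Array(bytes)], { type: "image/jpg" });
var url = URL.createObjectURL(blob, { oneTimeOnly: true });
document.getElementById("preview").src = url;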
The current solution I found is quite roundabout. Hence, before giving the solution for bytes to URL, a few comments:
If there is a better way to get a DOM stream/blob object from bytes, try it out.
If you control the WinRT component, check if you can return a StorageFile object. In that case the code simplifies to:
var file = MSApp.createFileFromStorageFile(storageFile);
var url = URL.createObjectURL(file, { oneTimeOnly: true });
Now, the solution:
var bytes;          // the byte array from the WinRT component
var memoryStream;
var streams = Windows.Storage.Streams;

// get IBuffer out of bytes
var dataWriter = new streams.DataWriter();
dataWriter.writeBytes(bytes);
var buffer = dataWriter.detachBuffer();
dataWriter.close();

// get IInputStream out of IBuffer
memoryStream = new streams.InMemoryRandomAccessStream();
memoryStream.writeAsync(buffer).then(function onmemorystreamwritecomplete() {
    var inputStream = memoryStream.getInputStreamAt(0);

    // get DOM MSStream from IInputStream
    var domMStream = MSApp.createStreamFromInputStream('image/jpg', inputStream);
    var url = URL.createObjectURL(domMStream, { oneTimeOnly: true });
    memoryStream.close();
    test3Img.setAttribute('src', url);
});

How to create streams from string in Node.Js?

I am using a library, ya-csv, that expects either a file or a stream as input, but I have a string.
How do I convert that string into a stream in Node?
As #substack corrected me in #node, the new streams API in Node v0.10 makes this easier:
const Readable = require('stream').Readable;
const s = new Readable();
s._read = () => {}; // redundant? see update below
s.push('your text here');
s.push(null);
… after which you can freely pipe it or otherwise pass it to your intended consumer.
It's not as clean as the resumer one-liner, but it does avoid the extra dependency.
(Update: in v0.10.26 through v9.2.1 so far, a call to push directly from the REPL prompt will crash with a not implemented exception if you didn't set _read. It won't crash inside a function or a script. If inconsistency makes you nervous, include the noop.)
Do not use Jo Liss's resumer answer. It will work in most cases, but in my case it cost me a good 4 or 5 hours of bug hunting. There is no need for third-party modules to do this.
NEW ANSWER:
var Readable = require('stream').Readable
var s = new Readable()
s.push('beep') // the string you want
s.push(null) // indicates end-of-file basically - the end of the stream
This should be a fully compliant Readable stream. See here for more info on how to use streams properly.
OLD ANSWER:
Just use the native PassThrough stream:
var stream = require("stream")
var a = new stream.PassThrough()
a.write("your string")
a.end()
a.pipe(process.stdout) // piping will work as normal
/*stream.on('data', function(x) {
// using the 'data' event works too
console.log('data '+x)
})*/
/*setTimeout(function() {
// you can even pipe after the scheduler has had time to do other things
a.pipe(process.stdout)
},100)*/
a.on('end', function() {
console.log('ended') // the end event will be called properly
})
Note that the 'close' event is not emitted (which is not required by the stream interfaces).
From Node 10.17, stream.Readable has a from method to easily create streams from any iterable (which includes array literals):
const { Readable } = require("stream")
const readable = Readable.from(["input string"])
readable.on("data", (chunk) => {
console.log(chunk) // will be called once with `"input string"`
})
Note that at least between 10.17 and 12.3, a string is itself an iterable, so Readable.from("input string") will work, but emit one event per character. Readable.from(["input string"]) will emit one event per item in the array (in this case, one item).
Also note that in later Node versions (probably 12.3, since the documentation says the function was changed then), it is no longer necessary to wrap the string in an array.
https://nodejs.org/api/stream.html#stream_stream_readable_from_iterable_options
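A small demonstration of the difference described above (the per-character behaviour applies only to the older versions mentioned):
const { Readable } = require("stream")

// one 'data' event for the whole string
Readable.from(["input string"]).on("data", (chunk) => console.log("array:", chunk))

// on the older versions noted above, this emits one event per character
Readable.from("input string").on("data", (chunk) => console.log("string:", chunk))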
Just create a new instance of the stream module and customize it according to your needs:
var Stream = require('stream');
var stream = new Stream();
stream.pipe = function(dest) {
  dest.write('your string');
  return dest;
};
stream.pipe(process.stdout); // in this case the terminal, change to ya-csv
or
var Stream = require('stream');
var stream = new Stream();
stream.on('data', function(data) {
  process.stdout.write(data); // change process.stdout to ya-csv
});
stream.emit('data', 'this is my string');
Edit: Garth's answer is probably better.
My old answer text is preserved below.
To convert a string to a stream, you can use a paused through stream:
through().pause().queue('your string').end()
Example:
var through = require('through')
// Create a paused stream and buffer some data into it:
var stream = through().pause().queue('your string').end()
// Pass stream around:
callback(null, stream)
// Now that a consumer has attached, remember to resume the stream:
stream.resume()
There's a module for that: https://www.npmjs.com/package/string-to-stream
var str = require('string-to-stream')
str('hi there').pipe(process.stdout) // => 'hi there'
Another solution is passing a read function to the constructor of Readable (cf. the stream Readable options documentation):
var s = new Readable({
  read(size) {
    this.push("your string here")
    this.push(null)
  }
});
You can then use s.pipe, for example, as in the sketch below:
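A minimal end-to-end version of that (piping to stdout just for demonstration):
var Readable = require('stream').Readable;

var s = new Readable({
  read(size) {
    this.push("your string here");
    this.push(null);
  }
});

s.pipe(process.stdout); // prints "your string here"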
In CoffeeScript:
class StringStream extends Readable
  constructor: (@str) ->
    super()

  _read: (size) ->
    @push @str
    @push null
use it:
new StringStream('text here').pipe(stream1).pipe(stream2)
I got tired of having to re-learn this every six months, so I just published an npm module to abstract away the implementation details:
https://www.npmjs.com/package/streamify-string
This is the core of the module:
const Readable = require('stream').Readable;
const util = require('util');
function Streamify(str, options) {
  if (!(this instanceof Streamify)) {
    return new Streamify(str, options);
  }
  Readable.call(this, options);
  this.str = str;
}

util.inherits(Streamify, Readable);

Streamify.prototype._read = function (size) {
  var chunk = this.str.slice(0, size);
  if (chunk) {
    this.str = this.str.slice(size);
    this.push(chunk);
  }
  else {
    this.push(null);
  }
};

module.exports = Streamify;
str is the string that must be passed to the constructor upon invocation, and will be output by the stream as data. options are the typical options that may be passed to a stream, per the documentation.
According to Travis CI, it should be compatible with most versions of node.
Here's a tidy solution in TypeScript:
import { Readable } from 'stream'
class ReadableString extends Readable {
  private sent = false

  constructor(
    private str: string
  ) {
    super();
  }

  _read() {
    if (!this.sent) {
      this.push(Buffer.from(this.str));
      this.sent = true
    }
    else {
      this.push(null)
    }
  }
}
const stringStream = new ReadableString('string to be streamed...')
In NodeJS, you can create a readable stream in a few ways:
SOLUTION 1
You can do it with fs module. The function fs.createReadStream() allows you to open up a readable stream and all you have to do is pass the path of the file to start streaming in.
const fs = require('fs');
const readable_stream = fs.createReadStream('file_path');
SOLUTION 2
If you don't want to create a file, you can create an in-memory stream and do something with it (for example, upload it somewhere). You can do this with the stream module. You can import Readable from the stream module and create a readable stream. When creating the object, you can also implement the read() method, which is used to read the data out of the internal buffer. If no data is available to be read, null is returned. The optional size argument specifies a specific number of bytes to read. If the size argument is not specified, all of the data contained in the internal buffer will be returned.
const Readable = require('stream').Readable;
const readable_stream = new Readable({
  read(size) {
    // ...
  }
});
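For instance, a minimal sketch where the stream emits one fixed string and then ends (the string itself is just a placeholder):
const Readable = require('stream').Readable;

let sent = false;
const readable_stream = new Readable({
  read(size) {
    if (!sent) {
      this.push('some in-memory data');
      sent = true;
    } else {
      this.push(null); // nothing left to read
    }
  }
});

readable_stream.pipe(process.stdout);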
SOLUTION 3
When you are fetching something over the network, it can be fetched as a stream (for example, you are fetching a PDF document from some API).
const axios = require('axios');

// inside an async function
const response = await axios({
  method: 'get',
  url: "pdf_resource_url",
  responseType: 'stream'
});
const readable_stream = response.data;
SOLUTION 4
Third party packages can support creating of streams as a feature. That is a way with aws-sdk package that is usually used for uploading files to S3.
const file = s3.getObject(params).createReadStream();
JavaScript is duck-typed, so if you just copy a readable stream's API, it'll work just fine. In fact, you can probably leave most of those methods unimplemented or as stubs; all you need to implement is what the library uses. You can use Node's pre-built EventEmitter class to deal with events, too, so you don't have to implement addListener and such yourself.
Here's how you might implement it in CoffeeScript:
class StringStream extends require('events').EventEmitter
  constructor: (@string) -> super()

  readable: true
  writable: false

  setEncoding: -> throw 'not implemented'
  pause: ->   # nothing to do
  resume: ->  # nothing to do
  destroy: -> # nothing to do
  pipe: -> throw 'not implemented'

  send: ->
    @emit 'data', @string
    @emit 'end'
Then you could use it like so:
stream = new StringStream someString
doSomethingWith stream
stream.send()
