BrowserFS not writing data to file? - javascript

I am trying to write a buffer to a file in localStorage using BrowserFS.
This is a simplified version of the code I am executing:
// Simple helper that creates a buffer filled with `value`
function createFilledBuffer(size, value) {
  const b = Buffer.alloc(size);
  b.fill(value);
  return b;
}

// Configure a localStorage-backed FS instance
BrowserFS.configure({
  fs: "LocalStorage"
}, function (e) {
  if (e) {
    console.error('BrowserFS error:', e);
    return;
  }
  // Get BrowserFS's fs module
  const fs = BrowserFS.BFSRequire('fs');
  const fileName = '/demo.txt';
  fs.exists(fileName, async (exists) => {
    let file;
    // Open the file, creating it if it does not exist yet
    if (!exists) {
      file = fs.openSync(fileName, 'w+');
    } else {
      file = fs.openSync(fileName, 'r+');
    }
    // Write to file -> here is the problem?
    // Fill the first two "sectors" of the file with 0s
    const sector = 4096;
    // Write first sector
    const bw1 = fs.writeSync(file, createFilledBuffer(sector, 0), 0, sector, 0);
    // Write second sector
    const bw2 = fs.writeSync(file, createFilledBuffer(sector, 0), 0, sector, sector);
    console.log('Bytes written to file: ', bw1, bw2);
  });
});
The console output is Bytes written to file: 4096 4096, suggesting that the data was written successfully, but when I check the size of '/demo.txt' using fs.stat, it is still 0 bytes.
Later in my code I also try to write a buffer with real data (not just 0s) into the file at specific offsets, like:
const buffer = ...;
const bytesWritten = fs.writeSync(file, buffer, 0, buffer.length, 7 * sector)
which also reports that it has written buffer.length bytes (= 481), but according to fs.stat, the file still contains 0 bytes.
What do I have to do to successfully write the bytes to the file?
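One thing worth experimenting with (an assumption on my part, since the post never shows the descriptor being closed): BrowserFS may keep writes buffered on the open file descriptor, so closing it right after the writes, before calling fs.stat, could make the data visible. A minimal sketch:

// Hypothetical sketch: flush buffered writes by closing the descriptor
// right after the writeSync calls, then re-check the size
// (untested assumption about BrowserFS's LocalStorage backend).
fs.closeSync(file);
fs.stat(fileName, (err, stats) => {
  if (err) throw err;
  console.log('Size after close:', stats.size); // 8192 expected if the writes persisted
});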

Related

Flushing files with WebKit's filesystem API in Safari

I am trying to use the filesystem API to create permanent files, and to write and read data from them.
Although I succeeded in creating the file and writing data to it, after calling flush() the file becomes empty (and its size is 0).
The files that I created exist and I can still see them in a different run of Safari, but the data is lost and the files are all 0-sized.
Even if I try to read the file just after writing to it and flushing, the data is lost and its size returns to 0.
Does anybody know what I am doing wrong?
I tried running this:
console.log("Starting");
async function files() {
// Set a Message
const message = "Thank you for reading this.";
const fileName = "Draft.txt";
// Get handle to draft file
const root = await navigator.storage.getDirectory();
const draftHandle = await root.getFileHandle(fileName, { create: true });
console.log("File Name: ", fileName);
// Get sync access handle
const accessHandle = await draftHandle.createSyncAccessHandle();
// Get size of the file.
const fileSize = await accessHandle.getSize();
console.log("File Size: ", fileSize); // getting 0 here
// Read file content to a buffer.
const buffer = new DataView(new ArrayBuffer(fileSize));
const readBuffer = accessHandle.read(buffer, { at: 0 });
console.log("Read: ", readBuffer); // getting 0 here because the file was just created
// Write the message to the file.
const encoder = new TextEncoder();
const encodedMessage = encoder.encode(message);
const writeBuffer = accessHandle.write(encodedMessage, { at: readBuffer });
console.log("Write: ", writeBuffer); // writing 27 bytes here, succeeding
// Persist changes to disk.
accessHandle.flush();
// Always close FileSystemSyncAccessHandle if done.
accessHandle.close();
console.log("Closed file ");
// Find files under root/ and print their details.
for await (const handle of root.values()) {
console.log('Item in root: ', handle.name);
if (handle.kind == "file") {
let f = await handle.getFile();
console.log("File Details: ", f); // I can see here the file I created now and earlier created files, but all of them are 0 sized
}
}
}
files();
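A quick way to check whether flush() actually persisted anything (a sketch, under the assumption that a fresh handle reflects the on-disk state) is to reopen the file in a separate step:

// Hypothetical verification sketch: reopen the file with a fresh handle
// and compare its size to the 27 bytes written above.
async function verify() {
  const root = await navigator.storage.getDirectory();
  const handle = await root.getFileHandle("Draft.txt");
  const file = await handle.getFile();
  console.log("Persisted size:", file.size); // expected 27 if flush() worked
}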

Create new json from source json file in test project

UPDATE
I have continued to work through this and have the following code:
async generatejobs() {
  const fs = require("fs");
  const path = require("path");
  const directoryPath = path.join(__dirname, "../data/pjo/in");
  fs.readdir(directoryPath, function (err, files) {
    if (err) {
      console.log("Error getting directory information." + err);
    } else {
      files.forEach(function (file) {
        console.log(file);
        fs.readFile(file, (err, data) => {
          console.log(file); // this works, if I stop here
          // if (err) throw err;
          // let newJson = fs.readFileSync(data);
          // console.log(newJson);
        });
        // let data = fs.readFileSync(file);
        // let obj = JSON.parse(data);
        // let isoUtc = new Date();
        // let isoLocal = toISOLocal(isoUtc);
        // obj.printingStart = isoLocal;
        // obj.printingEnd = isoLocal;
        // let updatedFile = JSON.stringify(obj);
        // let write = fs.createWriteStream(
        //   path.join(__dirname, "../data/pjo/out", updatedFile)
        // );
        // read.pipe(write);
      });
    }
  });
}
As soon as I try to uncomment the line shown below, it fails.
let newJson = fs.readFileSync(data);
The error I am getting is this:
Uncaught ENOENT: no such file or directory, open 'C:\projects\codeceptJs\ipt\80-012345.json'
And indeed the file is not there; the actual path should be:
'C:\projects\codeceptJs\ipt\src\data\pjo\in\80-012345.json'
I do not understand why it is looking for the file in the project root, given that earlier in the code the path is set and seems to locate the files correctly via this:
const directoryPath = path.join(__dirname, "../data/pjo/in");
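The likely cause (an inference from the error paths, not something stated in the post): fs.readdir yields bare file names without their directory, so names get resolved against the process's working directory unless they are rejoined with directoryPath before reading. A minimal sketch:

// fs.readdir returns names like '80-012345.json' with no directory attached,
// so resolve each one against the directory that was scanned:
let data = fs.readFileSync(path.join(directoryPath, file));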
The remainder of the code, which is currently commented out, is where I am attempting to do the following:
Grab each file from the source dir
Put it into a JSON object
Update the JSON object to change two date entries
Save it to a new JSON file in a new location in my project
Original Post
I have a codeceptjs test project and would like to include a set of existing JSON files in my project (src/data/jsondata/in), update the date attribute within each, and write them to an output location in my project (src/data/jsondata/out). I need to change the date, get it back into a very specific string format (which I have done), and then insert it into the new JSON being created. I got about 80% of the way there and then ran into issues when trying to get the files from one folder within my project to another.
I broke this up into two parts:
A function to take a date and convert it to the date string I need
A function to grab the source JSON, update the date, and write a new JSON file to a new folder location
Number 1 is working as it should. Number 2 is not.
If there is a better way to accomplish this, I am very much open to that.
Here is the code where I'm trying to update the JSON. The main issue is that I'm not understanding and/or handling the path joining correctly.
generatePressJobs() {
  // requiring path and fs modules
  const path = require('path');
  const fs = require('fs');
  // joining path of directory
  const directoryPath = path.join(__dirname, '../', 'data/pjo/in/');
  // passing directoryPath and callback function
  fs.readdir(directoryPath, function (err, files) {
    // handling error
    if (err) {
      I.say('unable to scan directory: ' + err);
      return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using forEach
    files.forEach(function (file) {
      // Update each file with new print dates
      let data = fs.readFileSync(file);
      let obj = JSON.parse(data);
      let isoUtc = new Date();
      let isoLocal = toISOLocal(isoUtc);
      obj.printingStart = isoLocal;
      obj.printingEnd = isoLocal;
      let updatedFile = JSON.stringify(obj);
      fs.writeFile(`C:\\projects\\csPptr\\ipt\\src\\data\\pjo\\out\\${file}`, updatedFile, (err) => {
        if (err) {
          throw err;
        }
      });
    });
  });
},
Error received
Uncaught ENOENT: no such file or directory, open '80-003599.json'
at Object.openSync (fs.js:462:3)
at Object.readFileSync (fs.js:364:35)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:86:23
at Array.forEach (<anonymous>)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:84:13
at FSReqCallback.oncomplete (fs.js:156:23)
The function that generates the JSON is located in src/pages/basePage.js.
The folder structure I've built for the JSON files is:
src/data/jsondata/in --> for the original source files
src/data/jsondata/out --> for the resulting JSON after the change
Any insight or suggestions would be hugely appreciated.
Thank you,
Bob
My approach / resolution
Passing along the final approach I took, in the event it is helpful to anyone else. The data in the middle was specific to my requirements, but it is left in to show the process I took to do what I needed to do.
async generatePressjobs(count) {
  const fs = require("fs");
  const path = require("path");
  const sourceDirectoryPath = path.join(__dirname, "../data/pjo/in/");
  const destDirectoryPath = path.join(__dirname, "../data/pjo/out/");
  for (let i = 0; i < count; i++) {
    // read file and make object
    let content = JSON.parse(
      fs.readFileSync(sourceDirectoryPath + "source.json")
    );
    // Get current date and convert to required format for json file
    let isoUtc = new Date();
    let isoLocal = await this.toISOLocal(isoUtc);
    let fileNameTimeStamp = await this.getFileNameDate(isoUtc);
    // Get current hour and minute for DPI time stamp
    let dpiDate = new Date();
    let hour = dpiDate.getHours();
    let minute = dpiDate.getMinutes();
    let dpiStamp = hour + '' + minute;
    // update attributes in the json obj
    content.batchid = `80-0000${i}`;
    content.id = `80-0000${i}-10035-tcard-${dpiStamp}-0101010000_.pdf`;
    content.name = `80-0000${i}-8.5x11CALJEF-CalBody-${dpiStamp}-01010100${i}_.pdf`;
    content.printingStart = isoLocal;
    content.printingEnd = isoLocal;
    // write the file
    fs.writeFileSync(
      destDirectoryPath + `80-0000${i}-SOME-JOB-NAME-${dpiStamp}.pdf_Press Job printing end_${fileNameTimeStamp}.json`,
      JSON.stringify(content)
    );
  }
},
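Invocation is then just a matter of passing the number of job files to generate, e.g. (a usage sketch, assuming the method lives on the same page object as toISOLocal and getFileNameDate):

await this.generatePressjobs(3); // writes three job files into data/pjo/out/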

Writing large CSV to JS file using Node FS

I have a large CSV file of postcode data (~1.1 GB). I am trying to filter out the data I need and then write an array of values to a JS file.
The issue is that I'm always using too much memory and receiving this error:
Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory
I have tried increasing the memory using the command node --max-old-space-size=4096 fileName.js, but I still hit my memory limit; it just takes longer!
Here is my code that writes the JS file:
const csvFilePath = "./data/postcodes.csv";
const csv = require("csvtojson");
const fs = require("fs");

csv()
  .fromFile(csvFilePath)
  .then((jsonArray) => {
    const inUsePostcodes = jsonArray.filter((x) => x["In Use?"] === "Yes").map((y) => y.Postcode);
    fs.writeFileSync("postcodes.js", inUsePostcodes);
  });
Here is a sample of postcodes.csv:
Postcode,In Use?,Latitude,Longitude,Easting,Northing,Grid Ref,County,District,Ward,District Code,Ward Code,Country,County Code,Constituency,Introduced,Terminated,Parish,National Park,Population,Households,Built up area,Built up sub-division,Lower layer super output area,Rural/urban,Region,Altitude,London zone,LSOA Code,Local authority,MSOA Code,Middle layer super output area,Parish Code,Census output area,Constituency Code,Index of Multiple Deprivation,Quality,User Type,Last updated,Nearest station,Distance to station,Postcode area,Postcode district,Police force,Water company,Plus Code,Average Income
AB1 0AA,No,57.101474,-2.242851,385386,801193,NJ853011,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,46,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.31408,AB,AB1,"Scotland","Scottish Water",9C9V4Q24+HV,
AB1 0AB,No,57.102554,-2.246308,385177,801314,NJ851013,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,61,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.55457,AB,AB1,"Scotland","Scottish Water",9C9V4Q33+2F,
AB1 0AD,No,57.100556,-2.248342,385053,801092,NJ850010,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,45,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090399,S14000002,6808,1,0,2020-02-19,"Portlethen",8.54352,AB,AB1,"Scotland","Scottish Water",9C9V4Q22+6M,
How can I write to the JS file from this CSV, without hitting my memory limit?
You need a streaming CSV parser that parses the input and emits one line at a time, letting you stream the output to a file.
Here's one way to do it using the csv-reader module:
const fs = require('fs');
const csvReader = require('csv-reader');
const { Transform } = require('stream');

const myTransform = new Transform({
  readableObjectMode: true,
  writableObjectMode: true,
  transform(obj, encoding, callback) {
    let data = JSON.stringify(obj);
    if (this.tFirst) {
      // beginning of transformed data
      this.push("[");
      this.tFirst = false;
    } else {
      data = "," + data; // add comma separator if not first object
    }
    this.push(data);
    callback();
  }
});

myTransform.tFirst = true;

myTransform._flush = function (callback) {
  // end of transformed data
  this.push("]");
  callback();
};

// All of these arguments are optional.
const options = {
  skipEmptyLines: true,
  asObject: true, // convert data to object
  parseNumbers: true,
  parseBooleans: true,
  trim: true
};

const csvStream = new csvReader(options);
const readStream = fs.createReadStream('example.csv', 'utf8');
const writeStream = fs.createWriteStream('example.json', { autoClose: false });

readStream.on('error', err => {
  console.log(err);
  csvStream.destroy(err);
}).pipe(csvStream)
  .pipe(myTransform)
  .pipe(writeStream)
  .on('error', err => {
    console.error(err);
  }).on('finish', () => {
    console.log('done');
  });
The issue is that the csvtojson module tries to hold the entire parsed JSON array in memory!
I found a different solution using the csv-parser module, which parses one row at a time instead of the whole CSV.
Here is my solution:
const csv = require('csv-parser');
const fs = require('fs');

const stream = fs.createWriteStream("postcodes.js", { flags: 'a' });
let first = false;

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row["In Use?"] === "Yes") {
      if (!first) {
        first = true;
        stream.write(`const postcodes = ["${row.Postcode}",\n`);
      } else {
        stream.write(`"${row.Postcode}",\n`);
      }
    }
  })
  .on('end', () => {
    stream.write("]");
    console.log('CSV file successfully processed');
  });
It's not very pretty to write string fragments like const postcodes = [ by hand to produce JavaScript, but it performs the desired function.
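A slightly cleaner variant (a sketch, assuming a plain JSON output file is acceptable instead of a .js module) is to emit valid JSON per row, so quoting and escaping are handled for you:

// Hypothetical alternative: stream the postcodes into postcodes.json,
// using JSON.stringify per value instead of hand-assembled JS source.
const csv = require('csv-parser');
const fs = require('fs');

const out = fs.createWriteStream('postcodes.json');
let firstRow = true;
out.write('[');

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row['In Use?'] === 'Yes') {
      out.write((firstRow ? '' : ',\n') + JSON.stringify(row.Postcode));
      firstRow = false;
    }
  })
  .on('end', () => {
    out.write(']');
    console.log('done');
  });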

How to write a large amount of random bytes to file

I need to generate some files of varying sizes to test an application, and I thought the easiest way to do that would be a Node script.
I wrote the following code, but the process crashes when the file size exceeds my available memory.
const fs = require("fs");
const crypto = require('crypto');
const gb = 1024 * 1024 * 1024;
const data = crypto.randomBytes(gb * 5);
fs.writeFile('bytes.bin', data, (err) => {
if (err) throw err;
console.log('The file has been saved!');
});
On top of your memory issue, you will also have an issue with the crypto module, as the number of bytes it can generate in a single call is limited.
You will need to use fs.createWriteStream to generate and write the data in chunks rather than generating it in one go.
Here is a modified version of some code from the Node documentation on streams that streams chunks of random bytes to a file:
const fs = require("fs");
const crypto = require('crypto');
const fileName = "random-bytes.bin";
const fileSizeInBytes = Number.parseInt(process.argv[2]) || 1000;
console.log(`Writing ${fileSizeInBytes} bytes`)
const writer = fs.createWriteStream(fileName)
writetoStream(fileSizeInBytes, () => console.log(`File created: ${fileName}`));
function writetoStream(bytesToWrite, callback) {
const step = 1000;
let i = bytesToWrite;
write();
function write() {
let ok = true;
do {
const chunkSize = i > step ? step : i;
const buffer = crypto.randomBytes(chunkSize);
i -= chunkSize;
if (i === 0) {
// Last time!
writer.write(buffer, callback);
} else {
// See if we should continue, or wait.
// Don't pass the callback, because we're not done yet.
ok = writer.write(buffer);
}
} while (i > 0 && ok);
if (i > 0) {
// Had to stop early!
// Write some more once it drains.
writer.once('drain', write);
}
}
}
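Usage is then, for example (assuming the script above is saved as random-bytes.js):
node random-bytes.js 1073741824
which writes a 1 GiB file, since the byte count is read from process.argv[2].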
There are also online tools that let you generate files of your required size with less setup. The files are generated on your own system, so they don't have to be downloaded over the wire.

How to write a file from an ArrayBuffer in JS

I am trying to write a file uploader for the Meteor framework.
The principle is to split the file on the client, from an ArrayBuffer, into small chunks of 4096 bytes that are sent to the server through a Meteor.method.
The simplified code below is the client-side part that sends a chunk to the server; it repeats until offset reaches data.byteLength:
// data is an ArrayBuffer
var total = data.byteLength;
var offset = 0;

var upload = function () {
  var length = 4096; // chunk size
  // adjust the last chunk size
  if (offset + length > total) {
    length = total - offset;
  }
  // I am using Uint8Array to create the chunk
  // because it can be passed to the Meteor.method natively
  var chunk = new Uint8Array(data, offset, length);
  if (offset < total) {
    // Send the chunk to the server and tell it what file to append to
    Meteor.call('uploadFileData', fileId, chunk, function (err, length) {
      if (!err) {
        offset += length;
        upload();
      }
    });
  }
};

upload(); // start uploading
The simplified code below is the server-side part that receives the chunk and writes it to the file system:
var fs = Npm.require('fs');
var Future = Npm.require('fibers/future');

Meteor.methods({
  uploadFileData: function (fileId, chunk) {
    var fut = new Future();
    var path = '/uploads/' + fileId;
    // I tried that with no success
    chunk = String.fromCharCode.apply(null, chunk);
    // how to write the chunk, which is a Uint8Array, to disk?
    fs.appendFile(path, chunk, 'binary', function (err) {
      if (err) {
        fut.throw(err);
      } else {
        fut.return(chunk.length);
      }
    });
    return fut.wait();
  }
});
I failed to write a valid file to disk. The file is actually saved, but I cannot open it: when I view its content in a text editor, it is similar to the original file (a JPG, for example) but some characters are different. I think it could be an encoding problem, since the file size is not the same, but I don't know how to fix it...
Saving the file was as easy as creating a new Buffer from the Uint8Array object:
// chunk is the Uint8Array object
fs.appendFile(path, Buffer.from(chunk), function (err) {
  if (err) {
    fut.throw(err);
  } else {
    fut.return(chunk.length);
  }
});
Building on Karl.S's answer, this worked for me, outside of any framework:
fs.appendFileSync(outfile, Buffer.from(arrayBuffer));
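For a self-contained picture (a sketch; the buffer contents and output name are placeholders), the one-liner fits into a plain Node script like this:

// Hypothetical end-to-end example: obtain an ArrayBuffer (here built by hand)
// and persist it via the Buffer.from conversion shown above.
const fs = require('fs');

const arrayBuffer = new Uint8Array([0x48, 0x65, 0x6c, 0x6c, 0x6f]).buffer;
fs.appendFileSync('out.bin', Buffer.from(arrayBuffer)); // writes "Hello"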
Just wanted to add that in newer Meteor versions you can avoid some callback hell with async/await. await will also throw, pushing the error up to the client:
const fs = require('fs');

Meteor.methods({
  uploadFileData: async function (file_id, chunk) {
    var path = 'somepath/' + file_id; // be careful with this, make sure to sanitize file_id
    // use the promise-based API; the callback form of fs.appendFile is not awaitable
    await fs.promises.appendFile(path, Buffer.from(chunk));
    return chunk.length;
  }
});
