Unable to remove data from json file on disk - javascript

I'm unable to find a way to remove a whole line of JSON data (one object in the array) after it has been used.
For some reason delete is not working, or rather it isn't doing anything.
test.json
[
{"id":"1","code":"super-S","expires_in":"","gives_currencies":"1.5","gives_items":"","number_of_attempts":"1","attempts_used":""},
{"id":"2","code":"wow!","expires_in":"","gives_currencies":"3","gives_items":"","number_of_attempts":"1","attempts_used":""},
{"id":"3","code":"slashme","expires_in":"","gives_currencies":"4","gives_items":"","number_of_attempts":"1","attempts_used":""},
{"id":"4","code":"randombla","expires_in":"","gives_currencies":"5","gives_items":"","number_of_attempts":"1","attempts_used":""}
]
code
//fs configuration
const fs = require('fs');
let rawdata = fs.readFileSync('test.json');
let mycodes = JSON.parse(rawdata);
//something above
const randomcode = mycodes[Math.floor(Math.random() * mycodes.length)];
console.log('Your code is:', randomcode['code']); //logs me a random code value
delete mycodes[randomcode];
The goal here is to select a random code, which works, but then I need to remove it from the .json file so it won't repeat. I tried several things (delete, delete.randomcode, etc.), but the line is never removed from the .json file.

Use Array.prototype.splice(index, deleteCount) instead of delete.
delete, on an Array, just empties that index (it serializes as null), without removing the element or shifting the rest of the array.
Save back your modified data using JSON.stringify(mycodes) to that same file.
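To see the difference (a quick sketch):
const withDelete = ['a', 'b', 'c'];
delete withDelete[1];                     // leaves a hole, length stays 3
console.log(JSON.stringify(withDelete));  // ["a",null,"c"]
const withSplice = ['a', 'b', 'c'];
withSplice.splice(1, 1);                  // actually removes the element, length becomes 2
console.log(JSON.stringify(withSplice));  // ["a","c"]
Applied to your code: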
const fs = require('fs');
const mycodes = JSON.parse(fs.readFileSync('./test.json'));
const randomIndex = Math.floor(Math.random() * mycodes.length);
const randomObject = mycodes[randomIndex];
console.log('Your code is:', randomObject.code); // Log a random code value
mycodes.splice(randomIndex, 1); // Remove one element at randomIndex
// Write back to file
fs.writeFileSync('test.json', JSON.stringify(mycodes, null, 4), 'utf8');
If you already have that object out of your array, objects are held by reference (like a pointer in memory), so you can find its position with Array.prototype.indexOf(someObject):
const fs = require('fs');
const mycodes = JSON.parse(fs.readFileSync('./test.json'));
const randomIndex = Math.floor(Math.random() * mycodes.length);
const randomObject = mycodes[randomIndex];
// later in your code....
const objIndex = mycodes.indexOf(randomObject); // Get Object index in Array
mycodes.splice(objIndex, 1); // Remove it from array at that index
// Write back to file
fs.writeFileSync('test.json', JSON.stringify(mycodes, null, 4), 'utf8');
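Note that indexOf() returns -1 when the object isn't found, so it is worth guarding the splice (a small sketch):
const objIndex = mycodes.indexOf(randomObject);
if (objIndex !== -1) {
    mycodes.splice(objIndex, 1);
}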

You need to persist your data by writing it back to your JSON file after using JSON.stringify().
While you're at it, you can move your code into functions, which will make it easier to read and work with.
You might also want to read about editing arrays using Array.prototype.splice().
The delete operator is for deleting properties from objects. While you can use it to delete elements from an array, it will leave the index empty rather than closing the gap in the array after deletion.
const fs = require('fs');
// get a random element from any array
function getRandomElement (array) {
    const randomElement = array[Math.floor(Math.random() * array.length)];
    return randomElement;
}
function deleteElementFromArray (array, element) {
    const index = array.indexOf(element);
    if (index < 0) return false;
    array.splice(index, 1);
    return true;
}
// move the reading work inside a function
function readJson (filePath) {
    const json = fs.readFileSync(filePath, {encoding: 'utf8'});
    const data = JSON.parse(json);
    return data;
}
// move the writing work inside a function
function writeJson (filePath, data) {
    const json = JSON.stringify(data);
    fs.writeFileSync(filePath, json);
}
const jsonFilePath = 'test.json';
const mycodes = readJson(jsonFilePath);
const randomcode = getRandomElement(mycodes);
console.log('Your code is:', randomcode['code']);
deleteElementFromArray(mycodes, randomcode);
writeJson(jsonFilePath, mycodes);

Related

Array doesn't have expected value - Nodejs

I am running a script which looks into a directory and lists files, then checks for the file type to process; if the extension matches, the file is read and each line of the file (.col, which is just a txt file renamed) is inserted into an array.
After a file is read and the array is populated, I would like to use the array and do some further processing, e.g. create a db record. I am missing something really basic here, because on each console log I do as below I always get the contents of all files in my array.
So to make it a bit simpler:
array is empty.
Then the file is read and processed, and the array now has
array[0] = line 0 of file
array[1] = line 1 of file, etc.
const fs = require('fs');
const readline = require('readline');
var files = fs.readdirSync('/home/proj/data');
var path = require('path');
var model = [];
var lineReader = [];
for (var i = 0; i < files.length; i++) {
    if (path.extname(files[i]) === ".col") {
        lineReader[i] = readline.createInterface({
            input: require('fs').createReadStream(files[i])
        });
        lineReader[i].on('line', function (line) {
            model.push(line);
        }).on('close', async function () {
            console.log(model);
        });
    }
}
Instead the script is run and array[] holds all lines of all files that match the extension.
Your help is greatly appreciated and anyone is allowed to scorch my JS as I am pretty sure I am missing something basic here.
So, you want to read the files in parallel (because that's what your program does) and put the results in an array of arrays?
You can wrap the file-reading mechanism in a promise and use Promise.all. Here is an example to get you started.
const fs = require('fs');
const readline = require('readline');
var files = fs.readdirSync('./');
var path = require('path');
function readFile(fileName) {
    return new Promise(resolve => {
        const array = [];
        const lineReader = readline.createInterface({
            input: fs.createReadStream(fileName) // use the argument, not files[i]
        });
        lineReader.on('line', function (line) {
            array.push(line);
        }).on('close', function () {
            // do some per-file processing here if needed
            console.log(array);
            resolve(array);
        });
    });
}
const readFilePromises = [];
for (var i = 0; i < files.length; i++) {
    if (path.extname(files[i]) === ".js") { // ".col" in your case
        readFilePromises.push(readFile(files[i]));
    }
}
Promise.all(readFilePromises) // or await Promise.all([..])
    .then(data => {
        console.log(data); // will be an array of arrays
    })
If you want a single Array you can always flatten the result using data.flat()
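For example, building on the sketch above (same readFile helper and promise list):
Promise.all(readFilePromises)
    .then(data => {
        const allLines = data.flat(); // one flat array containing the lines of every file
        console.log(allLines);
    })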
If your files are not very big and sync methods are OK, you can simplify the code this way:
'use strict';
const fs = require('fs');
const path = require('path');
const model = [];
fs.readdirSync('/home/proj/data')
    .filter(name => path.extname(name) === '.col')
    .forEach((name) => {
        // join with the directory, since readdirSync returns bare file names
        model.push(...fs.readFileSync(path.join('/home/proj/data', name), 'utf8').split('\n'));
    });
console.log(model);

Problems with nested Object.keys forEach loop and building array of merged JSON objects

I have two CSV files, one with routing steps and one with a list of ids. I need to add each id to the start of each routing step. I'm using Node.js.
var routeNumFile = '/routing_numbers.csv';
var routeStepFile = '/routing_steps.csv';
const csvToJson = require('csvtojson');
const jsonToCsv = require('json2csv').parse;
const fs = require('fs');
var routeNumArray;
var routeStepArray;
try {
    routeNumArray = await csvToJson().fromFile(routeNumFile);
} catch (err) {
    console.log("error in reading csv file")
}
try {
    routeStepArray = await csvToJson().fromFile(routeStepFile);
} catch (err) {
    console.log("error in reading csv file")
}
var outputArray = new Array;
var outputPath = '/gitlab/BSI_Create_Csv_Import/finalOutput.csv';
if (routeNumArray != null && routeStepArray != null) {
    Object.keys(routeNumArray).forEach(function (key1) {
        Object.keys(routeStepArray).forEach(function (key2) {
            var comboObj = Object.assign(routeNumArray[key1], routeStepArray[key2]);
            console.log(comboObj);
            outputArray.push(comboObj);
        });
    });
}
console.log(outputArray);
var csv = jsonToCsv(outputArray);
fs.writeFileSync(outputPath, csv);
The output from console.log(comboObj) is what I want. However, when I push that into an array, I just get the very last entry of the routing-steps CSV over and over. If I write the output with a stream directly where comboObj is created, it mostly works, but then I have to convert each object to CSV first and the output leaves me with the headings duplicated at the end of every line. It writes much more cleanly from an array of JSON objects to a CSV.
So, I'm trying to get all of the combined objects together in an array so I can convert it all to CSV and write it to a file. Can anyone tell me what's going wrong with my method?
You should create a new object and assign properties to it, like this:
var comboObj = Object.assign({}, routeNumArray[key1], routeStepArray[key2]);
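This matters because Object.assign() mutates its first argument and returns it, so the original loop keeps pushing references to the same routeNumArray[key1] object, which ends up holding only the values of the last merge. A small sketch of the difference:
var base = { id: 1 };
var merged1 = Object.assign(base, { step: 'A' });
var merged2 = Object.assign(base, { step: 'B' });
console.log(merged1 === merged2); // true, both point at the same (mutated) object
console.log(merged1.step);        // 'B', the earlier merge was overwritten
// With a fresh {} as the target, each merge produces an independent copy
var copy1 = Object.assign({}, base, { step: 'A' });
var copy2 = Object.assign({}, base, { step: 'B' });
console.log(copy1.step, copy2.step); // 'A' 'B'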

Nodejs Convert text to JSON

For some reason I'm having such a hard time converting this txt file to an actual javascript array.
myJson.txt
{"action": "key press", "timestamp": 1523783621, "user": "neovim"}
{"action": "unlike", "timestamp": 1523784584, "user": "r00k"}
{"action": "touch", "timestamp": 1523784963, "user": "eevee"}
{"action": "report as spam", "timestamp": 1523786005, "user": "moxie"}
Currently what I have that doesn't work
const fs = require('fs');
function convert(input_file_path) {
    const file = fs.readFileSync(input_file_path, 'utf8');
    const newFormat = file
        .replace(/(\r\n\t|\n|\r\t)/gm, '')
        .replace(/}{/g, '},{');
    console.log([JSON.parse(newFormat)]);
}
convert('myJson.txt');
Since your file contains a JSON object per line, you could read that file line by line, using readline.
Each line is then parsed and pushed into an array, which is then returned (resolved) after the file is fully read.
'use strict';
const fs = require('fs');
const readline = require('readline');
function convert(file) {
    return new Promise((resolve, reject) => {
        const stream = fs.createReadStream(file);
        // Handle stream error (IE: file not found)
        stream.on('error', reject);
        const reader = readline.createInterface({
            input: stream
        });
        const array = [];
        reader.on('line', line => {
            array.push(JSON.parse(line));
        });
        reader.on('close', () => resolve(array));
    });
}
convert('myJson.txt')
    .then(res => {
        console.log(res);
    })
    .catch(err => console.error(err));
I would have done it this way:
var fs = require('fs');
var readline = require('readline');
var array = [];
var input = null;
var rd = readline.createInterface({
    input: fs.createReadStream(__dirname + '/demo.txt')
});
rd.on('line', function (line) {
    array.push(JSON.parse(line));
});
rd.on('close', function () {
    array.forEach(e => console.log(e.action))
})
What's happening here is that I am reading the lines of the file one by one using readline, which is one of the core modules of Node.js, listening to its events and doing what's needed.
And yeah, you'll have to parse each line as JSON for sure ;)
Thanks
The problem with your code is that wrapping the result of JSON.parse() in a JS array ([JSON.parse(newFormat)]) is not the same thing as parsing a JSON array: the square brackets have to be part of the string before you parse it.
After your replacements, newFormat looks like this:
'{"foo": "bar"},{"foo":"baz"}'
which is not valid JSON on its own, so JSON.parse() throws. What you actually want to parse is:
'[{"foo": "bar"},{"foo":"baz"}]'
So, to get your code running, add the square brackets to the string before parsing it:
function convert(input_file_path) {
    const file = fs.readFileSync(input_file_path, 'utf8');
    const newFormat = file
        .replace(/(\r\n\t|\n|\r\t)/gm, '')  // strip the newlines, as in your code
        .replace(/}{/g, '},{');             // separate the objects with commas
    console.log(JSON.parse('[' + newFormat + ']'));
}
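For what it's worth, an even shorter variant along the same lines (a sketch, assuming each non-empty line of the file is a complete JSON object; convertByLines is just an illustrative name):
const fs = require('fs');
function convertByLines(input_file_path) {
    return fs.readFileSync(input_file_path, 'utf8')
        .split(/\r?\n/)                     // one entry per line
        .filter(line => line.trim() !== '') // skip blank lines
        .map(line => JSON.parse(line));     // parse each line on its own
}
console.log(convertByLines('myJson.txt'));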
What I'm doing in this script is reading the content of the text file line by line, converting each line to a JSON object and storing it in an array. When we reach the last line the array has all the data, and we write it to a new file with fs.writeFileSync() after converting it to a string with JSON.stringify().
Note: you have to install the line-reader package first, i.e. npm install line-reader
var lineReader = require('line-reader');
var fs = require('fs')
var jsonObj = {};
var obj = [];
var file = "fileName.json"
var num = 0;
lineRead();
function lineRead(){
    lineReader.eachLine('input.txt', function(line, last) {
        // to check which line we're on
        console.log(num);
        num++;
        convertJson(line)
        if(last){
            // when it's the last line we convert the array to a string and save it to a new file
            var data = JSON.stringify(obj)
            fs.writeFileSync(file, data);
        }
    });
}
function convertJson(data){
    var value = JSON.parse(data)
    // storing the parsed value in the json object
    jsonObj = value;
    obj.push(jsonObj);
}

Unable to add vector to FlatBuffer table (Javascript)

I am following the FlatBuffers Javascript tutorial but I am having trouble adding a vector of non-scalar items to the following object:
namespace test;
enum Availability : byte {
    Unavailable = 0,
    Available = 1
}
table Channel {
    channelNumber:uint;
    myState:Availability = Unavailable;
}
table ControlChannel {
    channels:[Channel];
}
root_type ControlChannel;
As you can see the root object contains a vector of Channel objects. I successfully generated my javascript code, but while trying to create some test data it seems that the channels aren't added correctly. This is what I tried:
const fs = require('fs');
const flatbuffers = require('./flatbuffers').flatbuffers;
const test = require('./control-channel_generated').test;
// Create builder. The '1' is the 'initial size', whatever that means.
const builder = new flatbuffers.Builder(1);
// Create the first channel
test.Channel.startChannel(builder);
test.Channel.addChannelNumber(builder, 1);
const channel1 = test.Channel.endChannel(builder);
// Create another channel
test.Channel.startChannel(builder);
test.Channel.addChannelNumber(builder, 2);
test.Channel.addMyState(builder, test.Availability.Available);
const channel2 = test.Channel.endChannel(builder);
// Create a vector of the channels
const chans = [channel1, channel2];
const channels = test.ControlChannel.createChannelsVector(builder, chans);
// Create control channel and add the channels
test.ControlChannel.startControlChannel(builder);
test.ControlChannel.addChannels(builder, channels); // The resulting cc-test.data file is the same whether I comment this line or not
const controlChannel = test.ControlChannel.endControlChannel(builder);
// Control channel is finished
builder.finish(controlChannel);
// Create a buffer (to send it, write it etc)
const buf = builder.asUint8Array();
fs.writeFileSync('cc-test.data', buf);
console.log(`Data written to 'cc-test.data'.`);
This results in a file called cc-test.data which contains the buffer, and no matter how many channels I try to add, the buffer is always exactly the same. I also tried parsing the data back like this:
const fs = require('fs');
const flatbuffers = require('./flatbuffers').flatbuffers;
const test = require('./control-channel_generated').test;
// Parse the data as a new byte array
const data = new Uint8Array(fs.readFileSync('./cc-test.data'));
const buf = new flatbuffers.ByteBuffer(data);
// This is where all the magic happens
const controlChannel = test.ControlChannel.getRootAsControlChannel(buf);
// You cannot iterate over the channels directly, you have to access them by index
console.log(`ControlChannel has ${controlChannel.channelsLength()} channels:`);
for (var i = 0; i < controlChannel.channelsLength(); i++) {
const channel = controlChannel.channels(i);
console.log(`Channel ${channel.channelNumber()} (Available: ${channel.myState()})`);
}
This just prints ControlChannel has 0 channels every time. What am I missing?

how to read a file, store data and then write it

I have a text file with a ton of values that I want to convert to meaningful JSON using the node.js fs module.
I want to store the first value of every line in an array unless the value is already present.
7000111,-1.31349,36.699959,1004,
7000111,-1.311739,36.698589,1005,
8002311,-1.262245,36.765884,2020,
8002311,-1.261135,36.767544,2021,
So for this case, I'd like to write to a file:
[7000111, 8002311]
Here's what I have so far. It writes [] to the file.
var fs = require('fs');
var through = require('through');
var split = require('split');
var shape_ids = [];
var source = fs.createReadStream('data/shapes.txt');
var target = fs.createWriteStream('./output3.txt');
var tr = through(write, end);
source
.pipe(split())
.pipe(tr)
// Function definitions
function write(line){
    var line = line.toString();
    var splitted = line.split(',');
    // if it's not in array
    if (shape_ids.indexOf(splitted[0]) > -1){
        shape_ids.push(splitted[0]);
    }
}
function end(){
    shape_ids = JSON.stringify(shape_ids);
    target.write(shape_ids);
    console.log('data written');
}
The code is using the split and through modules
How do I store values in the array and write the populated array to the file?
Update:
This is what I want to do, but it's in Ruby:
shape_ids = []
File.open("data/shapes.txt").readlines.each do |line|
  data = line.split(',')
  shape_id = data.first
  if !shape_ids.include? shape_id
    shape_ids.push(shape_id)
  end
end
puts shape_ids # array of unique shape_ids
Can I do this in javascript?
Unless you are super comfortable with the new Stream API in node, use the event-stream module to accomplish this:
var fs = require('fs');
var es = require('event-stream');
function getIds(src, target, callback) {
    var uniqueIDs = [];
    es.pipeline(
        fs.createReadStream(src),
        es.split(),
        es.map(function (line, done) {
            var id = line.split(',').shift();
            if (uniqueIDs.indexOf(id) > -1) return done();
            uniqueIDs.push(id);
            done(null);
        }),
        es.wait(function (err, text) {
            // Build the JSON output; the ids are wrapped in an object here,
            // although a top-level array is also valid JSON
            var data = JSON.stringify({ ids: uniqueIDs });
            fs.writeFile(target, data, function (err) {
                if ('function' == typeof callback) callback(err);
            });
        })
    );
}
getIds('./values.txt', './output.json');
Unfortunately there is no "easy" way to keep this as a pure stream flow, so you have to "wait" until the data is done filtering before turning it into a JSON string. Hope that helps!
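For reference, a sketch of the same idea using only core modules (readline plus a Set for uniqueness; getUniqueFirstValues is just an illustrative name):
const fs = require('fs');
const readline = require('readline');
function getUniqueFirstValues(src, target, callback) {
    const seen = new Set();
    const reader = readline.createInterface({ input: fs.createReadStream(src) });
    reader.on('line', (line) => {
        const id = line.split(',')[0]; // first value of the line
        if (id) seen.add(id);
    });
    reader.on('close', () => {
        fs.writeFile(target, JSON.stringify([...seen]), callback);
    });
}
getUniqueFirstValues('data/shapes.txt', './output3.txt', (err) => {
    if (err) console.error(err);
    else console.log('data written');
});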
