I keep getting
TypeError [ERR_INVALID_ARG_TYPE]: The "data" argument must be of type string or an instance of Buffer, TypedArray, or DataView. Received an instance of Array
I'm trying to take lines from a file and separate them at a : and put them into another file as two separate lines then run it through an app and separate the lines by whether they work or not.
const fs = require('fs');
const conf = '.\\login.conf'
const directory = '.\\input\\'
fs.readdir(directory, (err, files) => {
files.forEach(files => {
var files_directory = directory + files
fs.readFile(files_directory, 'utf8', (err, data) => {
if (err) {console.log('error', err)}
var data_array = data.split(' ')
var spaced_data_array = data_array + ' '
Array.from(spaced_data_array)
var d = spaced_data_array.split(' ')
console.log(d)
var data = d + ''
login = data.split(':')
fs.writeFile(conf, login, (err, result) => {
if (err) {console.log('error', err)}
})
require('child_process').exec('.\\ovpnconnector.exe start nordvpn.ovpn')
if (work = 0) {
console.log('Not Working')
fs.writeFile('.\\output\\not_working.txt', holder, (err, result) => {
if (err) {console.log('error', err)}
})
}
if (work = 1) {
console.log('Working')
fs.writeFile('.\\output\\working.txt', holder, (err, result) => {
if (err) {console.log('error', err)}
})
}
})
})
})
This block causes the error:
login = data.split(':')
fs.writeFile(conf, login, (err, result) => {
login is an array and it cannot be used as the second parameter of the fs.writeFile method. You can see it in the error message:
The "data" argument must be of type string or an instance of Buffer, TypedArray, or DataView. Received an instance of Array
Action
What you're trying to do:
take lines from a file and separate them at a : and put them into
another file as two separate lines
You can merge the array to string by the newline character.
login = data.split(':').join('\r\n')
fs.writeFile(conf, login, (err, result) => {
Related
In an Express JS connected to a mySQL db, I am trying to get some data of an already defined route/ query:
// customers.model.js
CUSTOMERS.getAll = (result) => {
let query = "SELECT * FROM customers"
sql.query(query, (err, res) => {
if (err) {
console.log("error: ", err)
result(null, err)
return
}
result(null, res)
})
}
// customers.controller.js
// GET customers is a standalone route and should output all the customers when called.
const CUSTOMERS = require("../models/customers.model.js")
exports.findAll = (req, res) => {
return CUSTOMERS.getAll((err, data) => {
if (err)
res.status(500).send({
message: err.message ||
"Some error occurred while retrieving customers...",
})
else res.send(data)
})
}
In payments.controller.js I would firstly like to get all users so I can do something with the data:
// payments.controller.js
// GET payments is also a standalone route and should get the customers,
// do something with the data and output a calculation with the help of this data
const CUSTOMERS = require("../models/customers.model.js")
exports.calculateAll = (req, res) => {
const customers = CUSTOMERS.getAll((err, data) => {
console.log('this always has correct data', data)
if (err) return err
else return data
})
console.log('this is always undefined', customers)
...
res.send(whatEverCalculatedData)...
}
But that data here is always undefined.
What am I doing wrong in the above, and what's the correct way to call this route inside another route?
I know it has similarities with this question but I couldn't sort it out for my particular example.
It's due to your call which is asynchronous.
You must wait your data being ready before rendering the results.
Maybe you could to use Promises or async/await statements.
For example:
CUSTOMERS.getAll = async () => {
const query = "SELECT * FROM customers";
try {
return await sql.query(query);
} catch (e) {
console.log(`An error occurred while fetching customers: ${e.message}.`);
return null;
}
}
exports.calculateAll = async (req, res) => {
try {
const data = await CUSTOMERS.getAll();
res.send(whatEverCalculatedData);
} catch (e) {
res.send(`Something went wront: ${e.message}.`);
}
}
var ComfyJS = require("comfy.js");
var fs = require('fs');
const dataBuffer = fs.readFileSync('database.json');
const dataJSON = dataBuffer.toString();
const scoreBoard = JSON.parse(dataJSON);
ComfyJS.onChat = (user, message, flags, self, extra) => {
for (let i = 0; i < scoreBoard.length; i++) {
if (scoreBoard[i].name == user) {
console.log('The name already exist');
}
else{
scoreBoard.push({name:user,score:message});
}
}
var data = JSON.stringify(scoreBoard);
fs.writeFile('database.json', data, function (err) {
if (err) {
console.log('There has been an error saving your configuration data.');
console.log(err.message);
return;
}
console.log('Configuration saved successfully.')
});
}
Hi I'm new to code and I'd like to build a twitch bot and I want to save my data on a JSON file. ComfyJS.onchat triggers when somebody types something on chat and I want to take their name and message(score) as value and save it on my database but I need to save them one by one so if the name already exists in JSON file I don't want to add it. How should I fix it?
ComfyJS.onChat = (user, message, flags, self, extra) => {
const exists = scoreBoard.find(fn => fn.name === user)
if (exists) return;
scoreBoard.push({
name: user,
score: message
});
var data = JSON.stringify(scoreBoard);
fs.writeFile('database.json', data, function(err) {
if (err) {
console.log('There has been an error saving your configuration data.');
console.log(err.message);
return;
}
console.log('Configuration saved successfully.')
});
}
I'm trying to add a new text to an existing json file, I tried writeFileSync and appendFileSync however the text added doesn't format as json even when i use JSON.stringify.
const fs = require('fs');
fs.readFile("test.json", (err, data) => {
if( err) throw err;
var data = JSON.parse(data);
console.log(data);
});
var student = {
age: "23"
};
fs.appendFileSync("test.json", "age: 23");
// var writeData = fs.writeFileSync("test.json", JSON.stringify(student));
My json file
{ name: "kevin" }
Append turns out like this, {name: "kevin"}age: "23"
and writeFileSync turns out like {name: "kevin"}{age: "23"}
What I want is to continuously add text to my json file like so
{
name: "kevin",
age: "23"
}
First, dont use readFileSync and writeFileSync. They block the execution, and go against node.js standards. Here is the correct code:
const fs = require('fs');
fs.readFile("test.json", (err, data) => { // READ
if (err) {
return console.error(err);
};
var data = JSON.parse(data.toString());
data.age = "23"; // MODIFY
var writeData = fs.writeFile("test.json", JSON.stringify(data), (err, result) => { // WRITE
if (err) {
return console.error(err);
} else {
console.log(result);
console.log("Success");
}
});
});
What this code does:
Reads the data from the file.
Modifies the data to get the new data the file should have.
Write the data(NOT append) back to the file.
Here's what you can do: read the data from the file, edit that data, then write it back again.
const fs = require("fs")
fs.readFile("test.json", (err, buffer) => {
if (err) return console.error('File read error: ', err)
const data = JSON.parse(buffer.toString())
data.age = 23
fs.writeFile("test.json", JSON.stringify(data), err => {
if (err) return console.error('File write error:', err)
})
})
I am using oracledb with node and fetching data from it asynchronously. For the sake of ease, I have implemented it using separate files like below -
config.js -
module.exports = {
user : "user",
password : "password",
connectString : "*connstring*" ,
deliveredQuery: " SELECT COUNT (DISTINCT order_num) AS Cnt from orders where department = 'HR'
};
query2.js :
module.exports = function(callback) {//pass callback function and return with this
var oracledb = require('oracledb');
var dbConfig = require('./config.js');
this.queryDB = function(query,callback) {
oracledb.getConnection({
user: dbConfig.user,
password: dbConfig.password,
connectString: dbConfig.connectString,
deliveredQuery: dbConfig.deliveredQuery
}, function(err, connection) {
if (err) {
console.error(err.message);
return callback(err);
}
connection.execute(query, function(err, result) {
if (err) {
console.error(err.message);
doRelease(connection);
return;
}
//console.log(result.metaData);
//console.log(JSON.parse(result.rows[0][0]));
doRelease(connection);
return callback(null, JSON.parse(result.rows[0][0]))
});
});
function doRelease(connection) {
connection.release(function(err) {
if (err) {
console.error(err.message);
return callback(err);
}
});
}
};
};
serv_ontime.js :
var dbConfig = require('./config.js');
var res = require('./query2.js')();
var onTime_query = dbConfig.onTime_query;
module.exports = queryDB(onTime_query, function(err, callback){ });
index.js :
var res = require('./serv_ontime.js');
console.log("The result is= "+ res);
Now, When I am doing - node index.js from my cmd then I am getting the output as [object Object]. I suppose it is because the call is happening asynchronously. But if you see in the file query2.js , I am returning the value after parsing(using JSON.parse) but still the value I am getting in the index.js file is not the parsed one. How can I parse the value in index.js? I have already tried JSON.parse but it doesn`t work.
You are getting output as [object Object] because you are doing + with a String('The result is= '), So js engine tries to convert the Object to a String. To view it as an Object, do log it separately or log with comma separated,
console.log('The result is= ', res); (or)
console.log(res); // or console.dir(res)
Or you can get String version of it, by doing JSON.stringify(res)
console.log('The result is= ', JSON.stringify(res));
In serv_ontime.js you are exporting result of queryDB witch indeed is undefined. Ty this:
serv_ontime.js
var dbConfig = require('./config.js');
var res = require('./query2.js')();
var onTime_query = dbConfig.onTime_query;
module.exports = function (callback) {
queryDB(onTime_query, callback)
};
index.js
var serv_ontime = require('./serv_ontime.js');
serv_ontime(function(error, res) {
console.log("The error is= ", error);
console.log("The result is= ", res);
});
So i did that kind of code.
var csv = require('csv-stream');
var request = require('request');
var fs = require('fs');
var pg = require('pg');
var conString = "pg://admin:admin#localhost:5432/labels";
// All of these arguments are optional.
var options = {
delimiter : ';', // default is ,
endLine : '\n', // default is \n,
escapeChar : '"', // default is an empty string
enclosedChar : '"', // default is an empty string
}
try {
var csvStream = csv.createStream(options);
fs.createReadStream('personss.csv').pipe(csvStream)
.on('error', function (err) {
console.error(err);
})
.on('data', function (data) {
// outputs an object containing a set of key/value pair representing a line found in the csv file.
console.log(data);
pg.connect(conString, function (err, client, done) {
client.query(
'INSERT into test (firstname, lastname) from',
function (err, result) {
if (err) {
console.log(err);
} else {
console.log('data inserted');
}
});
});
});
} catch (e) {
console(e.message);
};
where i stream the csv file with csv-stream library, how i can insert now that values from variable 'data' to my database? Should i do it by for cycle?
Update
added for cycle to iterate data
pg.connect(conString, function (err, client, done) {
for (var i = 0; data.length; i++) {
client.query(
'INSERT into test (firstname, lastname)',
function (err, result) {
if (err) {
console.log(err);
} else {
console.log('data inserted' + result.data[0]);
}
});
};
});