I send a query from Node.js to Neo4j, but I never see anything in the callback. The query executes correctly, but I am unable to read any information in the callback and log it to the console.
I think Node.js executes console.log before any data has arrived, but I do not know how to solve it.
Node.js:
// Load Modules
var neo4j = require('neo4j');
// Database Connection
var db = new neo4j.GraphDatabase("http://neo4j:Gemitis26@localhost:7474/");
// Initialize Query
var query = "CREATE (:Song {name:'James'})";
db.cypher(query, function(err, node){
    if(err) throw err;
    // Output node properties.
    console.log(node.data);
    // Output node id.
    console.log(node._id);
});
Output:
C:\Users\RRamos\Documents\Projects\test-neo4j>node index.js
[]
undefined
As I said, I checked the database and the node is correctly created.
There are a number of problems in your code:
Your Cypher query does not have a RETURN clause so your query response will always be an empty array (because it will never contain any result rows).
Your callback is expecting the wrong data structure for the response.
Try this code. It dumps out the error (if any) and the response, so that you can see the actual data structure of a response. It also uses a for loop to iterate through the rows of data in the response and print out each returned node's properties and its native ID. In your case there will be at most one result row, so the loop is not strictly necessary, but in general there can be multiple rows.
// Load Modules
var neo4j = require('neo4j');
// Database Connection
var db = new neo4j.GraphDatabase("http://neo4j:Gemitis2@localhost:7474/");
// Initialize Query
var query = "MATCH (s:Song {name:'James'}) RETURN s";
db.cypher(query, function(err, res){
    // Dump out the err and the response, to see the data structure.
    console.log("err: %j, res: %j", err, res);
    if(err) throw err;
    // Print out the data for each row in the response.
    for (var i = 0; i < res.length; i++) {
        var s = res[i].s;
        // Output node properties.
        console.log(s.properties);
        // Output node id.
        console.log(s._id);
    }
});
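The same fix applies if you want the node's data back at creation time instead of via a separate MATCH. A minimal sketch building on point 1 above (add a RETURN clause to the original CREATE, then read the row the same way):
var query = "CREATE (s:Song {name:'James'}) RETURN s";
db.cypher(query, function(err, res){
    if (err) throw err;
    // The newly created node comes back as the only result row.
    var s = res[0].s;
    // Output node properties.
    console.log(s.properties);
    // Output the native ID assigned by Neo4j.
    console.log(s._id);
});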
Related
I have a project where I have to process an input CSV file and store it in an array that I can add to, then print it out to a CSV file. I then use the transaction data for the rest of my project, so being able to complete this part is vital, as testing will be performed with other CSV files.
My issue is that while using csv-parse, if I use console.table(results); it shows the CSV objects when I run the .js file in my command terminal, so I know it's parsing, but no matter what I do I cannot get the objects to go into my array variable.
Please can someone give me a hint as to where I've gone wrong:
var fs = require('fs');
var parse = require('csv-parse');

var transactionValues = []; // Need an array to hold transactions

// Constructor for transactions
function addData (id, accountType, initiatorType, dateTime, transactions) {
    var data = {
        "AccountID" : id,
        "AccountType" : accountType,
        "InitiatorType" : initiatorType,
        "DateTime" : dateTime,
        "TransactionValues" : transactions
    }
    transactionValues.push(data); // should add a new line
}

var parser = parse({columns: true}, function (err, results) {
    console.table(results);
    addData(results.index[0].AccountID, results.index[0].AccountType, results.index[0].InitiatorType, results.index[0].DateTime, results.index[0].TransactionValue, 0);
}); // attempted to save the objects into the array but no success

fs.createReadStream(__dirname + '/testData/customer-1234567-ledger.csv').pipe(parser);
console.log(transactionValues); // array is empty
I believe results is already a normal array as it comes back from csv-parse, so you are trying to access the first element with results.index[0] when it should just be results[0]. Another issue is that fs.createReadStream(...).pipe(...) is asynchronous, which means your console.log runs before parsing is done. You need to put any code that has to run after parsing inside the callback of your parse function. Something like this:
var parser = parse({columns: true}, function (err, results) {
    console.table(results);
    // Loop through each object parsed from the CSV.
    for (const row of results) {
        addData(row.AccountID, row.AccountType, row.InitiatorType, row.DateTime, row.TransactionValue, 0);
    }
    console.log(transactionValues); // this should be populated properly
    /* Do anything that needs to use transactionValues here */
});
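If code in other parts of the program needs the populated array, one option (a sketch that goes beyond the original answer, reusing fs, parse, addData, and transactionValues from the code above) is to wrap the parsing in a Promise so callers can wait for it:
function loadTransactions(path) {
    return new Promise(function (resolve, reject) {
        var parser = parse({columns: true}, function (err, results) {
            if (err) return reject(err);
            for (const row of results) {
                addData(row.AccountID, row.AccountType, row.InitiatorType, row.DateTime, row.TransactionValue, 0);
            }
            resolve(transactionValues);
        });
        fs.createReadStream(path).pipe(parser);
    });
}

loadTransactions(__dirname + '/testData/customer-1234567-ledger.csv')
    .then(function (transactions) {
        console.log(transactions); // populated once parsing has finished
    })
    .catch(function (err) { console.error(err); });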
I am using a REST API built with Node and MongoDB, and I am running into a problem: I am unable to see the data pushed into my array arr inside the query's callback function; every time I get an empty array.
What I'm using:
var arr = [];
Business.find({ userId: userId }, (err, docss) => {
    if (err) throw err;
    arr.push(docss);
});
console.warn(arr);
OUTPUT : []
Thanks in advance for helping.
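Business.find is asynchronous, so console.warn runs before the callback has fired; this is the same timing issue as in the questions above. A minimal sketch of the usual fix, using only the code shown in the question: move the logging (or whatever uses arr) into the callback.
var arr = [];
Business.find({ userId: userId }, (err, docss) => {
    if (err) throw err;
    arr.push(docss);
    console.warn(arr); // runs only after the documents have arrived
});

// Alternatively, if Business is a Mongoose model, find() without a
// callback returns a thenable query, so the result can be awaited:
Business.find({ userId: userId })
    .then(docss => console.warn(docss))
    .catch(err => { throw err; });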
I'm not a super experienced coder, so forgive me if this question is too simple.
I have a CSV with many rows, and one of its columns is 'id'. How can I remove just one row based on the id (i.e. the code should search for the id and delete that row)?
This is what I have so far (not too helpful, since on one day I may need to remove id 5 and on another I may need to remove id 2...). Thank you so much!
var fs = require('fs');

fs.readFile(filename, 'utf8', function(err, data) {
    if (err) {
        // check and handle err
    }
    var linesExceptFirst = data.split('\n').slice(1).join('\n');
    fs.writeFile(filename, linesExceptFirst, function(err) {
        if (err) throw err;
    });
});
PS: It must be in JavaScript, as the code is running on a Node.js server.
You'll need to parse the CSV, which is simple with Array.prototype.map().
Then you'll need to use Array.prototype.filter() to drop the row whose column value matches the id you are after.
It is just a couple of lines of code and you are all set:
var fs = require('fs');

// Set this up someplace
var idToSearchFor = 2;

// Read the file
fs.readFile('csv.csv', 'utf8', function(err, data) {
    if (err) {
        // check and handle err
    }
    // Get an array of lines, setting the header row aside so it
    // survives the rewrite (this assumes the id is the first column)
    let lines = data.split('\n');
    let header = lines[0];
    let linesExceptFirst = lines.slice(1);
    // Turn that into a data structure we can work with (an array of arrays)
    let linesArr = linesExceptFirst.map(line => line.split(','));
    // Use filter to keep only the rows whose id does NOT match,
    // in effect deleting the found row, then join back into CSV text
    let output = linesArr
        .filter(line => parseInt(line[0]) !== idToSearchFor)
        .map(line => line.join(','))
        .join('\n');
    // Write out the new file with the header restored
    fs.writeFileSync('new.csv', header + '\n' + output);
});
Note that I removed the call to .join() after .slice() so we can operate on the array created by the call to .split(); the lines are only joined back together at the end, just before writing. The rest is commented.
And finally, a working example can be found here: https://repl.it/#randycasburn/Parse-CSV-and-Find-row-by-column-value-ID
EDIT: The code will now return all rows except the found id, in essence deleting the row (per the OP's comment request).
EDIT2: Now outputting to a new CSV file, per request.
We need to update all the documents in a collection to change their shape. We'd like to record an audit of the change in a different collection, where we will store a document containing the old and new versions. To make this change atomic, we are using a stored procedure.
The issue we are facing is updating another collection from a stored procedure: the audit document is always written into the collection the stored procedure belongs to.
I have written up a sample stored procedure:
function sample(prefix) {
    var context = getContext();
    var fooCollection = context.getCollection();
    var barCollection = context.getCollection("BarCollection");

    // Query documents and take the 1st item.
    var isAccepted = fooCollection.queryDocuments(
        fooCollection.getSelfLink(),
        'SELECT * FROM root r',
        function (err, feed, options) {
            if (err) throw err;
            // Check the feed and if empty, set the body to 'no docs found',
            // else take the 1st element from the feed.
            if (!feed || !feed.length) context.getResponse().setBody('no docs found');
            else {
                var fooDoc = feed[0];
                var barDoc = {
                    "foo" : fooDoc,
                    "bar" : "bar"
                };
                var isAccepted2 = barCollection.createDocument(barCollection.getSelfLink(), barDoc);
                if (!isAccepted2) throw new Error('The query was not accepted by the server.');
            }
        });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}
In this sample, my stored procedure is saved in the FooCollection. I get a document and try to save a copy into the BarCollection, but the new document is always saved into the FooCollection.
Is this scenario supported in DocumentDB? If so, what changes do I need to make to the stored procedure to make it work?
DocumentDB stored procedures are collection-scoped. You will not be able to write audit info to a separate collection; you'd need to write to the same collection.
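To illustrate the workaround, here is a sketch (not from the original answer) that writes the audit document into the same collection the stored procedure lives in, tagging it with a docType marker (a hypothetical convention, not a DocumentDB feature) so audit records can be filtered out of later queries:
function sampleSameCollection() {
    var context = getContext();
    var collection = context.getCollection();

    var isAccepted = collection.queryDocuments(
        collection.getSelfLink(),
        // Skip audit records written by earlier runs.
        'SELECT * FROM root r WHERE r.docType != "audit"',
        function (err, feed, options) {
            if (err) throw err;
            if (!feed || !feed.length) return context.getResponse().setBody('no docs found');

            var fooDoc = feed[0];
            // The audit record lives alongside the data it describes.
            var auditDoc = { "docType": "audit", "old": fooDoc, "new": fooDoc };
            var accepted = collection.createDocument(collection.getSelfLink(), auditDoc);
            if (!accepted) throw new Error('createDocument was not accepted by the server.');
        });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}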
I'm working on a JavaScript file to get a JSON dump of an entire MySQL database, running server-side. I found and am using the MySQL driver for node.js (https://www.npmjs.com/package/mysql) for queries, and it has been straightforward enough to start with. My issue is that I need to run multiple queries and combine the results from all of them into a single JSON file, and I can't quite get that to work. I'm entirely new to JavaScript (I had basically never touched it before now), so it's probably a relatively simple solution that I'm just missing.
Currently I run a query of 'SHOW TABLES' to get a list of all the tables (this can change, so I can't just assume a constant list). I then want to loop through the list and call 'SELECT * FROM table_name' for each table, combining the results as I go to get one big JSON document. Unfortunately I haven't figured out how to get the code to finish all the queries before trying to combine them, so it keeps returning 'undefined' for all the results. Here is what I currently have:
var mysql = require('mysql');
var fs = require('fs');

var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'pass',
    database: 'test_data'
});

connection.connect();

connection.query('SHOW TABLES;', function(err, results, fields) {
    if (err) throw err;
    var name = fields[0].name;
    var database_json = get_table(results[0][name]);
    for (i = 1; i < results.length; i++) {
        var table_name = results[i][name];
        var table_json = get_table(table_name);
        database_json = database_json.concat(table_json);
    }
    fs.writeFile('test_data.json', JSON.stringify(database_json), function (err) {
        if (err) throw err;
    });
    connection.end();
});

function get_table(table_name) {
    connection.query('select * from ' + table_name + ';', function(err, results, fields) {
        if (err) throw err;
        return results;
    });
}
This gets the table list and goes through all of it with no issue, and the information returned by the second query is correct if I just do a console.log(results) inside the query callback, but the for loop keeps going before any query is completed, so table_json just ends up being undefined. I really think this must have an easy solution (probably something with callbacks, which I don't quite understand fully yet), but I keep stumbling.
Thanks for the help.
I'm guessing that this is for some sort of maintenance function and not a piece that you need inside your application, so you're probably safe to do this asynchronously. The async module is available here: https://github.com/caolan/async
You can also use Q promises, available here: https://github.com/kriskowal/q
This answer describes both approaches pretty well: Simplest way to wait some asynchronous tasks complete, in Javascript?
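For completeness, here is a minimal sketch of the promise-based approach using native Promises instead of the Q library linked above (credentials, the SHOW TABLES trick, and the output file are taken from the question; this is one way to do it, not the only way). Each query is wrapped in a Promise, and Promise.all waits until every SELECT has finished before the combined JSON is written:
var mysql = require('mysql');
var fs = require('fs');

var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'pass',
    database: 'test_data'
});

// Wrap a single query in a Promise so its results can be awaited.
function query(sql) {
    return new Promise(function (resolve, reject) {
        connection.query(sql, function (err, results, fields) {
            if (err) reject(err);
            else resolve({ results: results, fields: fields });
        });
    });
}

connection.connect();

query('SHOW TABLES;')
    .then(function (tables) {
        var name = tables.fields[0].name;
        // Fire one SELECT per table and wait for all of them to finish.
        var selects = tables.results.map(function (row) {
            return query('SELECT * FROM `' + row[name] + '`;');
        });
        return Promise.all(selects);
    })
    .then(function (tableResults) {
        // Combine the rows of every table into one array, then write it out.
        var database_json = [];
        tableResults.forEach(function (t) {
            database_json = database_json.concat(t.results);
        });
        fs.writeFile('test_data.json', JSON.stringify(database_json), function (err) {
            if (err) throw err;
            connection.end();
        });
    })
    .catch(function (err) {
        connection.end();
        throw err;
    });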