i am trying go convert my json array in to simple (numbered)values.
getting them from database(.find), the problem is they are not converting with json.parse. am i doing something wrong?
Voltage.find({},function (err, data) {
var jsontext = data;
var parsedData = JSON.parse(jsontext);
res.json(parsedData);
console.log(parsedData);
});
ths is the console.log for the session, i was hoping for just: 333, 333, 333 etc.
[{"_id":"56f3c19a0298308405d60464","temp":333,"__v":0},{"_id":"56f3c1ee7ec57884068dcb2c","temp":333,"__v":0},{"_id":"56f3c4467ec57884068dcb2d","temp":333,"__v":0},{"_id":"56f3d80191a3c68c138bf04d","temp":337,"__v":0},{"_id":"56f3da3f06cefa781763fb21","temp":337,"__v":0}]
it is the temp values i am trying to get out to send to my front end only. i am using mongooose, express.js and node.js with a mongodb also.
thanks for looking.
One thing you could do is deselect them in the query:
Voltage.find().select('-_id -__v').exec(function (err, data) {
var jsontext = data;
var parsedData = JSON.parse(jsontext);
res.json(parsedData);
console.log(parsedData);
});
Read up on the select method here.
Related
I have two CSV files, one with routing steps and one with a list of ids. I need to add each id to the start of each routing step. I'm using Node.js.
var routeNumFile = '/routing_numbers.csv';
var routeStepFile = '/routing_steps.csv';
const csvToJson = require('csvtojson');
const jsonToCsv = require('json2csv').parse;
const fs = require('fs');
var routeNumArray;
var routeStepArray;
try {
routeNumArray = await csvToJson().fromFile(routeNumFile);
} catch (err) {
console.log("error in reading csv file")
}
try {
routeStepArray = await csvToJson().fromFile(routeStepFile);
} catch (err) {
console.log("error in reading csv file")
}
var outputArray = new Array;
var outputPath = '/gitlab/BSI_Create_Csv_Import/finalOutput.csv';
if (routeNumArray != null && routeStepArray != null) {
Object.keys(routeNumArray).forEach(function (key1) {
Object.keys(routeStepArray).forEach(function (key2) {
var comboObj = Object.assign(routeNumArray[key1], routeStepArray[key2]);
console.log(comboObj);
outputArray.push(comboObj);
});
});
}
console.log(outputArray);
var csv = jsonToCsv(outputArray);
fs.writeFileSync(outputPath, csv);
The output from console.log(comboObj) is what I want. However, when I put that into an array, I just get the very last entry in the routing steps CSV over and over. If I write it using a stream directly where the comboObj is created, it mostly works. If I do that I convert the object to CSV first and the output leaves me with the headings duplicated at the end of every line. It writes much cleaner from an array of JSON objects to a CSV.
So, I'm trying to get all of the combined objects together in an array so I can convert it all to CSV and write it to a file. Can anyone tell me what's going wrong with my method?
You should create a new object and assign properties to it, like this:
var comboObj = Object.assign({}, routeNumArray[key1], routeStepArray[key2]);
I have created a little application that implemented a JSON file as the data store. I'm trying to learn about MEAN, so I'm trying to convert it to a NODE.js app.
I have created a mongoDB importing my JSON file using mongoimport. The db is there and connected. When I put http://localhost:3000/api/characters into the browser it returns JSON for all the characters.
I have built a connection string and required it into my controller as follows...
// FROM node app characters.controller.js
var dbconn = require('../data/dbconnection.js');
var characterData = require('../data/characters.json');
module.exports.charactersGetAll = function(req, res) {
var db = dbconn.get();
var collection = db.collection('characters');
// Get the documents from the collection
// Need to use to Array() as its only a cursor if you don't
collection
.find()
.toArray(function(err, docs) {
console.log("Found characters!", docs);
res
.status(200)
.json(docs);
});
//console.log('db', db);
//console.log("GET the Characters");
//console.log(req.query);
res
.status(200)
.json(characterData);
};
module.exports.charactersGetOne = function(req, res) {
var charId = req.params.id;
var thisChar = characterData[charId];
console.log("GET characterId", charId);
res
.status(200)
.json(thisChar);
};
From the non-NODE version of the app, I call in the JSON data as follows in my main.js file:
//FROM TRADITIONAL HTML/JS application
// Get JSON and callback so JSON can be stored globally
$.getJSON('people.json', callback);
// Populate faces and names in HTML
function callback(data) {
/*optional stuff to do after success */
var $charNames = $('.char-names');
var $faceImage = $('.faces');
$.each(data, function(index, val) {
console.log("success");
/* iterate through array or object */
/* .eq() method constructs new object from one element from set */
$charNames.eq(index).text(val.name);
$faceImage.eq(index).attr('src', val.image);
//Push all JSON to array
allTheCharacters.push(val);
allTheCharactersComp.push(val);
});
}
What I want to ask is – Is there a simple way I can access the mongoDB in the non-NODE main.js file instead of using the $.getJSON method and how would I add/adapt this for the node characters.controller.js
Hopefully, I am making sense. Apologies in advance for any misunderstanding.
Here is the situation:
I have a Rails app that scrapes a website and outputs valid JSON to a restful API endpoint.
So far I can get the JSON with my Node.js script, but I cannot store the values I need to local variables.
Here is the json:
[{"cohort":"1507","teacher":"Josh M."}]
I would like to store "1507" in a variable as well as "Josh M."
My current script is:
var http = require('http');
var url = 'http://localhost:8080/api/v1/classroom_bs';
http.get(url, function(res){
var body = '';
res.on('data', function(chunk){
body += chunk;
});
res.on('end', function(){
var responseB = JSON.parse(body);
var responseBStr = JSON.stringify(body);
var jsonB = JSON.parse(responseBStr);
console.log(responseB);
console.log(responseBStr);
console.log(jsonB);
});
}).on('error', function(e){
console.log("Error: ", e);
});
Any help would be greatly appreciated!
I have tried some functions I found on SO, but for some reason all my console.log(Object.keys) return numbers like "1", "2", "3", etc..
The reason I need to store these as variables is that I am going to output those values to an LCD screen using the johnny5 library via an Arduino.
The rails app that you are using to send to node is an array of length 1, which is an object. To access that object you have to say which array element you are accessing. In this case array[0]. It appears your rails scraper is populating an array.
To access the variables you mentioned you would simply
//Access that 1507
var variable1 = payloadFromRails[0].cohort;
//Access that teach, Josh M.
var variable2 = payloadFromRails[0].teacher;
Try selecting object at index 0 of parsed json string
var response = '[{"cohort":"1507","teacher":"Josh M."}]';
var responseB = JSON.parse(response);
var keys = Object.keys(responseB[0]);
var cohort = responseB[0][keys[0]];
var teacher = responseB[0][keys[1]];
console.log(cohort, teacher)
I have a text file with a ton of values that I want to convert to meaningful JSON using node.js fs module.
I want to store the first value of every line in an array unless the value is already present.
7000111,-1.31349,36.699959,1004,
7000111,-1.311739,36.698589,1005,
8002311,-1.262245,36.765884,2020,
8002311,-1.261135,36.767544,2021,
So for this case, I'd like to write to a file:
[7000111, 8002311]
Here's what I have so far. It writes [] to the file.
var fs = require('fs');
var through = require('through');
var split = require('split');
var shape_ids = [];
var source = fs.createReadStream('data/shapes.txt');
var target = fs.createWriteStream('./output3.txt');
var tr = through(write, end);
source
.pipe(split())
.pipe(tr)
// Function definitions
function write(line){
var line = line.toString();
var splitted = line.split(',');
// if it's not in array
if (shape_ids.indexOf(splitted[0]) > -1){
shape_ids.push(splitted[0]);
}
}
function end(){
shape_ids = JSON.stringify(shape_ids);
target.write(shape_ids);
console.log('data written');
}
The code is using the split and through modules
How do I store values in the array and write the populated array to the file?
== === ====== =================
Update:
This is what I want to do, but it's in Ruby:
shape_ids = []
File.open("data/shapes.txt").readlines.each do |line|
data = line.split(',')
shape_id = data.first
if !shape_ids.include? shape_id
shape_ids.push(shape_id)
end
end
puts shape_ids # array of unique shape_ids
Can I do this in javascript?
Unless you are super comfortable with the new Stream API in node, use the event-stream module to accomplish this:
var fs = require('fs');
var es = require('event-stream');
function getIds(src, target, callback) {
var uniqueIDs = [];
es.pipeline(
fs.createReadStream(src),
es.split(),
es.map(function (line, done) {
var id = line.split(',').shift();
if (uniqueIDs.indexOf(id) > -1) return done();
uniqueIDs.push(id);
done(null);
}),
es.wait(function (err, text) {
// Here we create our JSON — keep in mind that valid JSON starts
// as an object, not an array
var data = JSON.stringify({ ids: uniqueIDs});
fs.writeFile(target, data, function (err) {
if ('function' == typeof callback) callback(err);
});
})
);
}
getIds('./values.txt', './output.json');
Unfortunately there is no "easy" way to keep this as a pure stream flow so you have to "wait" until the data is done filtering before turning into a JSON string. Hope that helps!
I am using MongoDB native driver for NodeJS, and am having trouble converting ObjectID to a string.
My code looks like:
db.collection('user', function(err, collection) {
collection.insert(data, {safe:true}, function(err, result) {
var myid = result._id.toString();
console.log(myid);
)};
});
I have tried various suggestions on StackOverflow like:
myid = result._id.toString();
myid = result._id.toHexString();
but none of them seemed to work.
I am trying to convert the ObjectID to base64 encoding.
Not sure if I am running into supported functionality under the Mongo native driver.
This work for me:
var ObjectID = require('mongodb').ObjectID;
var idString = '4e4e1638c85e808431000003';
var idObj = new ObjectID(idString);
console.log(idObj);
console.log(idObj.toString());
console.log(idObj.toHexString());
Output:
4e4e1638c85e808431000003
4e4e1638c85e808431000003
4e4e1638c85e808431000003
insert returns an array of results (as you can also send an array of objects to be inserted), so your code is trying to get the _id from the array instance rather than the first result:
MongoClient.connect("mongodb://localhost:27017/testdb", function(err, db) {
db.collection("user").insert({name:'wiredprairie'}, function(err, result) {
if (result && result.length > 0) {
var myid = result[0]._id.toString();
console.log(myid);
}
});
});
Also, you won't need to base64 encode the result of calling toString on an ObjectId as it's returned as a hex number already. You could also call: result[0]._id.toHexString() to get the Hex value directly (toString just wraps toHexString).