How to insert data into JSON after crawling through CasperJS? - javascript

I wrote code that parses a lot of words (innerHTML) from some webpages, and I'd like to insert the data into a JSON file directly.
Here is my JS code:
var words = [];
var casper = require('casper').create();

function getWords() {
    var words = document.querySelectorAll('td.subject a');
    return Array.prototype.map.call(words, function(e) {
        return e.innerHTML;
    });
}

casper.start('http://www.todayhumor.co.kr/board/list.php?table=bestofbest', function() {
    words = this.evaluate(getWords);
});

for (var i = 2; i <= 5; i++) {
    casper.thenOpen('http://www.todayhumor.co.kr/board/list.php?table=bestofbest&page=' + i, function() {
        words = words.concat(this.evaluate(getWords));
    });
}

casper.run(function() {
    // echo results in some pretty fashion
    this.echo(words.length + ' words found:');
    this.echo(words.join('\n')).exit();
});
I run this code in the terminal like this:
username#wow:~/workspace/app/assets/javascripts $ casperjs application.js
and the result is (for example)
150 words found:
apple
banana
melon
kiwi
citrus
watermelon
passionfruit
mango
orange
...
So I want to insert this data into the "word" part of my JSON file (example JSON below),
and have the other columns ("type": "fruit" and "spell") added automatically:
{ "my_initial_words": [
{
"type": "fruit",
"word": "apple",
"spell": "ap"
},
{
"type": "fruit",
"word": "banana",
"spell": "ba"
},
{
"type": "fruit",
"word": "melon",
"spell": "me"
}
]
}
----------------------------------------------------------------------------
Thanks for adding more answers!
But I couldn't work out where I should put this code.
Could you tell me once more which part of the code you gave me does the "saving the results to a JSON file"? I ask because I have to read the JSON file (makeyourap.json) in my seeds.rb like this:
require 'json'
file = File.open(Rails.root.join('db','makeyourap.json'))
contents = file.read
json = ActiveSupport::JSON.decode(contents)["my_initial_words"]

So, something like this?
function makeTypeObject(name, type) {
    return {
        type: type,
        word: name, // "word" key, to match the target JSON above
        spell: name.substr(0, 2)
    };
}

var wordDesc = words.map(function (word) {
    return makeTypeObject(word, "fruit");
});

var finalObject = {
    my_initial_words: wordDesc
};

var jsonString = JSON.stringify(finalObject);
// if you want pretty-printing, try JSON.stringify(finalObject, null, "\t");
I hope this helps.

Write to file via casper
If you want to have a file from which you read and write, appending content, you can do it like this:
var fs = require('fs');
var FILENAME = 'makeyourap.json';

function add_new_fruits(fruits) {
    var data;
    if (fs.isFile(FILENAME)) {
        data = fs.read(FILENAME);
    } else {
        data = JSON.stringify({ 'my_initial_words': [] });
    }
    var json = JSON.parse(data);
    fruits.forEach(function(word) {
        json.my_initial_words.push({
            "type": "fruit",
            "word": word, // "word" key, to match the target JSON
            "spell": word.slice(0, 2)
        });
    });
    data = JSON.stringify(json, null, '\t');
    fs.write(FILENAME, data, "w");
}
Use this instead of the older this.echo approach. Just call the script as before:
casperjs application.js
This either reads the object from a file, or creates it if it does not exist. Then, it appends each new object from the new fruits (including duplicates), and writes it back to FILENAME.
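For context, a minimal way to wire this into the existing script would be to call it from casper.run, assuming the words array from the question (a sketch, not from the original answer):

casper.run(function() {
    // append the scraped words to makeyourap.json instead of echoing them
    add_new_fruits(words);
    this.exit();
});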
Previous approach: how to roll your own
Create the object
First, you want to create an object that has only the property my_initial_words, with values as above.
You can create a function via
function createFinal(wordArray) {
    var out = [];
    wordArray.forEach(function(word) {
        out.push({ "type": "fruit", "word": word, "spell": word.slice(0, 2) }); // "word" key, to match the target JSON
    });
    return out;
}
to create the array. Then, create the object via
var my_object = { "my_initial_words": createFinal(words) };
To JSON
JavaScript has a built-in JSON object. With a JavaScript object like
var my_object = { "my_initial_words": ...
as above, use
JSON.stringify(my_object)
to get the JSON representation to write.
Older: write to file via redirection
Before, you had
this.echo(words.join('\n')).exit();
which gave you the basic list. Using this.echo, try replacing this by
var my_object = { "my_initial_words": createFinal(words) };
this.echo(JSON.stringify(my_object)).exit();
This prints to standard output. Just remove the other this.echo line (the one printing "150 words found") and redirect the output via
casperjs application.js > makeyourap.json
If you want to write to file in casperjs, look at write-results-into-a-file-using-casperjs.

Related

Split a Javascript string by comma, but ignore commas that would be inside a string [duplicate]

My CSV data looks like this:
heading1,heading2,heading3,heading4,heading5
value1_1,value2_1,value3_1,value4_1,value5_1
value1_2,value2_2,value3_2,value4_2,value5_2
...
How do you read this data and convert to an array like this using JavaScript?:
[
    heading1: value1_1,
    heading2: value2_1,
    heading3: value3_1,
    heading4: value4_1,
    heading5: value5_1
],[
    heading1: value1_2,
    heading2: value2_2,
    heading3: value3_2,
    heading4: value4_2,
    heading5: value5_2
]
....
I've tried this code but no luck!:
<script type="text/javascript">
    var allText = [];
    var allTextLines = [];
    var Lines = [];
    var txtFile = new XMLHttpRequest();
    txtFile.open("GET", "file://d:/data.txt", true);
    txtFile.onreadystatechange = function() {
        allText = txtFile.responseText;
        allTextLines = allText.split(/\r\n|\n/);
    };
    document.write(allTextLines);
    document.write(allText);
    document.write(txtFile);
</script>
No need to write your own...
The jQuery-CSV library has a function called $.csv.toObjects(csv) that does the mapping automatically.
Note: The library is designed to handle any CSV data that is RFC 4180 compliant, including all of the nasty edge cases that most 'simple' solutions overlook.
Like @Blazemonger already stated, first you need to add line breaks to make the data valid CSV.
Using the following dataset:
heading1,heading2,heading3,heading4,heading5
value1_1,value2_1,value3_1,value4_1,value5_1
value1_2,value2_2,value3_2,value4_2,value5_2
Use the code:
var data = $.csv.toObjects(csv);
The output saved in 'data' will be:
[
    { heading1: "value1_1", heading2: "value2_1", heading3: "value3_1", heading4: "value4_1", heading5: "value5_1" },
    { heading1: "value1_2", heading2: "value2_2", heading3: "value3_2", heading4: "value4_2", heading5: "value5_2" }
]
Note: Technically, the way you wrote the key-value mapping is invalid JavaScript. The objects containing the key-value pairs should be wrapped in braces ({ }).
If you want to try it out for yourself, I suggest you take a look at the Basic Usage Demonstration under the 'toObjects()' tab.
Disclaimer: I'm the original author of jQuery-CSV.
Update:
Edited to use the dataset that the OP provided, and included a link to the demo where the data can be tested for validity.
Update 2:
Due to the shuttering of Google Code, jquery-csv has moved to GitHub.
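As a quick illustration of the edge-case handling mentioned above, a quoted field containing a comma survives parsing intact (my own example, not from the original answer):

var csv = 'name,note\n"Smith, John","likes fruit"';
var data = $.csv.toObjects(csv);
// data: [ { name: "Smith, John", note: "likes fruit" } ]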
NOTE: I concocted this solution before I was reminded about all the "special cases" that can occur in a valid CSV file, like escaped quotes. I'm leaving my answer for those who want something quick and dirty, but I recommend Evan's answer for accuracy.
This code will work when your data.txt file is one long string of comma-separated entries, with no newlines:
data.txt:
heading1,heading2,heading3,heading4,heading5,value1_1,...,value5_2
javascript:
$(document).ready(function() {
    $.ajax({
        type: "GET",
        url: "data.txt",
        dataType: "text",
        success: function(data) { processData(data); }
    });
});

function processData(allText) {
    var record_num = 5; // or however many elements there are in each row
    var allTextLines = allText.split(/\r\n|\n/);
    var entries = allTextLines[0].split(',');
    var lines = [];
    var headings = entries.splice(0, record_num);
    while (entries.length > 0) {
        var tarr = [];
        for (var j = 0; j < record_num; j++) {
            tarr.push(headings[j] + ":" + entries.shift());
        }
        lines.push(tarr);
    }
    // alert(lines);
}
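If it helps to see the shape of the result, here is my own trace (not part of the original answer) for the one-line data.txt above:

// `lines` ends up as an array of "heading:value" string arrays:
// [
//   ["heading1:value1_1", "heading2:value2_1", "heading3:value3_1",
//    "heading4:value4_1", "heading5:value5_1"],
//   ["heading1:value1_2", "heading2:value2_2", "heading3:value3_2",
//    "heading4:value4_2", "heading5:value5_2"]
// ]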
The following code will work on a "true" CSV file with linebreaks between each set of records:
data.txt:
heading1,heading2,heading3,heading4,heading5
value1_1,value2_1,value3_1,value4_1,value5_1
value1_2,value2_2,value3_2,value4_2,value5_2
javascript:
$(document).ready(function() {
    $.ajax({
        type: "GET",
        url: "data.txt",
        dataType: "text",
        success: function(data) { processData(data); }
    });
});

function processData(allText) {
    var allTextLines = allText.split(/\r\n|\n/);
    var headers = allTextLines[0].split(',');
    var lines = [];
    for (var i = 1; i < allTextLines.length; i++) {
        var data = allTextLines[i].split(',');
        if (data.length == headers.length) {
            var tarr = [];
            for (var j = 0; j < headers.length; j++) {
                tarr.push(headers[j] + ":" + data[j]);
            }
            lines.push(tarr);
        }
    }
    // alert(lines);
}
http://jsfiddle.net/mblase75/dcqxr/
Don't split on commas -- it won't work for most CSV files, and this question has wayyyy too many views for the asker's kind of input data to apply to everyone. Parsing CSV is kind of scary since there's no truly official standard, and lots of delimited text writers don't consider edge cases.
This question is old, but I believe there's a better solution now that Papa Parse is available. It's a library I wrote, with help from contributors, that parses CSV text or files. It's the only JS library I know of that supports files gigabytes in size. It also handles malformed input gracefully.
1 GB file parsed in 1 minute.
(Update: With Papa Parse 4, the same file took only about 30 seconds in Firefox. Papa Parse 4 is now the fastest known CSV parser for the browser.)
Parsing text is very easy:
var data = Papa.parse(csvString);
Parsing files is also easy:
Papa.parse(file, {
    complete: function(results) {
        console.log(results);
    }
});
Streaming files is similar (here's an example that streams a remote file):
Papa.parse("http://example.com/bigfoo.csv", {
download: true,
step: function(row) {
console.log("Row:", row.data);
},
complete: function() {
console.log("All done!");
}
});
If your web page locks up during parsing, Papa can use web workers to keep your web site reactive.
Papa can auto-detect delimiters and match values up with header columns, if a header row is present. It can also turn numeric values into actual number types. It appropriately parses line breaks and quotes and other weird situations, and even handles malformed input as robustly as possible. I've drawn on inspiration from existing libraries to make Papa, so props to other JS implementations.
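A sketch of how those features map onto Papa Parse's documented config options (the file handle is assumed to come from a file input):

Papa.parse(file, {
    header: true,         // match values up with header columns
    dynamicTyping: true,  // turn numeric values into actual number types
    worker: true,         // parse in a web worker so the page stays responsive
    skipEmptyLines: true,
    complete: function(results) {
        console.log(results.data);   // array of row objects
        console.log(results.errors); // reports of any malformed rows
    }
});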
I am using d3.js for parsing CSV files. It's very easy to use.
Here are the docs.
Steps:
npm install d3-request
Using ES6:
import { csv } from 'd3-request';
import url from 'path/to/data.csv';

csv(url, function(err, data) {
    console.log(data);
})
Please see docs for more.
Update:
d3-request is deprecated; you can use d3-fetch instead.
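With d3-fetch the call is promise-based; a minimal sketch (the path is illustrative):

import { csv } from 'd3-fetch';

csv('path/to/data.csv').then(data => {
    console.log(data); // array of row objects, plus a data.columns property
});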
Here's a JavaScript function that parses CSV data, accounting for commas found inside quotes.
// Parse a CSV row, accounting for commas inside quotes
function parse(row) {
    var insideQuote = false,
        entries = [],
        entry = [];
    row.split('').forEach(function (character) {
        if (character === '"') {
            insideQuote = !insideQuote;
        } else {
            if (character == "," && !insideQuote) {
                entries.push(entry.join(''));
                entry = [];
            } else {
                entry.push(character);
            }
        }
    });
    entries.push(entry.join(''));
    return entries;
}
Example use of the function to parse a CSV file that looks like this:
"foo, the column",bar
2,3
"4, the value",5
into arrays:
// csv could contain the content read from a csv file
var csv = '"foo, the column",bar\n2,3\n"4, the value",5',
    // Split the input into lines
    lines = csv.split('\n'),
    // Extract column names from the first line
    columnNamesLine = lines[0],
    columnNames = parse(columnNamesLine),
    // Extract data from subsequent lines
    dataLines = lines.slice(1),
    data = dataLines.map(parse);

// Prints ["foo, the column","bar"]
console.log(JSON.stringify(columnNames));

// Prints [["2","3"],["4, the value","5"]]
console.log(JSON.stringify(data));
Here's how you can transform the data into objects, like D3's csv parser (which is a solid third party solution):
var dataObjects = data.map(function (arr) {
    var dataObject = {};
    columnNames.forEach(function(columnName, i) {
        dataObject[columnName] = arr[i];
    });
    return dataObject;
});

// Prints [{"foo":"2","bar":"3"},{"foo":"4","bar":"5"}]
console.log(JSON.stringify(dataObjects));
Here's a working fiddle of this code.
Enjoy! --Curran
You can use PapaParse to help.
https://www.papaparse.com/
Here is a CodePen.
https://codepen.io/sandro-wiggers/pen/VxrxNJ
Papa.parse(e, {
    header: true,
    before: function(file, inputElem) { console.log('Attempting to Parse...') },
    error: function(err, file, inputElem, reason) { console.log(err); },
    complete: function(results, file) { $.PAYLOAD = results; }
});
If you want to solve this without using Ajax, use the FileReader() Web API.
Example implementation:
Select .csv file
See output
function readSingleFile(e) {
    var file = e.target.files[0];
    if (!file) {
        return;
    }
    var reader = new FileReader();
    reader.onload = function(e) {
        var contents = e.target.result;
        displayContents(contents);
        displayParsed(contents);
    };
    reader.readAsText(file);
}

function displayContents(contents) {
    var element = document.getElementById('file-content');
    element.textContent = contents;
}

function displayParsed(contents) {
    const element = document.getElementById('file-parsed');
    const json = contents.split(',');
    element.textContent = JSON.stringify(json);
}

document.getElementById('file-input').addEventListener('change', readSingleFile, false);
<input type="file" id="file-input" />
<h3>Raw contents of the file:</h3>
<pre id="file-content">No data yet.</pre>
<h3>Parsed file contents:</h3>
<pre id="file-parsed">No data yet.</pre>
function CSVParse(csvFile) {
    this.rows = [];
    // note the doubled backslashes: in a string literal, "\s" would collapse to plain "s"
    var fieldRegEx = new RegExp('(?:\\s*"((?:""|[^"])*)"\\s*|\\s*((?:""|[^",\\r\\n])*(?:""|[^"\\s,\\r\\n]))?\\s*)(,|[\\r\\n]+|$)', "g");
    var row = [];
    var currMatch = null;
    while (currMatch = fieldRegEx.exec(csvFile)) {
        row.push([currMatch[1], currMatch[2]].join('')); // concatenate with potential nulls
        if (currMatch[3] != ',') {
            this.rows.push(row);
            row = [];
        }
        if (currMatch[3].length == 0)
            break;
    }
}
I like to have the regex do as much as possible. This regex treats every item as either quoted or unquoted, followed by either a column delimiter or a row delimiter, or the end of the text.
That is why the last condition is needed: without it, this would be an infinite loop, since the pattern can match a zero-length field (totally valid in CSV). But since $ is a zero-length assertion, it won't progress to a non-match, so the break ends the loop.
And FYI, I had to make the second alternative exclude quotes surrounding the value; seems like it was executing before the first alternative on my javascript engine and considering the quotes as part of the unquoted value. I won't ask -- just got it to work.
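A usage sketch, on the assumption that the constructor is meant to receive the CSV text directly (the sample input is my own):

var parsed = new CSVParse('a,"b,c",d\n1,2,3');
console.log(parsed.rows);
// expected: [["a", "b,c", "d"], ["1", "2", "3"]]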
Per the accepted answer,
I got this to work by changing the 1 to a 0 here:
for (var i=1; i<allTextLines.length; i++) {
changed to
for (var i=0; i<allTextLines.length; i++) {
A file consisting of one continuous line is computed as having an allTextLines.length of 1. So if the loop starts at 1 and only runs while the counter is less than 1, it never runs. Hence the blank alert box.
$(function() {
    $("#upload").bind("click", function() {
        var regex = /^([a-zA-Z0-9\s_\\.\-:])+(.csv|.xlsx)$/;
        if (regex.test($("#fileUpload").val().toLowerCase())) {
            if (typeof (FileReader) != "undefined") {
                var reader = new FileReader();
                reader.onload = function(e) {
                    var customers = new Array();
                    var rows = e.target.result.split("\r\n");
                    for (var i = 0; i < rows.length - 1; i++) {
                        var cells = rows[i].split(",");
                        if (cells[0] == "" || cells[0] == undefined) {
                            // continuation row: append the order to the previous customer
                            var s = customers[customers.length - 1];
                            s.Ord.push(cells[2]);
                        } else {
                            var dt = customers.find(x => x.Number === cells[0]);
                            if (dt == undefined) {
                                if (cells.length > 1) {
                                    var customer = {};
                                    customer.Number = cells[0];
                                    customer.Name = cells[1];
                                    customer.Ord = new Array();
                                    customer.Ord.push(cells[2]);
                                    customer.Point_ID = cells[3];
                                    customer.Point_Name = cells[4];
                                    customer.Point_Type = cells[5];
                                    customer.Set_ORD = cells[6];
                                    customers.push(customer);
                                }
                            } else {
                                var dtt = dt;
                                dtt.Ord.push(cells[2]);
                            }
                        }
                    }
                };
                // The original snippet was truncated here; the readAsText call and the
                // closing braces are my reconstruction so the onload handler actually fires.
                reader.readAsText($("#fileUpload")[0].files[0]);
            }
        }
    });
});
Actually, you can use a lightweight library called any-text.
Install the dependency:
npm i -D any-text
Use a custom command to read files:
var reader = require('any-text');

reader.getText(`path-to-file`).then(function (data) {
    console.log(data);
});
or use async/await:
var reader = require('any-text');
const chai = require('chai');
const expect = chai.expect;

describe('file reader checks', () => {
    it('check csv file content', async () => {
        expect(
            await reader.getText(`${process.cwd()}/test/files/dummy.csv`)
        ).to.contains('Lorem ipsum');
    });
});
This is an old question, and in 2022 there are many ways to achieve this. First, I think D3 is one of the best alternatives for data manipulation. It's open source and free to use, and it's also modular, so we can import just the fetch module if we want.
Here is a basic example. We will use the legacy mode, so I will import the entire D3 library. Now let's call the d3.csv function, and we are done. This function internally calls the fetch method, so it can open dataURLs, URLs, files, blobs, and so on.
const fileInput = document.getElementById('csv')
const outElement = document.getElementById('out')

const previewCSVData = async dataurl => {
    const d = await d3.csv(dataurl)
    console.log({ d })
    outElement.textContent = d.columns
}

const readFile = e => {
    const file = fileInput.files[0]
    const reader = new FileReader()
    reader.onload = () => {
        const dataUrl = reader.result;
        previewCSVData(dataUrl)
    }
    reader.readAsDataURL(file)
}

fileInput.onchange = readFile
<script type="text/javascript" src="https://unpkg.com/d3#7.6.1/dist/d3.min.js"></script>
<div>
<p>Select local CSV File:</p>
<input id="csv" type="file" accept=".csv">
</div>
<pre id="out"><p>File headers will appear here</p></pre>
If we don't want to use any library, just plain JavaScript (vanilla JS), and we have managed to get the text content of a file as data, we can implement a simple parser. We split the data into an array of lines, extract the first line and split it into a headers array, and treat the remaining lines as the rows to process. We then map each line, extract its values, and create a row object from an array built by mapping each header to its corresponding value from values[index].
NOTE:
We are also going to use a little trick: arrays in JavaScript are objects, so they can carry extra attributes. We will define an attribute rows.headers and assign the headers to it.
const data = `heading_1,heading_2,heading_3,heading_4,heading_5
value_1_1,value_2_1,value_3_1,value_4_1,value_5_1
value_1_2,value_2_2,value_3_2,value_4_2,value_5_2
value_1_3,value_2_3,value_3_3,value_4_3,value_5_3`

const csvParser = data => {
    const text = data.split(/\r\n|\n/)
    const [first, ...lines] = text
    const headers = first.split(',')
    const rows = []
    rows.headers = headers
    lines.map(line => {
        const values = line.split(',')
        const row = Object.fromEntries(headers.map((header, i) => [header, values[i]]))
        rows.push(row)
    })
    return rows
}

const d = csvParser(data)
// Accessing the headers attribute
const headers = d.headers
console.log({ headers })
console.log({ d })
Finally, let's implement a vanilla JS file loader that uses fetch to load and parse the CSV file.
const fetchFile = async dataURL => {
    return await fetch(dataURL).then(response => response.text())
}

const csvParser = data => {
    const text = data.split(/\r\n|\n/)
    const [first, ...lines] = text
    const headers = first.split(',')
    const rows = []
    rows.headers = headers
    lines.map(line => {
        const values = line.split(',')
        const row = Object.fromEntries(headers.map((header, i) => [header, values[i]]))
        rows.push(row)
    })
    return rows
}

const fileInput = document.getElementById('csv')
const outElement = document.getElementById('out')

const previewCSVData = async dataURL => {
    const data = await fetchFile(dataURL)
    const d = csvParser(data)
    console.log({ d })
    outElement.textContent = d.headers
}

const readFile = e => {
    const file = fileInput.files[0]
    const reader = new FileReader()
    reader.onload = () => {
        const dataURL = reader.result;
        previewCSVData(dataURL)
    }
    reader.readAsDataURL(file)
}

fileInput.onchange = readFile
<script type="text/javascript" src="https://unpkg.com/d3#7.6.1/dist/d3.min.js"></script>
<div>
<p>Select local CSV File:</p>
<input id="csv" type="file" accept=".csv">
</div>
<pre id="out"><p>File contents will appear here</p></pre>
I used this file to test it
Here is another way to read an external CSV into JavaScript (using jQuery).
It's a little more long-winded, but I feel that by reading the data into arrays you can follow the process exactly, which makes for easy troubleshooting.
It might help someone else.
The data file example:
Time,data1,data2,data3
08/11/2015 07:30:16,602,0.009,321
And here is the code:
$(document).ready(function() {
    // AJAX in the data file
    $.ajax({
        type: "GET",
        url: "data.csv",
        dataType: "text",
        success: function(data) { processData(data); }
    });

    // Let's process the data from the data file
    function processData(data) {
        var lines = data.split(/\r\n|\n/);
        // Set up the data arrays
        var time = [];
        var data1 = [];
        var data2 = [];
        var data3 = [];
        var headings = lines[0].split(','); // Split up the first row to get the headings
        for (var j = 1; j < lines.length; j++) {
            var values = lines[j].split(','); // Split up the comma separated values
            // We read the key column and the 1st, 2nd and 3rd data columns
            time.push(values[0]); // Read in as string
            // Recommended to read in as float, since we'll be doing some operations on this later.
            data1.push(parseFloat(values[1]));
            data2.push(parseFloat(values[2]));
            data3.push(parseFloat(values[3]));
        }
        // For display
        var x = 0;
        console.log(headings[0] + " : " + time[x] + headings[1] + " : " + data1[x] + headings[2] + " : " + data2[x] + headings[3] + " : " + data3[x]);
    }
})
Hope this helps someone in the future!
A bit late, but I hope it helps someone.
Some time ago I faced a problem where the string data contained \n in between, and while reading the file it was read as different lines.
Eg.
"Harry\nPotter","21","Gryffindor"
While reading:
Harry
Potter,21,Gryffindor
I used the csvtojson library in my Angular project to solve this problem.
You can read the CSV file as a string using the following code and then pass that string to the csvtojson library, and it will give you a list of JSON objects.
Sample Code:
const csv = require('csvtojson');

if (files && files.length > 0) {
    const file: File = files.item(0);
    const reader: FileReader = new FileReader();
    reader.readAsText(file);
    reader.onload = (e) => {
        const csvs: string = reader.result as string;
        csv({
            output: "json",
            noheader: false
        }).fromString(csvs)
            .preFileLine((fileLine, idx) => {
                // Convert the csv header row to lowercase before parsing the csv file to json
                if (idx === 0) { return fileLine.toLowerCase() }
                return fileLine;
            })
            .then((result) => {
                // list of json objects in result
            });
    }
}
I use jquery-csv to do this, and I provide two examples below.
async function ReadFile(file) {
    return await file.text()
}

function removeExtraSpace(stringData) {
    stringData = stringData.replace(/,( *)/gm, ",") // remove extra space
    stringData = stringData.replace(/^ *| *$/gm, "") // remove space at the beginning and end
    return stringData
}

function simpleTest() {
    let data = `Name, Age, msg
foo, 25, hello world
bar, 18, "!! 🐬 !!"
`
    data = removeExtraSpace(data)
    console.log(data)
    const options = {
        separator: ",", // default ",". (You may want tab "\t" or something else.)
        delimiter: '"', // default "
        headers: true   // default true
    }
    // const myObj = $.csv.toObjects(data, options)
    const myObj = $.csv.toObjects(data) // If you want to use the default options, you can omit them.
    console.log(myObj)
}

window.onload = () => {
    const inputFile = document.getElementById("uploadFile")
    inputFile.onchange = () => {
        const inputValue = inputFile.value
        if (inputValue === "") {
            return
        }
        const selectedFile = document.getElementById('uploadFile').files[0]
        const promise = new Promise(resolve => {
            const fileContent = ReadFile(selectedFile)
            resolve(fileContent)
        })
        promise.then(fileContent => {
            // Use the promise to wait for the file reading to finish.
            console.log(fileContent)
            fileContent = removeExtraSpace(fileContent)
            const myObj = $.csv.toObjects(fileContent)
            console.log(myObj)
        })
    }
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.0/jquery.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery-csv/1.0.11/jquery.csv.min.js"></script>
<label for="uploadFile">Demo 1</label>
<input type="file" id="uploadFile" accept=".csv"/>
<button onclick="simpleTest()">Demo 2</button>
With this function csvToObjs you can transform CSV data entries into an array of objects.

function csvToObjs(string) {
    const lines = string.split(/\r\n|\n/);
    let [headings, ...entries] = lines;
    headings = headings.split(',');
    const objs = [];
    entries.forEach(entry => {
        const values = entry.split(',');
        objs.push(Object.fromEntries(headings.map((head, i) => [head, values[i]])));
    });
    return objs;
}

const data = `heading1,heading2,heading3,heading4,heading5
value1_1,value2_1,value3_1,value4_1,value5_1
value1_2,value2_2,value3_2,value4_2,value5_2`

console.log(csvToObjs(data));

How to merge array into JSON array

I have a JSON file that I need to add comments to and then update the file.
I've created an array for the new comments:
//ADDING NEW COMMENTS
//add new comment within project
$scope.updatecomments = [];
$scope.addnewcomment = function() {
    $scope.updatecomments.push({
        "Author": "test",
        "Text": $scope.NewComment
    })
}
I can post the new comments into the JSON file, but it overrides the past comments.
I have tried to merge the older comments with the new comments with the following:
$scope.updatecomments = [];
$scope.addnewcomment = function() {
    $scope.updatecomments.push({ "Author": "test", "Text": $scope.NewComment }).concat($scope.Comments, $scope.updatecomments);
}

$scope.updatecomments = [].concat($scope.updatecomments,
    $scope.projectDetails.Comments);
$scope.addnewcomment = function() {
    $scope.updatecomments.push({
        "Author": "test",
        "Text": $scope.NewComment
    });
}
I also tried making a new function that, when called, combines the two and then posts the combined array:
$scope.combine = [];
$scope.combineComments = function () {
    var jsonStr = $scope.projectDetails.Comments;
    var obj = JSON.parse(jsonStr);
    obj['Comments'].push({ "Author": "Test", "Text": $scope.NewComment });
    jsonStr = JSON.stringify(obj);
}
I have been going over this for the past few days now and can't seem to get it. Any help would be greatly appreciated!
EDIT
Sample of the data already existing in the JSON file:
{
    "Comments": [{
        "Author": "John Doe",
        "Text": "Work completed"
    }]
}
I want to add to this (the text comes from an HTML input tag), stored as NewComment:
{
    "Comments": [{
        "Author": "Test",
        "Text": "Project flagged"
    }]
}
Edit 2
This is how I'm getting my projects data:
//FIND PROJECTS - ADD TO LIST
$scope.projectList = [];
for (var id = 0; id < 30; id++) {
    var targetURL = 'https://happybuildings.sim.vuw.ac.nz/api/sooleandr/project.' + id + '.json';
    $http.get(targetURL).then(
        function successCall(response) {
            $scope.projectList.push(response.data);
        }
    );
}
I then use this to access the selected information:
//script
$scope.showData = function(x) {
    $scope.projectDetails = x;
};
//html
<ul class='pList'>
    <li ng-repeat='x in projectList' class='pbList'>
        <button class='pbutton' ng-click='showData(x)'>
            <label ng-model='pID'>Project ID: </label>{{x.ProjectID}} <br>
            <label id='pName'>Project Name: </label> {{x.Name}} <br>
            <label id='bID'>Building ID: </label>{{x.BuildingID}}<br>
            <label id='sDate'>Start Date: </label>{{x.StartDate}}
        </button>
    </li>
</ul>
Then I have the following variables to post:
$scope.updateProject = function (projectDetails) {
    var updateproject = {
        "ProjectID": $scope.projectDetails.ProjectID,
        "Name": $scope.projectDetails.Name,
        "BuildingID": $scope.projectDetails.BuildingID,
        "StartDate": $scope.projectDetails.StartDate,
        "EndDate": $scope.projectDetails.EndDate,
        "Status": $scope.projectDetails.Status,
        "ContactPerson": $scope.projectDetails.ContactPerson,
        "Contractor": $scope.projectDetails.Contractor,
        "ProjectManager": $scope.projectDetails.ProjectManager,
        "Works": $scope.projectDetails.works,
        "Comments": $scope.updatecomments,
    };
    $http.post("https://happybuildings.sim.vuw.ac.nz/api/sooleandr/update.project.json", updateproject).then(
        function success() {
            alert("Project Successfully Posted");
        },
        function error() {
            alert("Error: Couldn't post to server");
        }
    )
};
It posts perfectly fine, but it currently overrides the comments. I want to be able to add a new comment and still keep all the past comments, i.e. push/add the new comments into the full posted JSON array.
Hope this makes a bit more sense.
OK, updating my answer after looking at the provided code.
It appears you may be under the impression that $scope.projectDetails.Comments is a JSON string when, in fact, it's the actual Comments array.
I would try this for the addnewcomment function:
//ADDING NEW COMMENTS
//add new comment within project
$scope.updatecomments = undefined;
$scope.addnewcomment = function() {
    $scope.updatecomments = $scope.updatecomments || $scope.projectDetails.Comments;
    $scope.updatecomments.push({
        "Author": "test",
        "Text": $scope.NewComment
    })
}
IF it just so happens to be a JSON string (highly unlikely), then I would update the combine function to this:
$scope.combineComments = function () {
    var jsonStr = $scope.projectDetails.Comments;
    var obj = JSON.parse(jsonStr);
    obj.push({ "Author": "Test", "Text": $scope.NewComment });
    jsonStr = JSON.stringify(obj);
}
EDIT
I'm adding another answer to my original because of the possibility that things will break when there are no updated comments:
//ADDING NEW COMMENTS
//add new comment within project
$scope.addnewcomment = function() {
    $scope.projectDetails.Comments.push({
        "Author": "test",
        "Text": $scope.NewComment
    })
}
Then in the POST, change to:
"Comments":$scope.projectDetails.Comments
I have figured out how to combine the two with:
$scope.combinecomments = [];
$scope.combine = function () {
    $scope.combinecomments.push($scope.projectDetails.Comments);
    $scope.combinecomments.push($scope.updatecomments);
}
Except now it doesn't post the combined comments:
$scope.ProjectID = '$scope.ProjectID';
$scope.Name = '$scope.Name';
$scope.BuildingID = '$scope.BuildingID';
$scope.StartDate = '$scope.StartDate';
$scope.EndDate = '$scope.EndDate';
$scope.Status = '$scope.Status';
$scope.ContactPerson = '$scope.ContactPerson';
$scope.Contractor = '$scope.Contractor';
$scope.ProjectManager = '$scope.ProjectManager';
$scope.Works = '$scope.works';
$scope.Comments = '$scope.comments';

$scope.updateProject = function (projectDetails) {
    var updateproject = {
        "ProjectID": $scope.projectDetails.ProjectID,
        "Name": $scope.projectDetails.Name,
        "BuildingID": $scope.projectDetails.BuildingID,
        "StartDate": $scope.projectDetails.StartDate,
        "EndDate": $scope.projectDetails.EndDate,
        "Status": $scope.projectDetails.Status,
        "ContactPerson": $scope.projectDetails.ContactPerson,
        "Contractor": $scope.projectDetails.Contractor,
        "ProjectManager": $scope.projectDetails.ProjectManager,
        "Works": $scope.projectDetails.works,
        "Comments": $scope.combinecomments,
    };
    $http.post("https://happybuildings.sim.vuw.ac.nz/api/sooleandr/update.project.json", updateproject).then(
        function success() {
            alert("Project Successfully Posted");
        },
        function error() {
            alert("Error: Couldn't post to server");
        }
    )
};
It successfully posts the project except for the comments; it doesn't seem to like my combined array. When I post $scope.updatecomments it posts fine, but not $scope.combinecomments.
I'll make a new question for this.
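For the record, a likely culprit (my reading, not from the thread): Array.prototype.push appends its argument as a single element, so combinecomments becomes an array of two arrays rather than a flat array of comment objects. concat merges the elements instead:

$scope.combine = function () {
    // concat flattens one level, giving a flat array of comment objects
    $scope.combinecomments = [].concat(
        $scope.projectDetails.Comments,
        $scope.updatecomments
    );
};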

Why is previousHash not showing when I console.log my test blockchain?

I am trying to learn more about blockchain by building a small project using JavaScript. Can anyone explain why the previousHash does not show when I console.log the test blockchain?
(npm install --save crypto-js if you need SHA256)
const SHA256 = require('crypto-js/sha256');

class Block {
    //Index: (optional) tells us where the block is in the chain
    //Timestamp: tells us when the block was created
    //Data: can include any kind of data; for a currency you could store details of the transaction (transfer amount, sender id, receiver id)
    //previousHash: string which contains the hash of the block which came before it (ensures data integrity)
    constructor(index, timestamp, data, previousHash = '') {
        this.index = index;
        this.timestamp = timestamp;
        this.data = data;
        this.previousHash = previousHash;
        //Hash of this block
        this.hash = this.calculateHash();
    }

    //Runs the values from the block through a hashing function to create a hash
    calculateHash() {
        return SHA256(this.index + this.previousHash + this.timestamp + JSON.stringify(this.data)).toString();
    }
}

class Blockchain {
    //Initializes the blockchain
    constructor() {
        this.chain = [this.createGenesisBlock];
    }

    //The first block needs to be created manually
    //index, timestamp, data, previousHash (there is none in this case)
    createGenesisBlock() {
        return new Block(0, "01/01/2017", "Genesis block", "0");
    }

    //Returns the most recently added block
    getLatestBlock() {
        return this.chain[this.chain.length - 1];
    }

    //Adds a new block to the chain
    addBlock(newBlock) {
        newBlock.previousHash = this.getLatestBlock().hash;
        newBlock.hash = newBlock.calculateHash
        //The chain is just an array, so you can use regular array methods
        this.chain.push(newBlock);
    }
}

//create an instance of the blockchain
let Coin = new Blockchain();
Coin.addBlock(new Block(1, "10/07/2017", { amount: 4 }));
Coin.addBlock(new Block(2, "12/07/2017", { amount: 10 }));

console.log(JSON.stringify(Coin, null, 4));
I expect the console.logged JSON to contain the previousHash as well as the index, timestamp and data. It contains everything except previousHash.
Thanks for the help; I have been scratching my head for a while trying to figure this out.
You forgot the parentheses on the this.chain = [this.createGenesisBlock()]; line, so the first element of the chain is a reference to a function instead of an instance of a Block. Add the parentheses and it should work.
Additionally, you also forgot the parentheses at another function call, where you are trying to call newBlock.hash = newBlock.calculateHash()
You also don't have hash in your console output. This is the line with the error:
newBlock.hash = newBlock.calculateHash
calculateHash is a function, so it needs to be invoked:
newBlock.hash = newBlock.calculateHash()
Now it is working:
{
    "chain": [
        null,
        {
            "index": 1,
            "timestamp": "10/07/2017",
            "data": {
                "amount": 4
            },
            "hash": "8f84adcf036e9aa052a4d7e689c7b8b06070b851eff535870f5cb8f7d53ab05a"
        },
        {
            "index": 2,
            "timestamp": "12/07/2017",
            "data": {
                "amount": 10
            },
            "previousHash": "8f84adcf036e9aa052a4d7e689c7b8b06070b851eff535870f5cb8f7d53ab05a",
            "hash": "a2479e7df8f2a61f97f3ae4830aff93c0d43041b4a7cbb8079c2309a915d8945"
        }
    ]
}
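Putting the two fixes from the answers side by side (both are the parenthesis additions already described above, nothing new):

// In the Blockchain constructor: call the function instead of storing a reference
this.chain = [this.createGenesisBlock()];

// In addBlock: invoke calculateHash so the new block gets a real hash
newBlock.hash = newBlock.calculateHash();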

Adding things into an exsisting array in a json file, microDB

I'm trying to see if it's possible to add to an already-created object in the MicroDB file.
I've tried using .push(), but I'm having no luck.
The file has this object in already:
{
    "home": {
        "location": "walsall",
        "time": "12:00",
        "date": "12/12/17",
        "gym": "home",
        "players": ""
    }
}
The file is a .json.
I'm trying to add something into the players key when a user wants to join.
Any help? Also, how would one remove a user name from the aforementioned players section?
Edit:
Some code from my .js file
if (commandEX.toLowerCase() === "join") {
    var joinEX = message.content.split(' ')[2];
    if (joinEX === undefined) {
        return message.reply("**ERROR**: No gym entered, enter a gym to join.").then(m => m.delete(10000));
    }
    var removeTrigger = message.content.split(' ')[0];
    var findGym = message.content.slice(removeTrigger.length);
    findGym = findGym.slice(commandEX.length);
    findGym = findGym.slice(2);
    var commandFind = exRaidDB.data[findGym];
    memberEX = message.member.nickname;
    if (commandFind == undefined) {
        message.channel.send(`**ERROR**: No ex raid at that gym.`).then(m => m.delete(10000));
    } else {
        message.channel.send(`**Added ${memberEX} to the list, details below.** \n**EX Raid at** ${commandFind.gym} \n**Date**: ${commandFind.date} \n**Time**: ${commandFind.time} \n**Location**: ${commandFind.location}`);
        // this line is where the code needs to be for adding ${message.member.nickname} into the "players" string.
    }
}
As you tagged javascript, am I correct in assuming you are using Node.js?
If so, you could use fs.writeFileSync(PATH_TO_JSON, JSON.stringify(newObj), 'utf8'); where newObj is the new object you want to store in the .json file.
But instead of working with the file directly, you can use a simple wrapper like the following:
let
    fs = require('fs'),
    PATH_TO_JSON = '';

function readJson() {
    if (!fs.existsSync(PATH_TO_JSON)) {
        writeJson();
    }
    return JSON.parse(fs.readFileSync(PATH_TO_JSON, 'utf8'));
}

function writeJson(obj = {}) {
    fs.writeFileSync(PATH_TO_JSON, JSON.stringify(obj), 'utf8');
}

function Ressource(path) {
    PATH_TO_JSON = path;
    this.data = readJson();
}

Ressource.prototype.set = function(obj) {
    this.data = obj;
};

Ressource.prototype.save = function() {
    writeJson(this.data);
};

module.exports = Ressource;
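A usage sketch for the wrapper (the file path and require path are assumptions; since the question stores players as a string, the nickname is appended comma-separated here):

const Ressource = require('./ressource'); // wherever the wrapper above lives

const db = new Ressource('./exraids.json');
const gym = db.data.home;
// append a player to the "players" string of the "home" gym
gym.players = gym.players ? gym.players + ',' + memberEX : memberEX;
db.save();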

Javascript: SHA512 value convert to JSON object

I already hash the variable 'getShaValue' with SHA-512, then combine it with several other variables like 'name' and 'ic' using JSON.stringify. But when I debug my JSON object, the SHA-512 value doesn't show the right value. If I only debug the value before converting it to JSON, it shows the right value.
Here is my function to hash the value:
self.sha512 = function () {
    var value = self.generateSHAvalue();
    var getShaValue = CryptoJS.SHA512(value);
    return getShaValue;
};
I combine it with several other variables:
var authToken = SHA.sha512();
var requestData = JSON.stringify({
    name: "Test",
    authToken: authToken
})
console.log("requestData: " + JSON.stringify(requestData));
The result in the console is:
{
    "name": "Test",
    "authToken": '"$super":{"$super":{}},"words":[1157899753,2720090447,1588426441,2244605341,2288345873,3771352114,2976397435,3171064119,-130018106,2601059156,3822838925,2519334849,1988499628,2785343384,-556559616,-1270654637],"sigBytes":64'
}
But it should be like this:
{
    "name": "Test",
    "authToken": "21507C7061D3F45058A95751E2FB332DD68F6A2ADC2039DE4341199643E12ADEFB8DF603C3F34E71FB447F46B82BC5DC7BD2B81B83B389D8950583BEFB424676"
}
Can anyone help me? Thanks.
You included the binary digest in the JSON. Try converting it to hex first:
self.sha512 = function () {
    var value = self.generateSHAvalue();
    var shaHex = CryptoJS.SHA512(value).toString(CryptoJS.enc.Hex);
    return shaHex;
};
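Worth noting: a CryptoJS WordArray's default toString() is already hex, so plain .toString() gives the same result:

var authToken = CryptoJS.SHA512(value).toString(); // hex by default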
