In my Angular project, I'm extracting an array of 4 objects from my Kendo chart's datasource, and each object has an array inside.
The data inside each sub-object varies in size, but it always includes a timestamp and 1-5 value fields.
I need to export this array to an Excel file (.xls or .xlsx, NOT CSV).
So far I've managed to download the JSON as a file on its own (both .json and unformatted .xls).
I'd like each object to become its own worksheet, formatted with the timestamp in the first column, value 1 in the next, and so on. The column headers should be "timestamp", the name of value 1, etc. (I'm translating these in the UI according to user preferences).
How can I build this kind of formatted .xls file using Angular? I don't know of a good library for this that is clear about how to use it in Angular.
Following Nathan Beck's link suggestion, I used AlaSQL. I'm getting correctly formatted columns; I just needed to adapt my array to have multiple worksheets.
We integrate AlaSQL into our Angular project by including alasql.min.js and xlsx.core.min.js.
Then we call the alasql method in our function:
$scope.export = function() {
    var arrayToExport = [{id: 1, name: "gas"}, ...];
    // alasql binds query parameters from an array, so wrap the dataset
    alasql('SELECT * INTO XLSX("your_filename.xlsx", {headers: true}) FROM ?', [arrayToExport]);
};
EDIT: Solved the multiple-worksheets issue as well. Keep in mind that when using the multiple-worksheet method, you have to remove the asterisk from the query and replace the {headers: true} object with a question mark, passing the options in a separate array. So:
var arrayToExport1 = [{id: 1, name: "gas"}, ...];
var arrayToExport2 = [{id: 1, name: "solid"}, ...];
var arrayToExport3 = [{id: 1, name: "liquid"}, ...];
// One dataset per worksheet: pass an array of arrays that matches the opts
// array below (concatenating the rows would dump everything into one sheet)
var finalArray = [arrayToExport1, arrayToExport2, arrayToExport3];
var opts = [
    {sheetid: "gas", headers: true},
    {sheetid: "solid", headers: true},
    {sheetid: "liquid", headers: true}
];
alasql('SELECT INTO XLSX("your_filename.xlsx", ?) FROM ?', [opts, finalArray]);
You can use the XLSX (SheetJS) library to convert JSON into an XLSX file and download it. Just create a service for your AngularJS application, then call it as a service method containing the code below.
I found this tutorial with plain JS and jQuery code, but the same code can be used in AngularJS.
Method
Include library
<script type="text/javascript" src="//unpkg.com/xlsx/dist/xlsx.full.min.js"></script>
JavaScript Code
var createXLSLFormatObj = [];

/* XLS Head Columns */
var xlsHeader = ["EmployeeID", "Full Name"];

/* XLS Rows Data */
var xlsRows = [
    { "EmployeeID": "EMP001", "FullName": "Jolly" },
    { "EmployeeID": "EMP002", "FullName": "Macias" },
    { "EmployeeID": "EMP003", "FullName": "Lucian" },
    { "EmployeeID": "EMP004", "FullName": "Blaze" },
    { "EmployeeID": "EMP005", "FullName": "Blossom" },
    { "EmployeeID": "EMP006", "FullName": "Kerry" },
    { "EmployeeID": "EMP007", "FullName": "Adele" },
    { "EmployeeID": "EMP008", "FullName": "Freaky" },
    { "EmployeeID": "EMP009", "FullName": "Brooke" },
    { "EmployeeID": "EMP010", "FullName": "FreakyJolly.Com" }
];

/* Build an array-of-arrays: header row first, then one array per data row */
createXLSLFormatObj.push(xlsHeader);
$.each(xlsRows, function(index, value) {
    var innerRowData = [];
    $("tbody").append('<tr><td>' + value.EmployeeID + '</td><td>' + value.FullName + '</td></tr>');
    $.each(value, function(ind, val) {
        innerRowData.push(val);
    });
    createXLSLFormatObj.push(innerRowData);
});

/* File Name */
var filename = "FreakyJSON_To_XLS.xlsx";

/* Sheet Name */
var ws_name = "FreakySheet";

/* The console.log(new Date()) calls are just rough timing markers */
if (typeof console !== 'undefined') console.log(new Date());
var wb = XLSX.utils.book_new(),
    ws = XLSX.utils.aoa_to_sheet(createXLSLFormatObj);

/* Add worksheet to workbook */
XLSX.utils.book_append_sheet(wb, ws, ws_name);

/* Write workbook and Download */
if (typeof console !== 'undefined') console.log(new Date());
XLSX.writeFile(wb, filename);
if (typeof console !== 'undefined') console.log(new Date());
This is an Angular directive for exporting and downloading JSON as a CSV. Install it with bower install ng-csv-download.
var app = angular.module('testApp', ['tld.csvDownload']);
app.controller('Ctrl1', function($scope, $rootScope) {
    $scope.data = {};
    $scope.data.exportFilename = 'example.csv';
    $scope.data.displayLabel = 'Download Example CSV';
    $scope.data.myHeaderData = {
        id: 'User ID',
        name: 'User Name (Last, First)',
        alt: 'Nickname'
    };
    $scope.data.myInputArray = [{
        id: '0001',
        name: 'Jetson, George'
    }, {
        id: '0002',
        name: 'Jetson, Jane',
        alt: 'Jane, his wife.'
    }, {
        id: '0003',
        name: 'Jetson, Judith',
        alt: 'Daughter Judy'
    }, {
        id: '0004',
        name: 'Jetson, Elroy',
        alt: 'Boy Elroy'
    }, {
        id: 'THX1138',
        name: 'Rosie The Maid',
        alt: 'Rosie'
    }];
});
<!DOCTYPE html>
<html ng-app="testApp">
<head>
    <meta charset="utf-8" />
    <title>Exporting JSON as a CSV</title>
    <script>document.write('<base href="' + document.location + '" />');</script>
    <link rel="stylesheet" href="style.css" />
    <script src="https://cdnjs.cloudflare.com/ajax/libs/angular.js/1.3.15/angular.min.js"></script>
    <script src="csv-download.min.js"></script>
    <script src="app.js"></script>
</head>
<body>
    <div>Using an Angular directive for exporting JSON data as a CSV download.</div>
    <div ng-controller="Ctrl1">
        <h2>All Attributes Set</h2>
        <csv-download
            filename="{{data.exportFilename}}"
            label="{{data.displayLabel}}"
            column-header="data.myHeaderData"
            input-array="data.myInputArray">
        </csv-download>
        <hr />
        <h2>Only Required Attribute Set</h2>
        <h3>Optional Attributes Default</h3>
        <csv-download
            input-array="data.myInputArray">
        </csv-download>
    </div>
</body>
</html>
Related
I have a file with data in it that I need to parse and store in a DB. Below is an example of 2 entries in the file. I'm not quite sure what the structure is (although it looks to be ndJSON). I am trying to parse the data into a JSON object in order to store it in a DB, but cannot seem to figure it out. Here is what I have so far:
var ndjson = {
"sequence-num": "0123456789",
"version": "N1.4",
"record-type": "R",
"session-id": "197-30760303",
"date": "2021-07-23 15:00:53",
"passport-header": { "alg": "ES256", "ppt": "test", "typ": "passport", "x5u": "https://cr.com" },
"passport-payload": { "attest": "A", "dest": { "tn": ["0123456789"] }, "iat": 0123456789, "orig": { "tn": "0123456789" }, "origid": "c699f78a-ebc6-11eb-bfd8-bec0bbc98888" },
"identity-header": "eyJhbGciOiJFUzI1NiIsInBwdCI6InNoYWtlbiIsInR5cCI6InBhc3Nwb3J0IiwieDV1IjoiaHR0cHM6Ly9jci5zYW5zYXkuY29tL1RvdWNodG9uZV82ODNBIn0.eyJhdHRlc3QiOiJCIiwiZGVzdCI6eyJ0biI6WyIxMjUeyJhdHRlc3QiOiJCIiwiZGVzdCI6eyJ0biI6WyIxMj;info=<https://google.com/>;alg=ES256;ppt=\"test\""
}
{
"sequence-num": "0123456788",
"version": "N1.4",
"record-type": "R",
"session-id": "214-30760304",
"date": "2021-07-23 15:00:53",
"passport-header": { "alg": "ES256", "ppt": "test", "typ": "passport", "x5u": "https://cr.com" },
"passport-payload": { "attest": "B", "dest": { "tn": ["0123456788"] }, "iat": 0123456788, "orig": { "tn": "0123456788" }, "origid": "c69d0588-ebc6-11eb-bfd8-bec0bbc98888" },
"identity-header": "eyJhbGciOiJFUzI1NiIsInBwdCI6InNoYWtlbiIsInR5cCI6InBhc3Nwb3J0IiwieDV1IjoiaHR0cHM6Ly9jci5zYW5zYXkuY29tL1RvdWNodG9uZV82ODNBIn0.eyJhdHRlc3QiOiJCIiwiZGVzdCI6eyJ0biI6WyIxMjUeyJhdHRlc3QiOiJCIiwiZGVzdCI6eyJ0biI6WyIxMj;info=<https://google.com/>;alg=ES256;ppt=\"test\""
};
let result = ndjson.split(',').map(s => JSON.parse(s));
console.log('The resulting array of items:');
console.log(result);
console.log('Each item at a time:');
for (o of result) {
console.log("item:", o);
}
When I run this, I get an Uncaught SyntaxError: Unexpected token ':' error on line 12, at the second "sequence-num": "0123456788", entry.
Any help is appreciated, thank you!
If you actually have ndJSON(newline-delimited JSON) then each line in the file is valid JSON, delimited by newlines. A simple file would look like this:
{"key1": "Value 1","key2": "Value 2","key3": "Value 3","key4": "Value 4"}
{"key1": "Value 5","key2": "Value 6","key3": "Value 7","key4": "Value 8"}
This differs from the formatted data you've posted here, and the difference is important since once you've formatted it, the valid JSON objects cannot simply be distinguished by the presence of newlines.
So, on the assumption that you do have valid ndJSON in its original form, you can extract it by using split() on newlines and JSON.parse() on the resulting array.
This snippet adds a little file handling to allow a file to be uploaded, but thereafter it uses split() and JSON.parse() to extract the data:
"use strict";
document.getElementsByTagName('form')[0].addEventListener('submit',function(e){
e.preventDefault();
const selectedFile = document.getElementById('inputFile').files[0];
let fr = new FileReader();
fr.onload = function(e){
let ndJSON = e.target.result; // ndJSON extracted here
let ndJSONLines = ndJSON.split('\n');
// Process JSON objects here
ndJSONLines.forEach(function(el){
let obj = JSON.parse(el);
Object.keys(obj).forEach(key=>{
console.log(`Key: ${key}, Value: ${obj[key]}`);
});
});
}
fr.readAsText(selectedFile)
});
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Parsing ndJSON</title>
</head>
<body>
    <form method="post" enctype="multipart/form-data">
        <input type="file" name="inputFile" id="inputFile">
        <input type="submit">
    </form>
</body>
</html>
Output, based on the sample file above:
Key: key1, Value: Value 1
Key: key2, Value: Value 2
Key: key3, Value: Value 3
Key: key4, Value: Value 4
Key: key1, Value: Value 5
Key: key2, Value: Value 6
Key: key3, Value: Value 7
Key: key4, Value: Value 8
Here is what I do:
const end_point_url = 'https://ipfs.io/ipfs/bafkqap33ejwgs3tfgerduitomrvhg33oebtg64tnmf2c4lroej6qu6zcnruw4zjsei5ce5dinfzsa2ltebqsa43fmnxw4zbanruw4zjcpufa';

let json = await fetch(end_point_url)
    .then(resp => resp.text())
    .then(buf => {
        // NDJSON format: drop the trailing newline, then parse line by line
        return buf.slice(0, -1).split('\n').map(JSON.parse);
    })
    .catch(console.error);
We are looking for ways to bulk-upload data into Google Firestore and found an awesome script here on Stack Overflow to do so, sort of. The problem is that when the data is uploaded, the collection is fine and the document is fine, but some nested data we can only manage to import as the "map" type, when we need "array" and "map". Since we are basically newbies, trial and error has not been enough. We'd appreciate it if you can take a look at the code and help us with that.
So far, with the following code:
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <script src="https://www.gstatic.com/firebasejs/7.5.0/firebase-app.js"></script>
    <script src="https://www.gstatic.com/firebasejs/7.5.0/firebase-firestore.js"></script>
</head>
<body>
    <script>
        var config = {
            apiKey: 'MY API KEY',
            authDomain: 'MY AUTHDOMAIN',
            projectId: 'MY PROJECT ID'
        };
        firebase.initializeApp(config);
        var firestore = firebase.firestore();

        // Paste from the CSV to JSON converter
        const data = [
            {
                "collection": "sub_cats",
                "uid": "Av9vJ0EoFfxhAR2",
                "class": "especialidades_medicas",
                "id": 0,
                "name": "Cardiologo",
                "ind": "11",
                "district": ""
            },
            {
                "collection": "sub_cats",
                "uid": "Av9vJ0EoFfxhAR2",
                "class": "especialidades_medicas",
                "id": 1,
                "name": "Urologo",
                "ind": "12",
                "district": ""
            }
        ];

        var promises = [];
        data.forEach(function (arrayItem) {
            var docRef = firestore.collection(arrayItem.collection).doc(arrayItem.uid);
            var objClass = {};
            var objId = {};
            // Nesting each entry under its id key is why Firestore stores it
            // as a map of maps instead of an array
            objId[arrayItem.id] = { name: arrayItem.name, ind: arrayItem.ind };
            objClass[arrayItem.class] = objId;
            promises.push(docRef.set(objClass, { merge: true }));
        });
        Promise.all(promises).then(function () { console.log('upload complete'); });
    </script>
</body>
</html>
we get this (screenshot of the current vs. needed data structure omitted):
How would you modify this code to get the array / map structure?
Thanks a lot!!!
You will need to use arrayUnion() of FieldValue, which can be used with set() or update(). Basically, if you use it with update(), it will "merge" the new value with the existing array values in your document. Inside it you can have your map or any data type you want.
An example code looks like this:
db.collection("users").doc(props.uid).update({
points: db.FieldValue.arrayUnion({value: pointObj.value, reason: pointObj.reason})
});
I'm trying to read some data from a locally stored JSON file, process it into separate JS objects, and append it to a queue. For the life of me, I can't find a way to test that my parsing function is working. To test it, I'm trying to pull the data from the local JSON file and print it to the console. I think there's something conceptual that I don't understand, or a testing method that I don't know of.
I cannot run the fs module in the browser, even with Browserify. Firefox tells me that createReadStream is not a function.
index.html
<!DOCTYPE html>
<div id="container" style="position: relative; width: 100%; height: 700px;"></div>
<!-- <link rel="stylesheet" type="text/css" href="index.css" /> -->
<body>
<script type="text/javascript" src="parse.js"></script>
</body>
</html>
parse.js
//Assume JSON output = {attack source, attack destination, attack type}
//Required modules
//#ts-ignore: undeclared module
const watchStream = require('fs-watch-stream')
const es = require('event-stream')
const stream = require('JSONStream')
const fs = require('fs');
//Create websocket to receive json data instead?
//Save for later
//https.createServer(attacks).listen(9090)
function initial() {
    var filepath = 'sample.json';
    // Creates readable stream for JSON file parsing
    var stream = fs.createReadStream(filepath, { encoding: 'utf8' }),
        parser = stream.parse('*.');
    // Send read data to parser function
    return stream.pipe(parser);
}

initial()
    .pipe(es.mapSync(function (data) {
        console.log(data);
    }));
sample.json
{
"employees": {
"employee": [
{
"id": "1",
"firstName": "Tom",
"lastName": "Cruise",
"photo": "https://jsonformatter.org/img/tom-cruise.jpg"
},
{
"id": "2",
"firstName": "Maria",
"lastName": "Sharapova",
"photo": "https://jsonformatter.org/img/Maria-Sharapova.jpg"
},
{
"id": "3",
"firstName": "Robert",
"lastName": "Downey Jr.",
"photo": "https://jsonformatter.org/img/Robert-Downey-Jr.jpg"
}
]
}
}
There's no need to involve the browser; just run node parse.js. Node.js has console.log too, and you will see the output in the console.
If you install fs-extra, you can use fs.readJson() to get the JSON as an object directly.
Given the following setup code (loosely realistic):
default.json (used by the config module for loading base configuration):
{
"templateData": {
"corp": {
"corpName": "Company",
"DepartmentOne": {
"name": "DepartmentOne",
"phone": "1-800-555-555",
"email": "departmentone#company.com"
},
"address": {
"main": {
"addressLine1": "Somwhere",
"city": "SomeTown",
"stateShort": "SState",
"stateLong": "SomeState",
"zipCodeShort": "Zippy"
}
},
"urls": {
"company": {
"corporate": {
"site": "https://company.com/",
"logo": "http://sites.company.com/images/logo.png",
"altText" : "Company Corporate Logo"
},
"facebook": {
"site": "https://www.facebook.com/company",
"icon": "http://sites.company.com/images/facebook.png",
"altText" : "Company Facebook Page"
},
"twitter": {
"site": "https://twitter.com/company",
"icon": "http://sites.company.com/images/twitter.png",
"altText" : "Company Twitter Feed"
},
"youtube": {
"site": "https://www.youtube.com/company",
"icon": "http://sites.company.com/images/youtube.png",
"altText" : "Company YouTube Channel"
}
}
},
"currentYear": "2015",
"corpWebSiteName": "Company.com"
}
}
}
test01.js (main node code)
var fs = require('fs');
var bunyan = require('bunyan');
var jade = require('jade');
var config = require('config');
var restify = require('restify');
var log = bunyan.createLogger({src: false, name: "emailTemplateMerger"});
var html = '';
var corpConfig = config.get('templateData');
var emailData = { 'emailAddress' : 'nonya@bidness.com',
'firstName' : 'nonya',
'lastName' : 'bidness'
}
var miscData = { 'billingDate' : '2015-08-01',
'accountType' : 'new',
'discountTypes' : {
'primary' : 'Coupon20',
'bonus' : 'First30Days',
'standard' : 'freeShipping'
}
}
fs.readFile('./emailTemplates/test01.jade', 'utf-8', function(error, source){
var template = jade.compile(source);
html = template(corpConfig)
console.log(html);
});
Jade Template:
html
  head
  body
    p.
      Hello #{emailData.firstName},
    p.
      Welcome to #{corp.corpName}'s #{DepartmentOne.name}.
    p.
      Your next bill will be sent on #{miscData.billingDate} for your #{miscData.accountType}.
    p.
      Thank you
Problem: determining an efficient method of merging the config data and the 2 local datasets (which will be passed in via REST) into the Jade template.
Since the data has many levels of nesting, a direct merge can be complex and possibly error-prone depending on the data passed in. Is there any way to pass Jade multiple, separate datasets and defer the merging to the Jade engine?
I've tried multiple passes, but a single pass of the Jade merge removes all tags from the template, so that didn't work out. I haven't figured out any way to chain the results.
You could create a top-level context object like this:
var context = {
    corp: corpConfig.corp,  // config.get('templateData') returns { corp: {...} }
    email: emailData,
    misc: miscData
};
....
var html = template({ context: context });  // expose it to Jade under the name "context"
And then change the template references to something like this:
html
  head
  body
    p.
      Hello #{context.email.firstName},
    p.
      Welcome to #{context.corp.corpName}'s #{context.corp.DepartmentOne.name}.
// Start with this JSON
var initialJson = {
"rows": [{
"ID": 123,
"Data": 430910,
"VersionNum": 0,
"RowSeqNum": 1,
"IterationNum": 1,
"FirstName": "Aqwemara",
"LastName": "Seweqweebi",
"Location": "CweAN",
"Role": "Site",
"In_Out": "Internal",
"Editor": "User1",
"Edit_Date": "2015%2D02%2D25T15%3A30%3A47%2E883Z"
}]
};
//Create an array that lists the Keys for the NEW JSON
var hResponse = [];
hResponse.push("FirstName", "LastName", "Location", "Role", "Editor", "Edit_Date");
var wbuResponse = [];
// When GO! button is pressed, process the "initialJson" object, creating a new object with only the Keys listed in "hResponse" array
$(document).ready(function() {
$('.btn').click(function() {
wbuResponse.push(
for each(hHeading in hResponse[{
hHeading: response[i].hHeading
}]);
console.log(JSON.stringify(wbuResponse));
});
});
//console setup
var consoleLine = "<p class=\"console-line\"></p>";
console = {
log: function(text) {
$("#console-log").append($(consoleLine).html(text));
}
};
.console-line {
font-family: console;
margin: 2px;
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<input class="btn" type="button" id="btn" value="Go!">
<div id="console-log"></div>
I want to take the initialJson, modify the pairs, then rebuild the JSON based on the entries in the hResponse array.
Focusing on the rebuild part, I only want to take a certain subset of the keys and put them into the new JSON array.
Can I do a for-each loop within the wbuResponse.push to create the correct structure?
Am I doing this right, or maybe there is a better, more efficient way?
Thanks
JSFIDDLE : https://jsfiddle.net/b5m0nk67/5/
What you're looking for is called map. It's a built-in function in modern JavaScript implementations: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map
Using .map(), your filtering code could look something like this:
var wbuResponse = initialJson.rows.map(function(row, index) {
    return {
        FirstName: row.FirstName,
        LastName: row.LastName,
        Location: row.Location,
        Role: row.Role,
        Editor: row.Editor,
        Edit_Date: row.Edit_Date
    };
});
If you wanted to use an approach like the one you're alluding to, with an array of property names to filter down to rather than having it hard-coded, you could do something like this:
var props = ["FirstName", "LastName", "Location", "Role", "Editor", "Edit_Date"];
var wbuResponse = initialJson.rows.map(function(row, index) {
    var mappedRow = {};
    for (var i = 0; i < props.length; i++) {
        mappedRow[props[i]] = row[props[i]];
    }
    return mappedRow;
});
For broader browser support, you can use jQuery's built-in map function, which works even in browsers that don't support Array.prototype.map natively. A few examples of that here: http://encosia.com/use-jquery-to-extract-data-from-html-lists-and-tables/