I am trying to export data from two tables into one Excel sheet. I have already exported the two tables into two separate sheets; how can I get all of the data into a single sheet?
exportAsExcel(excelObj: ExcelExport, excelObjHeader: ExcelExport): void {
  let wb: XLSX.WorkBook;
  let header = excelObj.header;
  const ws: XLSX.WorkSheet = XLSXUtils.json_to_sheet(excelObj.data);
  const ws1: XLSX.WorkSheet = XLSXUtils.json_to_sheet(excelObjHeader.data);
  wb = XLSXUtils.book_new();
  XLSXUtils.book_append_sheet(wb, ws, excelObj.sheetName);
  XLSXUtils.book_append_sheet(wb, ws1, excelObjHeader.sheetName);
  writeFile(wb, `${excelObj.fileName.trim()}${this.fileExtension}`);
}
I am trying to get this format:
I used the exceljs module and remodelled my solution with it:
exportExcelReport(result) {
  this.sName = 'testName';
  this.excelFileName = 'Test_Excel.xlsx';
  this.cols = ['ID', 'ItemCode', 'ItemName', 'Quantity', 'Price', 'LineTotal'];

  // Flatten the detail rows into plain objects for the worksheet.
  const excelItem = [];
  for (let index = 0; index < result.Details.length; index++) {
    const element = result.Details[index].ID;
    console.log(element);
    excelItem.push({
      ID: result.Details[index].ID,
      ItemCode: result.Details[index].ItemCode,
      ItemName: result.Details[index].ItemName,
      Quantity: result.Details[index].Quantity,
      Price: result.Details[index].Price,
      LineTotal: result.Details[index].LineTotal
    });
  }
  this.data = excelItem;

  const workbook = new Excel.Workbook();
  workbook.creator = 'Web';
  workbook.lastModifiedBy = 'Web';
  workbook.created = new Date();
  workbook.modified = new Date();

  // Freeze the first four rows (header rows plus the column row) so they stay visible while scrolling.
  workbook.addWorksheet(this.sName, { views: [{ state: 'frozen', ySplit: 4, activeCell: 'A1', showGridLines: true }] });
  const sheet = workbook.getWorksheet(1);

  // Header table: document number, status and date range on one row.
  // Note: Angular's DatePipe uses 'MM' for months ('mm' is minutes).
  const head1 = [
    'Document No.', result.Code,
    ' Status', result.Name,
    ' Date Range',
    this.datePipe.transform(result.FromDate, 'yyyy/MM/dd') + ' - ' + this.datePipe.transform(result.ToDate, 'yyyy/MM/dd')
  ];
  sheet.addRow([]); // blank spacer row
  sheet.addRow(head1);
  sheet.addRow([]); // blank spacer row

  // Detail table: column captions in row 4, then the data rows keyed by column.
  sheet.getRow(4).values = this.cols;
  sheet.columns = [
    { key: 'ID' },
    { key: 'ItemCode' },
    { key: 'ItemName' },
    { key: 'Quantity' },
    { key: 'Price' },
    { key: 'LineTotal' },
  ];
  sheet.addRows(this.data);

  // Write the workbook to a buffer and trigger a browser download.
  workbook.xlsx.writeBuffer().then(data => {
    const blob = new Blob([data], { type: this.blobType });
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    document.body.appendChild(a);
    a.href = url;
    a.download = this.excelFileName;
    a.click();
  });
}
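For completeness, the original SheetJS approach can also put both tables on one sheet by appending the second data set below the first with sheet_add_json and origin: -1. This is only a rough sketch, reusing the excelObj/excelObjHeader shapes from the first method above:

exportAsSingleSheet(excelObj: ExcelExport, excelObjHeader: ExcelExport): void {
  // Header table first, then the detail table appended below it on the same sheet.
  const ws: XLSX.WorkSheet = XLSXUtils.json_to_sheet(excelObjHeader.data);
  XLSXUtils.sheet_add_json(ws, excelObj.data, { origin: -1 }); // origin: -1 appends after the last used row
  const wb: XLSX.WorkBook = XLSXUtils.book_new();
  XLSXUtils.book_append_sheet(wb, ws, excelObj.sheetName);
  writeFile(wb, `${excelObj.fileName.trim()}${this.fileExtension}`);
}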
This is my code. At the moment the chart displays data for the whole project, but the Rally dashboard has a release filter at the top of the page, and I want my chart to show only the data for the release selected by that filter. The SDK version in my code is 1.33.
<!DOCTYPE HTML>
<script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-date-fns/dist/chartjs-adapter-date-fns.bundle.min.js"></script>
<script>
var WORKSPACE_OID = "__WORKSPACE_OID__";
var PROJECT_OID = "__PROJECT_OID__";
var PROJECT_SCOPING_UP = "__PROJECT_SCOPING_UP__";
var PROJECT_SCOPING_DOWN = "__PROJECT_SCOPING_DOWN__";
var MILS_IN_DAY = 86400000;
var DAYS_BACK = 30;
var filterChart;
var currentProjectDataSource;
var fromDate = new Date(new Date().getTime() - (MILS_IN_DAY * DAYS_BACK));
var allDefects = [];
// var currentRelease;
var onLoadAllIssues = function (result) {
// var defects = result.defects.filter(function (defect) {
// return defect.Release && defect.Release._refObjectName === currentRelease.Name;
// });
var labels = [];
var openDefects = [];
var closedDefects = [];
var defects = result.defects;
for (var count = 0; count < defects.length; count++) {
allDefects[allDefects.length] = defects[count];
var defect = defects[count];
labels.push(defect.CreationDate.split('T')[0]);
if (defect.ClosedDate !==null) {
closedDefects.push(defect.ClosedDate.split('T')[0]);
}
}
closedDefects.sort();
const counts = {};
labels.forEach(function (x) { counts[x] = (counts[x] || 0) + 1; });
const closedcounts = {};
closedDefects.forEach(function (x) { closedcounts[x] = (closedcounts[x] || 0) + 1; });
mychart(counts,closedcounts,labels)
};
var createCharts = function () {
var loadAllDefectsQuery = {
type: 'defect',
key: 'defects',
fetch: 'CreationDate,ClosedDate,ObjectID,FormattedID,Name,State,Priority',
order: 'CreationDate',
query: '((CreationDate != "null") OR (CreationDate > "' + dojo.date.stamp.toISOString(fromDate, { zulu: true }) +
'"))'
};
currentProjectDataSource.findAll(loadAllDefectsQuery, onLoadAllIssues);
};
var initPage = function () {
currentProjectDataSource = new rally.sdk.data.RallyDataSource(WORKSPACE_OID, PROJECT_OID, PROJECT_SCOPING_UP,
PROJECT_SCOPING_DOWN);
createCharts();
};
rally.addOnLoad(initPage);
function mychart(counts,closedcounts,labels) {
const pielable = labels;
const piedata = counts;
const closedcountsdata = closedcounts;
const data = {
datasets: [
{
label: 'Number of opened defects',
data: piedata,
},
{
label: 'Number of closed defects',
data: closedcountsdata,
}
]
};
const config = {
type: 'line',
data: data,
options: {
scales: {
x: {
min:"2022-01-01",
max:"2022-12-31",
type: 'time',
time:{
unit:'day',
},
},
y: {
beginAtZero: true,
grace: 5,
ticks: {
stepSize: 1,
},
},
},
plugins: {
legend: {
position: 'top',
},
title: {
display: true,
text: 'Defect Burndown Chart'
},
tooltip: {
yAlign: 'bottom',
titleMarginBottom: 0,
callbacks: {
title: function (context) {
return( `${context[0].label.slice(0, -13).replace(/,/g, " ")}`)
},
}
}
}
}
};
const myChart = new Chart(
document.getElementById('myChart'),
config
)
filterChart= function filterChart(date){
const year = date.value.substring(0,4);
const month = date.value.substring(5);
const lastday = (y,m)=>{
return new Date(y,m,0).getDate();
}
const startDate = `${date.value}-01`;
const endDate = `${date.value}-${lastday(year,month)}`;
myChart.config.options.scales.x.min=startDate;
myChart.config.options.scales.x.max = endDate;
myChart.update();
}}
</script>
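Building on the filtering that is commented out in onLoadAllIssues above, one way to scope the chart to a single release is to filter the defects returned by the query before charting them. The selectedReleaseName below is a placeholder, not something read from the dashboard automatically; it would have to be supplied by whatever release control the app exposes, since the snippet above does not show how the page-level filter value is obtained:

// Rough sketch: keep only the defects that belong to one release, then
// build the chart data from the filtered list exactly as in onLoadAllIssues above.
var selectedReleaseName = 'Release 1'; // hypothetical value supplied by a release control
var onLoadReleaseIssues = function (result) {
    var defects = result.defects.filter(function (defect) {
        return defect.Release && defect.Release._refObjectName === selectedReleaseName;
    });
    // ...then compute labels, counts and closedcounts from "defects" and call mychart()...
};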
I'm trying to run this async function, but for some reason it stops running after await _limit(), and I'm not sure what I'm doing wrong.
Basically I'm trying to go through a specific image directory, do something for each image, and then write some updated meta tags into another directory that contains JSON files. The code stops working right after await _limit().
const path = require("path");
const basePath = process.cwd();
const fs = require("fs");
const { RateLimit } = require('async-sema');
const { fetchWithRetry } = require(`${basePath}/utils/fetchWithRetry.js`);
const { LIMIT } = require(`${basePath}/src/config.js`);

const _limit = RateLimit(LIMIT);
const allMetadata = [];
const regex = new RegExp("^([0-9]+).png");

async function main() {
  console.log("Starting upload of images...");
  const files = fs.readdirSync(`${basePath}/build/images`);
  // Sort numerically by the number before the file extension (1.png, 2.png, ...).
  files.sort(function (a, b) {
    return a.split(".")[0] - b.split(".")[0];
  });
  for (const file of files) {
    try {
      if (regex.test(file)) {
        const fileName = path.parse(file).name;
        let jsonFile = fs.readFileSync(`${basePath}/build/json/${fileName}.json`);
        let metaData = JSON.parse(jsonFile);
        if (!metaData.file_url.includes('https://')) {
          console.log(`code executes until here`);
          await _limit();
          const url = "https://api.nftport.xyz/v0/files";
          // FormData must be available here (e.g. from the form-data package or Node 18+'s global).
          const formData = new FormData();
          const fileStream = fs.createReadStream(`${basePath}/build/images/${file}`);
          formData.append("file", fileStream);
          const options = {
            method: "POST",
            headers: { Authorization: 'my API key here' },
            body: formData,
          };
          const response = await fetchWithRetry(url, options);
          metaData.file_url = response.ipfs_url;
          metaData.image = response.ipfs_url;
          fs.writeFileSync(
            `${basePath}/build/json/${fileName}.json`,
            JSON.stringify(metaData, null, 2)
          );
          console.log(`${response.file_name} uploaded & ${fileName}.json updated!`);
        } else {
          console.log(`${fileName} already uploaded.`);
        }
        allMetadata.push(metaData);
      }
    } catch (error) {
      console.log(`Catch: ${error}`);
    }
  }
  fs.writeFileSync(
    `${basePath}/build/json/_metadata.json`,
    JSON.stringify(allMetadata, null, 2)
  );
}

main();
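For reference, async-sema's RateLimit expects a numeric requests-per-second value and returns a function you await inside the loop. A minimal, self-contained sketch of that usage (with a hard-coded limit rather than one pulled from config.js) looks like this:

const { RateLimit } = require('async-sema');

// RateLimit(n) returns an async function that resolves at most n times per second.
const limit = RateLimit(2);

async function demo() {
  for (let i = 0; i < 5; i++) {
    await limit(); // the loop pauses here so it never exceeds 2 iterations per second
    console.log(`tick ${i} at ${new Date().toISOString()}`);
  }
}

demo();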
///config.js file starts here
const basePath = process.cwd();
const { MODE } = require(`${basePath}/constants/blend_mode.js`);
const { NETWORK } = require(`${basePath}/constants/network.js`);
const network = NETWORK.eth;
// General metadata for Ethereum
const namePrefix = "name of collection here";
const description = "description of collection here";
const baseUri = "ipfs://NewUriToReplace";
const solanaMetadata = {
symbol: "YC",
seller_fee_basis_points: 1000, // Define how much % you want from secondary market sales 1000 = 10%
external_url: "https://www.youtube.com/c/hashlipsnft",
creators: [
{
address: "address here",
share: 100,
},
],
};
// If you have selected Solana then the collection starts from 0 automatically
const layerConfigurations = [
{
growEditionSizeTo: 10, // With Cap
layersOrder: [
{ name: "Background" },
{ name: "Short" },
{ name: "Body" },
{ name: "Neckchain" },
{ name: "Tshirt" },
{ name: "Hoodie" },
{ name: "Facial Expression" },
{ name: "Beard" },
{ name: "Piercing" },
{ name: "Glasses" },
{ name: "Cap" },
],
},
];
const shuffleLayerConfigurations = false;
const debugLogs = false;
const format = {
width: 2000,
height: 3000,
smoothing: false,
};
const gif = {
export: false,
repeat: 0,
quality: 100,
delay: 500,
};
const text = {
only: false,
color: "#ffffff",
size: 20,
xGap: 40,
yGap: 40,
align: "left",
baseline: "top",
weight: "regular",
family: "Courier",
spacer: " => ",
};
const pixelFormat = {
ratio: 20 / 128,
};
const background = {
generate: true,
brightness: "80%",
static: false,
default: "#000000",
};
const extraMetadata = {
external_url: "external url here"
};
const rarityDelimiter = "#";
const uniqueDnaTorrance = 10000;
const preview = {
thumbPerRow: 5,
thumbWidth: 50,
imageRatio: format.height / format.width,
imageName: "preview.png",
};
const preview_gif = {
numberOfImages: 5,
order: "ASC", // ASC, DESC, MIXED
repeat: 0,
quality: 100,
delay: 500,
imageName: "preview.gif",
};
module.exports = {
format,
baseUri,
description,
background,
uniqueDnaTorrance,
layerConfigurations,
rarityDelimiter,
preview,
shuffleLayerConfigurations,
debugLogs,
extraMetadata,
pixelFormat,
text,
namePrefix,
network,
solanaMetadata,
gif,
preview_gif,
};
const AUTH = "API here";
const LIMIT = 2;
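Worth noting about the file as posted: AUTH and LIMIT are declared after module.exports and are not included in the exported object, so a destructure such as const { LIMIT } = require(`${basePath}/src/config.js`) in the upload script receives undefined. If they are meant to be read from config.js by other scripts, they would also need to be exported, e.g.:

// Assumption: AUTH and LIMIT are intended to be consumed by other scripts via config.js.
// Appending them to the already-exported object makes the destructure above return 2 instead of undefined.
module.exports.AUTH = AUTH;
module.exports.LIMIT = LIMIT;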
I'm trying to calculate the sum of each column of a CSV. I'm able to read the CSV in JS using the readFile method, and I was also able to loop through it and parse the data into an array of objects. Now I just need to figure out a way to add up all the column elements; that's where I'm struggling. My CSV object is an array of objects and looks like this:
[
{ item: '18', count: '180' },
{ item: '19', count: '163' },
{ item: '20', count: '175' },
{ item: '', count: undefined }
]
CSV input is like this:
item,count
18,180
19,163
20,175
I want to add 18 + 19 + 20 (and likewise the counts), and the final answer should look like [57, 518].
Here's what I've done so far. I just need help making it better and adding the column-wise summing logic in JS.
const fs = require('fs')
let result = []
var dataArray = []
fs.readFile(filename, 'utf8', function (err, data) {
  dataArray = data.split(/\r?\n/);
  // console.log("dataArray", dataArray)
  var headers = dataArray[0].split(",");
  for (var i = 1; i < dataArray.length; i++) {
    var obj = {};
    console.log("dataArray", dataArray)
    var currentline = dataArray[i].split(",");
    for (var j = 0; j < headers.length; j++) {
      obj[headers[j]] = currentline[j];
    }
    result.push(obj);
  }
})
You can iterate through each row of your CSV and sum up the values of item and count using Array#reduce and Array#forEach.
const fs = require('fs').promises;
const fileName = 'data.csv';

const calculateSum = async () => {
  const data = await fs.readFile(fileName, 'utf-8');
  const dataArray = data.split(/\r?\n/);
  const header = dataArray[0].split(',');
  const result = dataArray.slice(1).reduce((sum, arr) => {
    arr.split(',').forEach((v, i) => {
      sum[i] = (sum[i] || 0) + Number(v.trim());
    });
    return sum;
  }, []);
  console.log(result);
};
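With the sample data.csv shown above (items 18, 19, 20 and counts 180, 163, 175), calling the function prints the column totals:

calculateSum(); // logs [ 57, 518 ]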
Generic function
let dataArray = [
  { item: '18', count: '180' },
  { item: '19', count: '163' },
  { item: '20', count: '175' },
  { item: '', count: undefined }
]

const sums = dataArray.reduce((sum, tableRow) => {
  Object.keys(tableRow).forEach((obj) => {
    if (Number(tableRow[obj])) sum[obj] = (sum[obj] || 0) + Number(tableRow[obj]);
  })
  return sum;
}, []);

console.log(sums) // [ item: 57, count: 518 ]
I am unable to decipher the documentation...
The goal is to create a link formula in each cell based on another cell.
let wb = XLSX.utils.book_new();
wb.Props = {
  Title: `export`,
  Subject: "export",
};

let ws_data = [
  [
    "sku",
    "product",
    "picture"
  ]
];

products.map(item => {
  ws_data.push([
    item.sku,
    item.product,
    `=HYPERLINK("/pics/"&${item.sku}&".jpg";"link")`,
  ])
})

wb.SheetNames.push("data");
let ws = XLSX.utils.aoa_to_sheet(ws_data);
wb.Sheets["data"] = ws;
let wbout = XLSX.write(wb, { bookType: 'xlsx', type: 'binary' });
saveByteArray([convertBinaryToOctetStream(wbout)], `export.xlsx`);
Obviously, this simply puts the =HYPERLINK("/pics/all/"&sku&".jpg";"link") text in the cell. How can I make it the actual formula?
Could you try mapping the arrays separately and then using them in the worksheet? I am guessing you want the link in the third column.
let ws_links = [];

products.map(item => {
  ws_data.push([
    item.sku,
    item.product,
    'link',
  ])
})

products.forEach(item => {
  ws_links.push("/pics/" + item.sku + ".jpg")
})

wb.SheetNames.push("data");
let ws = XLSX.utils.aoa_to_sheet(ws_data);

// Attach a hyperlink object to each 'link' cell in the third column (c: 2),
// skipping the header row (hence r: i + 1).
for (let i = 0; i < ws_links.length; i++) {
  ws[XLSX.utils.encode_cell({
    c: 2,
    r: i + 1
  })].l = { Target: ws_links[i] };
}

wb.Sheets["data"] = ws;
let wbout = XLSX.write(wb, { bookType: 'xlsx', type: 'binary' });
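If an actual HYPERLINK formula is preferred over a link object, SheetJS stores formulae in a cell's f property (without the leading =). A rough sketch along the same lines, assuming the same layout as above (header in row 0, link column at index 2):

products.forEach((item, i) => {
  const addr = XLSX.utils.encode_cell({ c: 2, r: i + 1 });
  // t: 's' because HYPERLINK yields a text value; the formula string omits the leading '='.
  ws[addr] = { t: 's', f: `HYPERLINK("/pics/${item.sku}.jpg","link")` };
});

Like the link-object loop, this has to run after aoa_to_sheet builds ws and before XLSX.write.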
I can't figure out how to match the title and genre correctly based on what I have in my module.
The csv_json module has an edge case where it doesn't match the properties up correctly, and that is when the title has ", The" in it (i.e. when the title contains a comma).
//csv file
movieId,title,genre
1,"American President, The (1995)",Comedy|Drama|Romance
2,"Creation, The creator(xxxx)",Comedy|Drama|Romance
3,"Destruction, The destroyer(xxxxx)",Comedy|Drama|Romance
//csv_json module
const readline = require('readline');
const fs = require('fs');
function readCsv(pathToFile) {
return new Promise((resolve, reject) => {
const csvReader = readline.createInterface({
input: fs.createReadStream(pathToFile)
});
let headers;
const rows = [];
let tempRows = [];
csvReader
.on('line', row => {
if (!headers) {
headers = row.split(','); // header name breed age
} else {
rows.push(row.split(','));
}
})
.on('close', () => {
// then iterate through all of the "rows", matching them to the "headers"
for (var i = 0; i < rows.length; i++) {
var obj = {};
var currentline = rows[i];
for (var j = 0; j < headers.length; j++) {
obj[headers[j]] = currentline[j]; //Kitty Siamese 14
}
tempRows.push(obj);
}
resolve(JSON.stringify(tempRows));
});
// This would be in place of the "return" statement you had before
});
}
module.exports = readCsv;
//js file
const readCsv = require('./csvjson.js');
readCsv('movieTest.csv').then((data) => {
console.log(data)
let movieJson = JSON.parse(data);
console.log(movieJson)
/*data output:
[{"movieId":"1","title":"\"American President","genre":" The (1995)\""},{"movieId":"2","title":"\"Creation","genre":" The creator(xxxx)\""},{"movieId":"3","title":"\"Destruction","genre":" The destroyer(xxxxx)\""}]
*/
/*movieJson output:
[ { movieId: '1',
title: '"American President',
genre: ' The (1995)"' },
{ movieId: '2',
title: '"Creation',
genre: ' The creator(xxxx)"' },
{ movieId: '3',
title: '"Destruction',
genre: ' The destroyer(xxxxx)"' } ]
*/
});
I expect the output to match:
[ { movieId: '1',
title: "American President, The (1995)",
genre:'Comedy|Drama|Romance' },
{ movieId: '2',
title: "The creator(xxxx) Creation",
genre: ' Comedy|Drama|Romance' },
{ movieId: '3',
title: "Destruction The destroyer(xxx)",
genre: ' Comedy|Drama|Romance' } ]
This is probably because you're splitting each row on every occurrence of a comma.
const row = '1,"American President, The (1995)",Comedy|Drama|Romance'
row.split(',')
// returns [ '1', '"American President', ' The (1995)"', 'Comedy|Drama|Romance' ]
Try replacing every comma that is not followed by a whitespace with some unique string that wouldn't occur anywhere else in the CSV file, and then split on that:
row.replace(/\,(\S)/g, '&unique;$1').split('&unique;')
// returns [ '1', '"American President, The (1995)"', 'Comedy|Drama|Romance' ]
Hope this helps! :)
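If the unique-string replacement feels brittle, a small quote-aware splitter is another option. This is only a sketch (it assumes plain double-quoted fields with no escaped quotes inside them) and could be used in place of row.split(',') in the csv_json module:

// Split one CSV line on commas, but keep commas that appear inside
// double-quoted fields, and drop the surrounding quotes afterwards.
function splitCsvLine(line) {
  const fields = [];
  let current = '';
  let inQuotes = false;
  for (const ch of line) {
    if (ch === '"') {
      inQuotes = !inQuotes; // toggle quoted state; the quote itself is not kept
    } else if (ch === ',' && !inQuotes) {
      fields.push(current);
      current = '';
    } else {
      current += ch;
    }
  }
  fields.push(current);
  return fields;
}

// splitCsvLine('1,"American President, The (1995)",Comedy|Drama|Romance')
// -> [ '1', 'American President, The (1995)', 'Comedy|Drama|Romance' ]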