Adding things into an existing array in a JSON file, MicroDB - JavaScript

I'm trying to see if it's possible to add to an already created object in the MicroDB file.
I've tried using .push() but I'm having no luck.
The file already contains this object:
{
  "home": {
    "location": "walsall",
    "time": "12:00",
    "date": "12/12/17",
    "gym": "home",
    "players": ""
  }
}
The file is a .json file.
I'm trying to add something into the players key when a user wants to join.
Any help? Also, how would one remove a user name from the aforementioned players section?
Edit:
Some code from my .js file
if (commandEX.toLowerCase() === "join") {
  var joinEX = message.content.split(' ')[2];
  if (joinEX === undefined) {
    return message.reply("**ERROR**: No gym entered, enter a gym to join.").then(m => m.delete(10000));
  }
  var removeTrigger = message.content.split(' ')[0];
  var findGym = message.content.slice(removeTrigger.length);
  findGym = findGym.slice(commandEX.length);
  findGym = findGym.slice(2);
  var commandFind = exRaidDB.data[findGym];
  memberEX = message.member.nickname;
  if (commandFind == undefined) {
    message.channel.send(`**ERROR**: No ex raid at that gym.`).then(m => m.delete(10000));
  } else {
    message.channel.send(`**Added ${memberEX} to the list, details below.** \n**EX Raid at** ${commandFind.gym} \n**Date**: ${commandFind.date} \n**Time**: ${commandFind.time} \n**Location**: ${commandFind.location}`);
    // this is where the code needs to go for adding ${message.member.nickname} into the "players" string.
  }
}

Since you tagged javascript, am I correct in assuming you are using Node.js?
If that's the case, you could use fs.writeFileSync(PATH_TO_JSON, JSON.stringify(newObj), 'utf8'); where newObj is the new object you want to store in the .json file.
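For example, a minimal read-modify-write round trip might look like this; this is a sketch, assuming the file already exists and PATH_TO_JSON points at it:
const fs = require('fs');

// Read and parse the current contents, mutate in memory, then write the whole object back.
const data = JSON.parse(fs.readFileSync(PATH_TO_JSON, 'utf8'));
data.home.players = 'JSmith'; // modify whatever you need
fs.writeFileSync(PATH_TO_JSON, JSON.stringify(data, null, 2), 'utf8');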
But instead of working with the file directly, you can use a simple wrapper like the following:
let
  fs = require('fs'),
  PATH_TO_JSON = '';

function readJson() {
  if (!fs.existsSync(PATH_TO_JSON)) {
    writeJson();
  }
  return JSON.parse(fs.readFileSync(PATH_TO_JSON, 'utf8'));
}

function writeJson(obj = {}) {
  fs.writeFileSync(PATH_TO_JSON, JSON.stringify(obj), 'utf8');
}

function Ressource(path) {
  PATH_TO_JSON = path;
  this.data = readJson();
}

Ressource.prototype.set = function(obj) {
  this.data = obj;
};

Ressource.prototype.save = function() {
  writeJson(this.data);
};

module.exports = Ressource;
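Using that wrapper, adding and removing a player could then look like the sketch below. Note the assumptions: the require path is hypothetical, the file already contains the home object from your question, and "players" is changed from a string to an array, since .push() only works on arrays:
var Ressource = require('./Ressource.js'); // hypothetical path
var db = new Ressource('./exraids.json');  // hypothetical file name

// Add a player:
db.data.home.players = db.data.home.players || [];
db.data.home.players.push(message.member.nickname);
db.save();

// Remove a player by name:
db.data.home.players = db.data.home.players.filter(function(name) {
  return name !== message.member.nickname;
});
db.save();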

Related

Unable To Pass Objects/Arrays in IPCRenderer, An object could not be cloned EventEmitter.i.send.i.send

I am unable to pass any objects or arrays to ipcRenderer.
I get an error when passing an object or array through IPC. I have even tried converting to a string using JSON.stringify, but it converts it into an empty object string.
I have tried passing a FileList, an array of objects, and even a plain object; nothing passes. Only strings or handwritten objects work.
I've read that IPC uses the Structured Clone Algorithm, and FileList and Array are allowed by that algorithm.
ERROR:
electron/js2c/renderer_init.js:74 Uncaught Error: An object could not be cloned.
at EventEmitter.i.send.i.send (electron/js2c/renderer_init.js:74)
at HTMLButtonElement.compressNow (ImageHandling.js:190)
I have tried many possible solutions but nothing worked
code:
const compressNow = () => {
  ipcRenderer.send("image:compress", filess) // This is the line that throws.
  // filess is a variable containing an array of selected files from an HTML input.
}
Now I have tried to send filess via JSON.stringify, and I have tried to send it as an object, but nothing works unless I manually write a dummy object or string.
Here's my GitHub repo for this project.
File with the error:
ImageHandling.js
const fs = window.require('fs');
const {ipcRenderer} = require("electron")
const SELECT = (target) => document.querySelector(`${target}`)

var filess = []
const imgUploadInput = SELECT("#imgUploadInput")
const warning = SELECT("#warning")

const setImgBase64 = (imgEl, file) => {
  const ReadAbleFile = fs.readFileSync(file.path).toString('base64')
  let src = "data:image/png;base64," + ReadAbleFile
  imgEl.setAttribute("src", src)
  // El.src=src
  // console.log(`FIXED IMAGE # ${imgEl} `,ReadAbleFile)
}
const renderImages = () => {
  const files = filess && Array.from(filess)
  const defaultImg = SELECT("#defaultImg")
  const addImgBtn = SELECT("#addImgBtn")
  imgUploadInput.disabled = true;
  let numOfFiles = files.length
  if (numOfFiles < 1) {
    SELECT("#compressContainer").style.visibility = "hidden"
  } else {
    SELECT("#compressContainer").style.visibility = "visible"
  }
  if (numOfFiles > 49) {
    warning.innerHTML = `<b style="font-weight:bold; color:red;">WARNING:</b><br/>
      <span style="padding:10px;text-align:left">
        Your processor/computer may not be able to process ${numOfFiles} Images at once, We recommend selecting less than 50 Images at once for better performance.
      </span>
    `;
  }
  addImgBtn.innerHTML = `LOADING.....`
  if (defaultImg && numOfFiles > 0)
    defaultImg.remove();
  setTimeout(() => {
    if (files && numOfFiles > 0) {
      let displayImages = SELECT("#displayImages")
      displayImages.innerHTML = ""
      files?.forEach((file, i) => {
        let divEl = document.createElement("div")
        let imgEl = document.createElement("img")
        imgEl.src = file.path
        imgEl.id = `PNG_${i}_${btoa(file.name)}`
        divEl.className = "displayedImg"
        imgEl.setAttribute("onclick", `document.getElementById('ImageView').src=this.src`)
        const a = document.createElement("a")
        a.appendChild(imgEl)
        a.setAttribute("href", `#ViewImage`)
        a.className = "perfundo__link"
        divEl.appendChild(a)
        divEl.className = "displayedImg perfundo"
        displayImages.appendChild(divEl)
        if (i == files.length - 1) {
          warning.innerHTML = "";
          updateNumOfImages();
        }
        imgEl.onerror = () => setImgBase64(imgEl, file) // convert to base64 only on error; this improves performance and avoids freezes (previously every image was converted, errored or not, which froze the computer)
      })
      addImgBtn.innerHTML = "+ Add MORE"
      imgUploadInput.disabled = false
      findDuplicate()
    }
  }, 0);
}
const hasDuplicate = () => {
  let FileNames = [...filess.map(f => f.name)]
  let duplicateFiles = filess.filter((file, i) => FileNames.indexOf(file.name) !== i)
  return {FileNames, duplicateFiles, FilesLength: duplicateFiles.length}
}
const findDuplicate = (forceAlert = false) => {
  if (filess && filess.length) {
    let {FileNames} = hasDuplicate()
    let {duplicateFiles} = hasDuplicate()
    if (duplicateFiles.length) {
      let countFiles = duplicateFiles.length
      let fileStr = countFiles > 1 ? "files" : "file"
      console.log("result from removeDup=> ", filess, " \n dupfilename=> ", FileNames, " \n dupfiles=> ", duplicateFiles)
      let shouldNotAsk = localStorage.getItem("NeverAsk")
      let msg = `You've selected ${countFiles > 1 ? countFiles : "a"} duplicate ${fileStr}`
      let duplInner = `<span style='color:red'>
        <b>WARNING</b>
        <p style="margin:0px;line-height:1"> ${msg} . <button onClick="findDuplicate(true)" type="button" class="btn btn-danger btn-rounded btn-sm">REMOVE DUPLICATE</button></p>
      </span>`
      if (!shouldNotAsk || forceAlert) {
        swal("DUPLICATE FILES DETECTED", `${msg} , Would you like to un-select duplicate ${fileStr} having same name?`, {
          icon: 'warning',
          dangerMode: true,
          buttons: {
            cancel: true,
            ...(forceAlert ? {} : {never: "Never Ask"}),
            confirm: "Yes !"
          }
        }).then((Yes) => {
          if (Yes == "never") {
            localStorage.setItem("NeverAsk", true)
            warning.innerHTML = duplInner
          } else if (Yes) {
            removeDuplicates()
          }
        })
      } else {
        warning.innerHTML = duplInner
      }
    }
  }
}
const removeDuplicates = (showAlert = true) => {
  let {FileNames} = hasDuplicate()
  let {duplicateFiles} = hasDuplicate()
  let duplicateFileNames = duplicateFiles.map(f => f.name)
  let uniqueFiles = filess.filter((file) => !duplicateFileNames.includes(file.name))
  filess = [...uniqueFiles, ...duplicateFiles]
  console.log("result from removeDup=> ", filess, " \n filename=> ", FileNames, " \n dupfiles=> ", duplicateFiles, "\n unique fil=> ", uniqueFiles)
  renderImages()
  if (showAlert) {
    swal("DONE", "Removed Duplicate Files ", {icon: 'success'}).then(() => {
      renderImages()
      setTimeout(() => {
        let hasDuplicateFiles = hasDuplicate().FilesLength
        if (hasDuplicateFiles) { // re-check whether any duplicate files are left after the current removal pass
          removeDuplicates(false) // re-run to remove the rest; false suppresses this alert and stops the loop from continuing
        }
        renderImages()
      }, 10);
    })
  }
}
const updateNumOfImages = () => {
  warning.innerHTML = `
    <span style="text-align:left; color:green">
      Selected ${filess.length} Image(s)
    </span>
  `;
}

const compressNow = () => {
  ipcRenderer.send("image:compress", filess)
  // alert("WOW")
}

CompressBtn.addEventListener("click", compressNow)

imgUploadInput.addEventListener("change", (e) => {
  let SelectedFiles = e.target.files
  if (SelectedFiles && SelectedFiles.length) {
    filess = [...filess, ...SelectedFiles]
    renderImages()
  }
})
// SELECT("#imgUploadInput").addEventListener("drop",(e)=>console.log("DROP=> ",e))
UPDATE:-
I REPLACED THIS:
const compressNow = () => {
  ipcRenderer.send("image:compress", filess)
}
WITH THIS:
const compressNow = () => {
  filess.forEach(file => {
    ipcRenderer.send("image:compress", file.path)
  });
}
Now here I am sending the files one by one via forEach. Actually it's sending the string file path, so that's why it works. I am still confused why I have to do this: why can't I send the whole FileList? I assume this loop method is bad practice because it will consume more CPU with one additional loop, and it wouldn't be necessary if I could send the whole array.
See Behavior Changed: Sending non-JS objects over IPC now throws an exception. DOM objects etc. are not serializable: Electron 9.0 (and newer) throws an "object could not be cloned" error when unserializable objects are sent.
In your code, File and FileList are DOM objects.
If you want to avoid using forEach, try this code:
const compressNow = () => {
  const paths = filess.map(f => f.path);
  ipcRenderer.send("image:compress", paths);
}
You can refer to the Electron GitHub issue tracker for this issue (already closed):
Error: An object could not be cloned #26338
Docs for ipcRenderer.send(channel, ...args)
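On the receiving end, a minimal main-process handler for those paths might look like this; a sketch, assuming the channel name from the question and that the actual compression logic lives elsewhere:
const { ipcMain } = require('electron');

ipcMain.on('image:compress', (event, paths) => {
  // paths is a plain array of strings, so it clones across IPC without issues.
  paths.forEach((p) => {
    // compress the file at p here (compression code not shown)
  });
});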
This issue mainly comes up when the data we are sending via IPC contains non-cloneable values, such as a function inside an object. To avoid that, we can use JSON.stringify() before sending and JSON.parse() on the receiving end, but doing so will lose some of the values, e.g.:
const obj = {
  x: 10,
  foo: () => {
    console.log('This is non-cloneable value')
  }
}
console.log(JSON.stringify(obj))
Output: {"x":10}
Instead of sending the images, save them to disk with fs and send the paths.
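A sketch of that idea; the names buffer and the temp-file naming scheme are assumptions for illustration, not from the question, and ipcRenderer is assumed to be in scope:
const fs = require('fs');
const os = require('os');
const path = require('path');

// Persist the image bytes first, then send only the path string over IPC.
const tmpFile = path.join(os.tmpdir(), `upload_${Date.now()}.png`);
fs.writeFileSync(tmpFile, buffer); // buffer: the image data you already hold
ipcRenderer.send('image:compress', tmpFile); // a plain string is always cloneable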
The simplest thing that could possibly work is to use lodash's cloneDeep():
const _ = require('lodash'); // lodash must be available in the main process

ipcMain.handle('stuffgetList', async () => {
  return _.cloneDeep(await stuffStore.getList())
})
In the renderer window, use JSON.stringify(); in main.js, use JSON.parse().
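The matching renderer-side call for that handler would be something like the sketch below; ipcRenderer.invoke returns a Promise that resolves with whatever the handler returns:
const { ipcRenderer } = require('electron');

async function getList() {
  // Resolves with the deep-cloned list returned by the ipcMain.handle() above.
  const list = await ipcRenderer.invoke('stuffgetList');
  return list;
}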
Remove :compress from the .send method and try again.

replace lines between two lines in a template file in nodejs

I have a file with the following content:
function(doc) {
  // pre-reqs
  var facet = true;
  var store = true;
  // Template start
  var fields = {
  }
  // template end
  noiseList = ["type", "objects", "value"]
  const isNumeric = (num) => {
    return !isNaN(num)
  }
  const emitIndex = () => {
    if (doc.created_by_ref) {
      Object.keys(fields).forEach(function(key) {
        if (typeof fields[key] == 'object' && fields[key].length !== undefined) {
          if (fields[key].length === 0) {
            index(key, 'UNDEFINED', {
              'store': store,
              'facet': facet
            });
          } else {
            fields[key].forEach(function(ele) {
              index(key, ele.toString(), {
                'store': store,
                'facet': facet
              });
            })
          }
        } else {
          index(key, fields[key].toString(), {
            'store': store,
            'facet': facet
          });
        }
      })
    }
  }
  Object.keys(doc).map(obj => {
    if (typeof doc[obj] === 'object' && doc[obj] !== null) {
      traverseObjectsInDoc(doc[obj], noiseCancelation(obj) ? "" : obj, doc.objects, false);
    } else if (doc[obj] !== null && isValueType(obj) && !noiseCancelation(obj)) {
      AddToFields(`${obj}`, doc[obj])
    }
  });
  emitIndex();
}
As you can see, I have two special markers there: template start and template end.
What I am trying to achieve is to replace the empty
var fields = {
}
in that file with something similar to
var fields = {
  "test1": "test",
  "test2": "test2"
}
I should mention that these fields are generated at runtime, so the content needs to be dynamic as well; that is why I need this approach. All I can think of is to read the file
const searchAll1 = () => {
  const contents = fs
    .readFileSync("./lib/design_documents/searchAll", "utf8");
  // find the template start and end, replace the new fields somehow, and return
};
and then find the template start and end, replace the fields somehow, and return. However, I am not really sure this is the best way.
What is the best way to do so?
Adjust the design of your function by adding a second parameter; then you can pass the fields object dynamically whenever you call the function.
// your-script.js
module.exports = function(doc, fields) {
  ...
}
Then when you import and use the function, create a new object and pass it to your function and call it.
const yourFunction = require('./your-script.js');

let doc = someValue;
let fields = {
  "test1": "test",
  "test2": "test2"
}

yourFunction(doc, fields);
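If you'd rather keep the marker approach from the question, a minimal sketch of the in-place replacement could look like this; it assumes the // Template start and // template end comments stay exactly as written, and that the generated fields serialize cleanly with JSON.stringify:
const fs = require('fs');

const replaceFields = (fieldsObj) => {
  const contents = fs.readFileSync("./lib/design_documents/searchAll", "utf8");
  const block = `// Template start\nvar fields = ${JSON.stringify(fieldsObj, null, 2)}\n// template end`;
  // Swap everything between (and including) the two markers for the new block.
  return contents.replace(/\/\/ Template start[\s\S]*\/\/ template end/, block);
};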

How to build Typescript code in JS using a function?

I'm trying to build TypeScript code in JS so I can display it on the UI for users to play with. Is there a way to create this code as TypeScript instead of text, so it compiles as well? Any help here will be highly appreciated; I couldn't find any source related to this issue.
data contains interfaces
main.js
function buildTypescript(data) {
  var _ref = window.activeOperation;
  var modelData = getModelData(data);
  var text = '';
  text += "import {Api as IngenSDK} from '#SSDK'" + ';\n\n';
  text += modelData;
  text += 'app.Api.setConfig({\n "env": "SIT3"\n});\n\n';
  text += _ref + '(' + JSON.stringify(SDKresult[_ref].request, null, 4) + ', function(result) {\n //Your code goes here \n debugger; \n console.log(result); \n});';
  $('#request_method_TS').text(text);
}

function getModelData(data) {
  var activePath = window.activePath.toLowerCase();
  var _interface;
  $.each(data.children, function(id, item) {
    // item.name would be a string like "#SDK/core/interface/member/Details"
    var path = item.name.replace(/\"/g, "");
    if (path.toLowerCase().includes(activePath)) {
      console.log('OBJ', item);
      _interface = createInterfaces(path, item.children);
    }
  });
  return _interface;
}

function createInterfaces(path, data) {
  const imports = data.map(d => d.name).join(', ');
  return `import { ${imports} } from '${path}';\n\n`;
}
html
<pre id="request_method_TS" style="margin: 5px;"></pre>
You can make the code executable by doing something like:
const executableTypescript = new Function(typescriptDataAsText);
The code in the string is compiled at this point (note that new Function treats it as JavaScript, so it must not contain TypeScript-only syntax).
To execute it, you just call the newly created function:
executableTypescript();
Check by running the snippet below that the code logging the message in the variable is executed.
Note: For your case, if modelData and more such data is included, it should be part of a new module, since it has import statements and they are required to be at the top of a module.
var path = "/filepath"
var data = [
  {
    name: "IParam"
  },
  {
    name: "IError"
  }
]

function createInterfaces(path, data) {
  const imports = data.map(d => d.name).join(', ');
  return `import { ${imports} } from '${path}';\n\n`;
}

function buildTypescript(data) {
  // To include this data as part of your text, make sure that it's part of a new module
  const modelData = createInterfaces(path, data);
  const text = `
    let message = "Typescript executed";\n\n
    console.log(message)`;
  return text;
}

const typescriptDataAsText = buildTypescript(data);
const executableTypescript = new Function(typescriptDataAsText);
executableTypescript()
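If you need the string to be treated as real TypeScript (interfaces, type annotations) rather than as plain JavaScript, one option is the official compiler API. This is a sketch assuming the typescript npm package is available in your bundle:
const ts = require('typescript');

function compileTypescript(source) {
  // transpileModule strips types/interfaces and emits plain JavaScript.
  const result = ts.transpileModule(source, {
    compilerOptions: { module: ts.ModuleKind.CommonJS }
  });
  return result.outputText; // safe to hand to new Function(...) afterwards
}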

Javascript Rewrite Config File

I have a config.js file, which I believe is JSON, that is loaded when the application first starts:
var config = {};
config.user = [
  {id: 'JSMITH', priceModify: 'true'},
  {id: 'JBLOGGS', priceModify: 'false'},
]
config.price = [
  {id: "price01", name: "priceName01", primary: "57.25", secondary: "34.54"},
  {id: "price02", name: "priceName02", primary: "98.26", secondary: "139.45"},
  {id: "price03", name: "priceName03", primary: "13.87", secondary: "29.13"}
]
To pull / push data I just use the following:
// Read
var curPrice = config.price[0].primary;
// Write
config.price[0].primary = "98.24";
How do I go about exporting the config file with the new value so that it will load next time the application is opened? I can use the file system object to write the file; I just don't understand how I would export everything (and preferably keep the same format).
I originally thought about reading the whole config file into a variable, cycling through to find the required block, id, and key and replacing the value, then writing the whole thing back, but I can't seem to figure out how to replace that specific value only.
Any help would be greatly appreciated
Edit Apologies, I forgot to mention that this application is completely offline and uses local directories
Solution
I stumbled across a few solutions to different issues which, when combined, gave me the perfect solution. First we cycle through the JavaScript object, building an array of the detail and then converting the array to a string:
vMethod.convertToText = function(obj) {
  var string = [];
  var output = '';
  var count = 0;
  var countTotal = 0;
  if (typeof(obj) == "object" && (obj.join == undefined)) {
    count = 0;
    countTotal = 0;
    string.push("{");
    for (var prop in obj) {
      countTotal++;
    }
    for (var prop in obj) {
      if (count == countTotal - 1) {
        string.push(prop, ": ", vMethod.convertToText(obj[prop]), '}\r\n');
      } else {
        string.push(prop, ": ", vMethod.convertToText(obj[prop]), ",");
      }
      count++;
    }
  } else if (typeof(obj) == "object" && !(obj.join == undefined)) {
    count = 0;
    countTotal = 0;
    string.push("[\r\n")
    for (var prop in obj) {
      countTotal++;
    }
    for (var prop in obj) {
      if (count == countTotal - 1) {
        string.push(vMethod.convertToText(obj[prop]), '];\r\n');
      } else {
        string.push(vMethod.convertToText(obj[prop]), ",");
      }
      count++;
    }
  } else if (typeof(obj) == "function") {
    string.push(obj.toString())
  } else {
    string.push(JSON.stringify(obj))
  }
  output = string.join("").toString();
  //output = output.slice(1, -1);
  return output;
}
Then we clean the output text (necessary for me to remove excess characters):
vMethod.cleanConfigText = function() {
  var outputText = vMethod.convertToText(config);
  outputText = outputText.slice(1, -1);
  outputText = 'var config = {};\r\n' + outputText;
  outputText = outputText.replace('user:', 'config.user =');
  outputText = outputText.replace(',price:', 'config.price =');
  outputText = outputText.slice(0, -2);
  outputText = outputText.replace(/"/g, "'")
  return outputText;
}
Finally a function to export the object into my config.js file:
vMethod.writeToConfig = function() {
  vObject.fileSystem = new ActiveXObject('Scripting.FileSystemObject');
  vObject.fileSystemFile = vObject.fileSystem.CreateTextFile('source\\js\\config.js', true);
  vObject.fileSystemFile.Write(vMethod.cleanConfigText());
  vObject.fileSystemFile.Close();
  delete vObject.fileSystemFile;
  delete vObject.fileSystem;
}
So when I want to export a change in the config, I just call:
vMethod.writeToConfig();
The only difference in the file format is that the commas appear at the start of a trailing line rather than the end of a preceding line but I can live with that!
Edit Turns out I'm anally retentive and the commas were bugging me.
I added these to the clean-up function, and now the config is identical to before, but without the indent:
outputText = outputText.replace(/[\n\r]/g, '_');
outputText = outputText.replace(/__,/g, ',\r\n');
outputText = outputText.replace(/__/g, '\r\n');
Thank you to those that looked at the question and tried to help, very much appreciated.
Edit
DO NOT READ THE SOLUTION ABOVE, IT IS IN THE WRONG PLACE AND THEREFORE IS NOT A VALID ANSWER. YOU'VE BEEN WARNED.
You can use a very popular npm package: https://www.npmjs.com/package/jsonfile . There are many, but I've chosen this one.
Usually config stuff should live in JSON or .env files.
Now, all you have to do is use jsonfile's API to read/write the JSON (the package does the serialization/deserialization) and parse it when the application starts.
Example:
var jsonfile = require('jsonfile');
var util = require('util');

var config = null;
var file = './config.json';

// Reading
jsonfile.readFile(file, function(err, obj) {
  config = obj;
});

// Writing
// Edit your config blah blah
config.user = [
  {id: 'JSMITH', priceModify: 'true'},
  {id: 'JBLOGGS', priceModify: 'false'},
];
config.price = [
  {id: "price01", name: "priceName01", primary: "57.25", secondary: "34.54"},
  {id: "price02", name: "priceName02", primary: "98.26", secondary: "139.45"},
  {id: "price03", name: "priceName03", primary: "13.87", secondary: "29.13"}
];
jsonfile.writeFile(file, config, function(err) {
  if (err) return err;
  console.log('Config saved to file!');
});
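Since the question mentions keeping the file readable, note that jsonfile accepts a spaces option for pretty-printed output; a brief sketch:
jsonfile.writeFile(file, config, {spaces: 2}, function(err) {
  if (err) return console.error(err);
  console.log('Config saved with 2-space indentation!');
});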

Async Recursion with JavaScript and Node.js

This is probably a noob JavaScript question, but I'd like to know whether my solution to a problem I'm having is 'correct'.
I have created the following sample application that recreates my error:
Firstly in index.js
var processor = require('./fileProcessor/processor.js');

var container = {
  source: "source.txt",
  destination: "destination.txt"
};

new processor().process(container);
I create my container object which has the name of the source file and the name of the destination file. This is passed into the process function of the processor:
var fileProcessor = require('./fileProcessor.js');

module.exports = function Processor() {
  this.process = function(container) {
    var file = new fileProcessor();
    if (container.finished === undefined) {
      if (container.body === undefined) {
        file.read(container, this.process);
      } else {
        file.write(container, this.process);
      }
    }
  };
};
As you can see this calls the read and write functions passing in the container and the process function as the callback, the fileProcessor looks like this:
var fs = require('fs');

module.exports = function() {
  this.read = function(container, callback) {
    fs.readFile(container.source, function(err, data) {
      if (err) throw err;
      container.body = data;
      callback(container);
    });
  };
  this.write = function(container, callback) {
    fs.writeFile(container.destination, container.body, function(err) {
      if (err) {
        return console.log(err);
      }
      container.finished = true;
      callback(container);
    });
  };
};
In simple terms, the processor calls file.read, which reads the file and calls back into the process function, which then calls the write function. However, at the end of the write function an error is thrown:
callback(container);
^
TypeError: object is not a function
Obviously when passing in this.process to file.write(container, this.process); the this isn't the this I intend it to be!
If I update my processor by adding a processFunction variable:
var fileProcessor = require('./fileProcessor.js');

module.exports = function Processor() {
  var processFunction = function(container) {
    var file = new fileProcessor();
    if (container.finished === undefined) {
      if (container.body === undefined) {
        file.read(container, processFunction);
      } else {
        file.write(container, processFunction);
      }
    }
  };
  this.process = function(container) {
    processFunction(container);
  };
};
Everything works fine. Is this a good way to do this or is there a better solution?
I think this is a fine way to do it. There is one possible modification that you might make. Since you are creating a new name in your scope just for the purpose of recursing, you could just name your function and refer to it by its name inside of the function.
module.exports = function Processor() {
  this.process = function processFunction(container) {
    var file = new fileProcessor();
    if (container.finished === undefined) {
      if (container.body === undefined) {
        file.read(container, processFunction);
      } else {
        file.write(container, processFunction);
      }
    }
  };
};
Then you can avoid creating a name (processFunction) that will be visible outside the function.
Take a look here for reference:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function#Named_function_expression
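As an aside, the same read-then-write flow can sidestep callback recursion entirely with promises; a sketch assuming Node's fs.promises API (Node 10+):
const fs = require('fs').promises;

async function process(container) {
  // Read the source, stash the body, write it to the destination, then flag completion.
  container.body = await fs.readFile(container.source);
  await fs.writeFile(container.destination, container.body);
  container.finished = true;
  return container;
}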
