How can I handle a non-existent route in leaflet-routing-machine?
In case of an error I do not want to return <RoutingMachine />. (One possible approach is sketched after the code below.)
const Routing = ({ pointA, pointB }) => {
const dispatch = useDispatch()
const createLayer = () => {
const instance = L.Routing.control({
waypoints: [
L.latLng(pointA.position.lat, pointA.position.lng),
L.latLng(pointB.position.lat, pointB.position.lng)
],
lineOptions: {
styles: [{ color: "#965de9", weight: 5 }]
},
});
instance.on('routesfound', (e) => {
let routes = e.routes;
let summary = routes[0].summary;
let totalTime = moment.utc(1000 * summary.totalTime).format("H[h] mm[m]") // summary.totalTime is in seconds
console.log('total time: ', totalTime);
let totalDistance = Math.floor(summary.totalDistance / 1000)
dispatch(setTime(totalTime))
dispatch(setDistance(totalDistance))
});
return instance;
};
const RoutingMachine = createControlComponent(createLayer);
return <RoutingMachine />
};
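A minimal sketch of one way to handle a failed route, assuming the goal is to hide the control when no route exists. leaflet-routing-machine fires a routingerror event on the control, so the handler can flag the failure in Redux (setRoutingError below is a hypothetical action) and the parent can decide not to render <RoutingMachine />:
const createLayer = () => {
  const instance = L.Routing.control({
    waypoints: [
      L.latLng(pointA.position.lat, pointA.position.lng),
      L.latLng(pointB.position.lat, pointB.position.lng)
    ],
    lineOptions: { styles: [{ color: "#965de9", weight: 5 }] }
  });
  // Fired when the underlying router (OSRM by default) cannot compute a route.
  instance.on('routingerror', (e) => {
    console.log('routing error: ', e.error);
    dispatch(setRoutingError(true)); // hypothetical action that flips a flag in the store
  });
  instance.on('routesfound', (e) => {
    // ...same handler as in the question...
  });
  return instance;
};
// In the parent component, render the control only while no error is flagged:
// {!routingError && <Routing pointA={pointA} pointB={pointB} />}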
Good evening! I have a problem.
I am creating a collection of NFTs using the JS file below.
What I want to change: once the script has read the images from the layers folder, composed the final image, and written the metadata, the images it used should be deleted from the source folder (${basePath}/layers).
const basePath = process.cwd();
const { NETWORK } = require(`${basePath}/constants/network.js`);
const fs = require("fs");
const sha1 = require(`${basePath}/node_modules/sha1`);
const { createCanvas, loadImage } = require(`${basePath}/node_modules/canvas`);
const buildDir = `${basePath}/build`;
const layersDir = `${basePath}/layers`;
const {
format,
baseUri,
description,
background,
uniqueDnaTorrance,
layerConfigurations,
rarityDelimiter,
shuffleLayerConfigurations,
debugLogs,
extraMetadata,
text,
namePrefix,
network,
solanaMetadata,
gif,
} = require(`${basePath}/src/config.js`);
const canvas = createCanvas(format.width, format.height);
const ctx = canvas.getContext("2d");
ctx.imageSmoothingEnabled = format.smoothing;
var metadataList = [];
var attributesList = [];
var dnaList = new Set();
const DNA_DELIMITER = "-";
const HashlipsGiffer = require(`${basePath}/modules/HashlipsGiffer.js`);
let hashlipsGiffer = null;
const buildSetup = () => {
if (fs.existsSync(buildDir)) {
fs.rmdirSync(buildDir, { recursive: true });
}
fs.mkdirSync(buildDir);
fs.mkdirSync(`${buildDir}/json`);
fs.mkdirSync(`${buildDir}/images`);
if (gif.export) {
fs.mkdirSync(`${buildDir}/gifs`);
}
};
const getRarityWeight = (_str) => {
let nameWithoutExtension = _str.slice(0, -4);
var nameWithoutWeight = Number(
nameWithoutExtension.split(rarityDelimiter).pop()
);
if (isNaN(nameWithoutWeight)) {
nameWithoutWeight = 1;
}
return nameWithoutWeight;
};
const cleanDna = (_str) => {
const withoutOptions = removeQueryStrings(_str);
var dna = Number(withoutOptions.split(":").shift());
return dna;
};
const cleanName = (_str) => {
let nameWithoutExtension = _str.slice(0, -4);
var nameWithoutWeight = nameWithoutExtension.split(rarityDelimiter).shift();
return nameWithoutWeight;
};
const getElements = (path) => {
return fs
.readdirSync(path)
.filter((item) => !/(^|\/)\.[^\/\.]/g.test(item))
.map((i, index) => {
return {
id: index,
name: cleanName(i),
filename: i,
path: `${path}${i}`,
weight: getRarityWeight(i),
};
});
};
const layersSetup = (layersOrder) => {
const layers = layersOrder.map((layerObj, index) => ({
id: index,
elements: getElements(`${layersDir}/${layerObj.name}/`),
name:
layerObj.options?.["displayName"] != undefined
? layerObj.options?.["displayName"]
: layerObj.name,
blend:
layerObj.options?.["blend"] != undefined
? layerObj.options?.["blend"]
: "source-over",
opacity:
layerObj.options?.["opacity"] != undefined
? layerObj.options?.["opacity"]
: 1,
bypassDNA:
layerObj.options?.["bypassDNA"] !== undefined
? layerObj.options?.["bypassDNA"]
: false,
}));
return layers;
};
const saveImage = (_editionCount) => {
fs.writeFileSync(
`${buildDir}/images/${_editionCount}.png`,
canvas.toBuffer("image/png")
);
};
const genColor = () => {
let hue = Math.floor(Math.random() * 360);
let pastel = `hsl(${hue}, 100%, ${background.brightness})`;
return pastel;
};
const drawBackground = () => {
ctx.fillStyle = background.static ? background.default : genColor();
ctx.fillRect(0, 0, format.width, format.height);
};
const addMetadata = (_dna, _edition) => {
let dateTime = Date.now();
let tempMetadata = {
name: `${namePrefix} #${_edition}`,
description: description,
image: `${baseUri}/${_edition}.png`,
dna: sha1(_dna),
edition: _edition,
date: dateTime,
...extraMetadata,
attributes: attributesList,
compiler: "CryptoGioconda Engine",
};
if (network == NETWORK.sol) {
tempMetadata = {
//Added metadata for solana
name: tempMetadata.name,
symbol: solanaMetadata.symbol,
description: tempMetadata.description,
//Added metadata for solana
seller_fee_basis_points: solanaMetadata.seller_fee_basis_points,
image: `image.png`,
//Added metadata for solana
external_url: solanaMetadata.external_url,
edition: _edition,
...extraMetadata,
attributes: tempMetadata.attributes,
properties: {
files: [
{
uri: "image.png",
type: "image/png",
},
],
category: "image",
creators: solanaMetadata.creators,
},
};
}
metadataList.push(tempMetadata);
attributesList = [];
};
const addAttributes = (_element) => {
let selectedElement = _element.layer.selectedElement;
attributesList.push({
trait_type: _element.layer.name,
value: selectedElement.name,
});
};
const loadLayerImg = async (_layer) => {
return new Promise(async (resolve) => {
const image = await loadImage(`${_layer.selectedElement.path}`);
resolve({ layer: _layer, loadedImage: image });
});
};
const addText = (_sig, x, y, size) => {
ctx.fillStyle = text.color;
ctx.font = `${text.weight} ${size}pt ${text.family}`;
ctx.textBaseline = text.baseline;
ctx.textAlign = text.align;
ctx.fillText(_sig, x, y);
};
const drawElement = (_renderObject, _index, _layersLen) => {
ctx.globalAlpha = _renderObject.layer.opacity;
ctx.globalCompositeOperation = _renderObject.layer.blend;
text.only
? addText(
`${_renderObject.layer.name}${text.spacer}${_renderObject.layer.selectedElement.name}`,
text.xGap,
text.yGap * (_index + 1),
text.size
)
: ctx.drawImage(
_renderObject.loadedImage,
0,
0,
format.width,
format.height
);
addAttributes(_renderObject);
};
const constructLayerToDna = (_dna = "", _layers = []) => {
let mappedDnaToLayers = _layers.map((layer, index) => {
let selectedElement = layer.elements.find(
(e) => e.id == cleanDna(_dna.split(DNA_DELIMITER)[index])
);
return {
name: layer.name,
blend: layer.blend,
opacity: layer.opacity,
selectedElement: selectedElement,
};
});
return mappedDnaToLayers;
};
/**
 * In some cases a DNA string may contain optional query parameters for options
 * such as bypassing the DNA isUnique check; this function filters out those
 * items without modifying the stored DNA.
 *
 * @param {String} _dna New DNA string
 * @returns new DNA string with any items that should be filtered removed.
 */
const filterDNAOptions = (_dna) => {
const dnaItems = _dna.split(DNA_DELIMITER);
const filteredDNA = dnaItems.filter((element) => {
const query = /(\?.*$)/;
const querystring = query.exec(element);
if (!querystring) {
return true;
}
const options = querystring[1].split("&").reduce((r, setting) => {
const keyPairs = setting.split("=");
return { ...r, [keyPairs[0]]: keyPairs[1] };
}, []);
return options.bypassDNA;
});
return filteredDNA.join(DNA_DELIMITER);
};
/**
 * Cleaning function for DNA strings. When DNA strings include an option, it
 * is added to the filename with a ?setting=value query string. It needs to be
 * removed to properly access the file name before drawing.
 *
 * @param {String} _dna The entire newDNA string
 * @returns Cleaned DNA string without querystring parameters.
 */
const removeQueryStrings = (_dna) => {
const query = /(\?.*$)/;
return _dna.replace(query, "");
};
const isDnaUnique = (_DnaList = new Set(), _dna = "") => {
const _filteredDNA = filterDNAOptions(_dna);
return !_DnaList.has(_filteredDNA);
};
const createDna = (_layers) => {
let randNum = [];
_layers.forEach((layer) => {
var totalWeight = 0;
layer.elements.forEach((element) => {
totalWeight += element.weight;
});
// number between 0 - totalWeight
let random = Math.floor(Math.random() * totalWeight);
for (var i = 0; i < layer.elements.length; i++) {
// subtract the current weight from the random weight until we reach a sub zero value.
random -= layer.elements[i].weight;
if (random < 0) {
return randNum.push(
`${layer.elements[i].id}:${layer.elements[i].filename}${
layer.bypassDNA ? "?bypassDNA=true" : ""
}`
);
}
}
});
return randNum.join(DNA_DELIMITER);
};
const writeMetaData = (_data) => {
fs.writeFileSync(`${buildDir}/json/_metadata.json`, _data);
};
const saveMetaDataSingleFile = (_editionCount) => {
let metadata = metadataList.find((meta) => meta.edition == _editionCount);
debugLogs
? console.log(
`Writing metadata for ${_editionCount}: ${JSON.stringify(metadata)}`
)
: null;
fs.writeFileSync(
`${buildDir}/json/${_editionCount}.json`,
JSON.stringify(metadata, null, 2)
);
};
function shuffle(array) {
let currentIndex = array.length,
randomIndex;
while (currentIndex != 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex--;
[array[currentIndex], array[randomIndex]] = [
array[randomIndex],
array[currentIndex],
];
}
return array;
}
const startCreating = async () => {
let layerConfigIndex = 0;
let editionCount = 1;
let failedCount = 0;
let abstractedIndexes = [];
for (
let i = network == NETWORK.sol ? 0 : 1;
i <= layerConfigurations[layerConfigurations.length - 1].growEditionSizeTo;
i++
) {
abstractedIndexes.push(i);
}
if (shuffleLayerConfigurations) {
abstractedIndexes = shuffle(abstractedIndexes);
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
while (layerConfigIndex < layerConfigurations.length) {
const layers = layersSetup(
layerConfigurations[layerConfigIndex].layersOrder
);
while (
editionCount <= layerConfigurations[layerConfigIndex].growEditionSizeTo
) {
let newDna = createDna(layers);
if (isDnaUnique(dnaList, newDna)) {
let results = constructLayerToDna(newDna, layers);
let loadedElements = [];
results.forEach((layer) => {
loadedElements.push(loadLayerImg(layer));
});
await Promise.all(loadedElements).then((renderObjectArray) => {
debugLogs ? console.log("Clearing canvas") : null;
ctx.clearRect(0, 0, format.width, format.height);
if (gif.export) {
hashlipsGiffer = new HashlipsGiffer(
canvas,
ctx,
`${buildDir}/gifs/${abstractedIndexes[0]}.gif`,
gif.repeat,
gif.quality,
gif.delay
);
hashlipsGiffer.start();
}
if (background.generate) {
drawBackground();
}
renderObjectArray.forEach((renderObject, index) => {
drawElement(
renderObject,
index,
layerConfigurations[layerConfigIndex].layersOrder.length
);
if (gif.export) {
hashlipsGiffer.add();
}
});
if (gif.export) {
hashlipsGiffer.stop();
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
saveImage(abstractedIndexes[0]);
addMetadata(newDna, abstractedIndexes[0]);
saveMetaDataSingleFile(abstractedIndexes[0]);
console.log(
`Created edition: ${abstractedIndexes[0]}, with DNA: ${sha1(
newDna
)}`
);
});
dnaList.add(filterDNAOptions(newDna));
editionCount++;
abstractedIndexes.shift();
} else {
console.log("DNA exists!");
failedCount++;
if (failedCount >= uniqueDnaTorrance) {
console.log(
`You need more layers or elements to grow your edition to ${layerConfigurations[layerConfigIndex].growEditionSizeTo} artworks!`
);
process.exit();
}
}
}
layerConfigIndex++;
}
writeMetaData(JSON.stringify(metadataList, null, 2));
};
module.exports = { startCreating, buildSetup, getElements };
I tried to use this code by inserting it into the main file, but it doesn't work:
try {
fs.unlinkSync(path) // "path" here needs to be the full path of the layer image to remove
} catch (err) {
console.error(err)
}
Can anyone tell me how I can solve this?
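A minimal sketch of one approach, assuming it lives in the same file where fs and debugLogs are already defined (the names usedLayerImages and deleteUsedLayerImages are my own). Because an element can be picked by several editions, deleting its file immediately would make loadImage fail later, so the sketch collects the used paths and deletes them only after the whole run:
// Collect the path of every layer image that gets used.
const usedLayerImages = new Set();

// Inside the Promise.all callback in startCreating, after an edition is drawn:
// renderObjectArray.forEach((renderObject) =>
//   usedLayerImages.add(renderObject.layer.selectedElement.path)
// );

// Called once at the end of startCreating, after writeMetaData(...):
const deleteUsedLayerImages = () => {
  usedLayerImages.forEach((filePath) => {
    try {
      fs.unlinkSync(filePath);
      debugLogs ? console.log(`Deleted used layer image: ${filePath}`) : null;
    } catch (err) {
      console.error(err);
    }
  });
};
The fs.unlinkSync call from the question is the right tool; the missing pieces are a concrete path (renderObject.layer.selectedElement.path, which getElements already builds) and a moment when no edition still needs the file.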
This is the sample Search widget in ArcGIS. I want to override the search function so that the only thing that can be searched is the data from my query, and it appears just like in the picture.
const [searchData, setSearchData] = useState(null);
const fetchSearchLocation = async (name) => {
const { data } = await client.query({
query: SEARCH_LOCATION,
variables: {
name: name
},
})
setSearchData(data);
return data;
};
useEffect(() => {
location();
}, []);
const location = () => {
if (mapDiv.current) {
esriConfig.apiKey = "sample api key";
const map = new Map({
basemap: 'arcgis-light-gray',
});
const view = new MapView({
center: [123.5504, 12.3574], // Longitude, latitude
container: mapDiv.current,
map: map,
zoom: 2, // Zoom level
});
const searchWidget = new Search({
view: view,
sources: [customSearchSource],
includeDefaultSources: false
});
view.ui.add(searchWidget, { position: "top-left", index: 0 });
view
.when((r) => {})
.then(() => {
mapDiv.current = view;
fetchData();
});
}
};
const searchDataLocation = new Promise((resolve, reject) => {
setTimeout(() => {
console.log("searchData: ", searchData)
resolve(searchData); //test_data = Your data.
}, 1000);
});
const customSearchSource = new SearchSource({
placeholder: 'Search',
getSuggestions: (params) => {
fetchSearchLocation(params.suggestTerm)
console.log("searchData2: ", searchData)
return searchDataLocation.then((data) => {
console.log("datas: ", data) // the result is null.
})
....
Resource: https://developers.arcgis.com/javascript/latest/api-reference/esri-widgets-Search.html
How can I put my own queries in the ArcGIS Esri Search?
Note: this is an edited question.
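A minimal sketch of what the custom source could look like, assuming @arcgis/core ES module imports and assuming fetchSearchLocation resolves to something like data.locations = [{ name, latitude, longitude }] (that shape is a guess; adjust it to the real GraphQL result). The key change is to await the query inside getSuggestions and return its result directly, instead of reading searchData, which still holds the previous render's value at that point:
import Graphic from "@arcgis/core/Graphic";
import Point from "@arcgis/core/geometry/Point";
import SearchSource from "@arcgis/core/widgets/Search/SearchSource";

const customSearchSource = new SearchSource({
  placeholder: "Search",
  // Build suggestions from the freshly fetched data, not from React state.
  getSuggestions: async (params) => {
    const data = await fetchSearchLocation(params.suggestTerm);
    return (data?.locations ?? []).map((loc, index) => ({
      key: String(index),
      text: loc.name,
      sourceIndex: params.sourceIndex,
    }));
  },
  // Resolve the picked suggestion to a feature the widget can display and zoom to.
  getResults: async (params) => {
    const data = await fetchSearchLocation(params.suggestResult.text);
    return (data?.locations ?? []).map((loc) => ({
      feature: new Graphic({
        geometry: new Point({ longitude: loc.longitude, latitude: loc.latitude }),
        attributes: { name: loc.name },
      }),
      name: loc.name,
    }));
  },
});
With this in place the searchDataLocation promise and its setTimeout are no longer needed; the widget simply waits for the promise that getSuggestions returns.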
I use i18next to localize HTML, and I am trying to cache the selected language so it does not fall back to the default on page refresh, but I can't get it to work.
Here is my code.
<script src="https://unpkg.com/i18next/dist/umd/i18next.min.js"></script>
<script>
function updateContent() {
const elements = document.getElementsByClassName("i18nelement");
for (let i = 0; i < elements.length; i++) {
const element = elements[i];
const k = element.getAttribute("data-i18n");
element.innerHTML = i18next.t(k);
}
}
async function i18Loader() {
const langs = ["en", "ru"];
const jsons = await Promise.all(
langs.map((l) => fetch("src/i18/" + l + ".json").then((r) => r.json()))
);
const res = langs.reduce((acc, l, idx) => {
acc[l] = { translation: jsons[idx] };
return acc;
}, {});
await i18next.init({
lng: 'en',
debug: true,
resources: res,
fallbackLng: "en-US"
});
updateContent();
i18next.on("languageChanged", () => {
updateContent();
});
const langSelector = document.getElementById("langSelector");
langSelector.removeAttribute("disabled");
langSelector.addEventListener("change", (e) => {
i18next.changeLanguage(e.target.value);
});
}
i18Loader();
</script>
How can I store the selected language in localStorage?
I found the solution. The correct code is here:
async function i18Loader() {
const langs = ["en", "ru"];
const jsons = await Promise.all(
langs.map((l) => fetch("src/i18/" + l + ".json").then((r) => r.json()))
);
const res = langs.reduce((acc, l, idx) => {
acc[l] = { translation: jsons[idx] };
return acc;
}, {});
await i18next
.init({
lng: localStorage.getItem("lan") || 'en',
debug: true,
resources: res,
fallbackLng: "en-US",
backend: {
// note: this backend block only takes effect if a backend plugin is registered via i18next.use(...)
backendOptions: [{
// can be either window.localStorage or window.sessionStorage. Default: window.localStorage
store: typeof window !== 'undefined' ? window.localStorage : null
}, {
loadPath: '/src/i18/{{lng}}.json' // xhr load path for my own fallback
}]
}
});
function updateContent() {
const elements = document.getElementsByClassName("i18nelement");
for (let i = 0; i < elements.length; i++) {
const element = elements[i];
const k = element.getAttribute("data-i18n");
element.innerHTML = i18next.t(k);
}
}
updateContent();
i18next.on("languageChanged", () => {
updateContent();
});
const langSelector = document.getElementById("langSelector");
langSelector.removeAttribute("disabled");
langSelector.addEventListener("change", (e) => {
i18next.changeLanguage(e.target.value);
localStorage.setItem("lan", e.target.value);
});
}
i18Loader();
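For completeness, a sketch of an alternative that lets i18next handle the caching itself via the i18next-browser-languageDetector plugin; this init call would replace the one inside i18Loader, and the unpkg path and option values below are the usual ones but should be checked against the plugin's README:
<script src="https://unpkg.com/i18next-browser-languagedetector/i18nextBrowserLanguageDetector.min.js"></script>
<script>
  // Register the detector before init; it reads and caches the language in localStorage.
  i18next
    .use(i18nextBrowserLanguageDetector)
    .init({
      debug: true,
      resources: res, // the same resources object built earlier in i18Loader
      fallbackLng: "en",
      detection: {
        order: ["localStorage", "navigator"],
        caches: ["localStorage"] // persists every changeLanguage() call automatically
      }
    });
</script>
With the detector caching the choice, the manual localStorage.getItem/setItem calls can be dropped.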
I'm having two issues at the moment.
I am having trouble displaying the data from my Firestore document after the user signs in, even though they do have a document with data.
I have a button that lets the user upload a document, which I parse to extract all the important data. That data is then put into the document with IDs, which are basically the userId plus the number of the doc belonging to the user. When they upload the data, it seems that every extracted entry uses the same ID and only the last one gets displayed.
This is my code:
const loansRef = firebase.firestore().collection('goals');
const [ courseGoals, setCourseGoals ] = useState([]);
const [goalCounter, setGoalCounter] = useState(0)
//let the user upload doc, what gets called when they press button
const pickDocument = async () => {
try {
let input = await DocumentPicker.getDocumentAsync({
type: "text/plain",
});
setUserOut(await FileSystem.readAsStringAsync(input.uri));
} catch (error) {
console.log(error);
}
createLoans();
};
//extracts important info and returns array with all info for one loan
const fileParser = () => {
const parsedLoans = [];
var newUserOut = userOut;
if (newUserOut.length == 0) {
return;
}
//remove the grants
var grantPos = newUserOut.search("Grant Type:");
var pos = newUserOut.search("Loan Type:");
//hopefully just the loans now
newUserOut = newUserOut.slice(pos, grantPos);
while (newUserOut.length > 0) {
var lastPos = newUserOut.lastIndexOf("Loan Type:");
parsedLoans.push(newUserOut.slice(lastPos, newUserOut.length));
newUserOut = newUserOut.slice(0, lastPos);
}
//console.log('parsed loans: ' + parsedLoans)
return parsedLoans;
};
//where we actually create loans and get the important data for each loan
const createLoans = () => {
const newLoans = fileParser();
const title= 'Loan Amount:$'
const interest = 'Loan Interest Rate:'
for(let i =0; i < newLoans.length; i++){
let loan = newLoans[i]
let goalTitle=loan.substring(loan.indexOf(title)+title.length,loan.indexOf('Loan Disbursed Amount:'))
//console.log("goalTitle: " + goalTitle)
let interestRate = loan.substring(loan.indexOf(interest)+interest.length,loan.indexOf('Loan Repayment Plan Type'))
//console.log("Interest rate: "+ interestRate)
let years = 0
let paidOff = 0
addGoalHandler(goalTitle,interestRate,years,paidOff)
}
return
};
useEffect(() => {
getPW()
let isMounted = true;
if (isMounted) {
loansRef.doc(userId).onSnapshot(
(docSnapshot) => {
if(!docSnapshot.exists){console.log('doc doesnt exist, start from scratch')}
else{
console.log('loaded successfully '+docSnapshot.data())
setGoalCounter(docSnapshot.data().loans.length)
console.log(goalCounter) // still logs the previous value: setGoalCounter is applied on the next render
}
},
(error) => {
console.log(error);
}
);
}
return () => {
isMounted = false;
};
}, []);
const addGoalHandler = (goalTitle, interestRate, years, paidOff) => {
//setCourseGoals([...courseGoals, enteredGoal])
setGoalCounter(goalCounter+1)
setCourseGoals((prevGoals) => [
...courseGoals,
{
id:userId.toString() + goalCounter.toString(),
value: goalTitle,
interest: interestRate,
years: years,
paidOff: paidOff
}
]);
addToFB(goalTitle, interestRate,years,paidOff)
//var oldIDS = docIDS
//oldIDS.push(userId.toString() + goalCounter.toString())
//setDocIDS(oldIDS)
setIsAddMode(false);
};
const cancelGoalAdditionHandler = () => {
setIsAddMode(false);
};
const addToFB = async (goalTitle, interestRate, years, paidOff) => {
//adding data to firebase, takes into account if doc exists already
const loadDoc = await loansRef.doc(userId).get()
.then((docSnapshot)=> {
if(docSnapshot.exists){
loansRef.doc(userId).onSnapshot((docu)=>{
const updateLoansArr = loansRef.doc(userId).update({
loans: firebase.firestore.FieldValue.arrayUnion({
id: userId+goalCounter.toString(),
value: goalTitle,
interest: interestRate,
years: years,
paidOff: paidOff
})
})
//setGoalCounter(goalCounter+1)
})
}
else{
const addDoc = loansRef.doc(userId).set({
loans: firebase.firestore.FieldValue.arrayUnion({
id: userId+goalCounter.toString(),
value: goalTitle,
interest: interestRate,
years: years,
paidOff: paidOff
})
})
//setGoalCounter(goalCounter+1)
}})
//setGoalCounter(goalCounter+1)
}
const removeGoalHandler = async (goalId) => {
/*
setCourseGoals((currentGoals) => {
loansRef.doc(goalId).delete().then(console.log('removed correctly'))
return currentGoals.filter((goal) => goal.id !== goalId);
});
setGoalCounter(goalCounter-1)
*/
//need to use the value and not the goalId
const removeGoal = await loansRef.doc(goalId).update({
goals: firebase.firestore.FieldValue.arrayRemove(goalId)
})
setCourseGoals((currentGoals)=> {
return currentGoals.filter((goal)=> goal.id !== goalId)
})
};
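For the second issue, the likely cause is the stale goalCounter closure: every addGoalHandler call inside the createLoans loop sees the same goalCounter value, because setGoalCounter does not change the variable until the next render, so every loan ends up with the same userId + goalCounter ID and entries that share an id collide when stored and rendered. A sketch of one way around it, keeping the userId + index naming scheme from the question (the extra index parameter and the adjusted addToFB signature are my own changes):
// Compute the ID per loan from an explicit index instead of reading state inside the loop.
const createLoans = () => {
  const newLoans = fileParser();
  if (!newLoans) return;
  const title = 'Loan Amount:$';
  const interest = 'Loan Interest Rate:';
  newLoans.forEach((loan, i) => {
    const goalTitle = loan.substring(loan.indexOf(title) + title.length, loan.indexOf('Loan Disbursed Amount:'));
    const interestRate = loan.substring(loan.indexOf(interest) + interest.length, loan.indexOf('Loan Repayment Plan Type'));
    // goalCounter is the count loaded from Firestore; i makes each loan in this upload unique.
    addGoalHandler(goalTitle, interestRate, 0, 0, goalCounter + i);
  });
};

const addGoalHandler = (goalTitle, interestRate, years, paidOff, index) => {
  const id = userId.toString() + index.toString();
  setCourseGoals((prevGoals) => [
    ...prevGoals, // spread the up-to-date previous state, not the captured courseGoals
    { id, value: goalTitle, interest: interestRate, years, paidOff }
  ]);
  setGoalCounter((prev) => prev + 1); // functional update avoids the stale closure
  addToFB(id, goalTitle, interestRate, years, paidOff); // pass the id down instead of rebuilding it from state
  setIsAddMode(false);
};
Two related observations: pickDocument calls createLoans() right after setUserOut, so fileParser still sees the previous userOut; parsing the string returned by readAsStringAsync directly (or running createLoans in a useEffect that watches userOut) avoids that. And for the first issue, the onSnapshot callback only updates goalCounter and never calls setCourseGoals with docSnapshot.data().loans, so the loans loaded from Firestore are never put into the state the screen renders.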
We are creating a meta-search for real estate. I would like to predict the running time (the time a listing stays online) based on certain metrics with a TensorFlow.js sequential model.
I have the following data:
listing.buyingPrice,
listing.foreClosure ? 1 :0,
listing.leasehold ? 1 : 0,
listing.squareMeter,
listing.rooms,
listing.grossReturn,
listing.locationFactor.score
This is the code I'm using:
const createModel = () => {
// Create a sequential model
const model = tf.sequential();
// Add a single hidden layer
model.add(tf.layers.dense({inputShape: [shapeLength], units: 50, useBias: true}));
// Add an output layer
model.add(tf.layers.dense({units: 1, useBias: true}));
return model;
};
async function trainModel(model, inputs, labels) {
// Prepare the model for training.
const learningRate = 0.0001;
model.compile({
optimizer: tf.train.adam(learningRate),
loss: tf.losses.meanSquaredError,
metrics: ['mse'],
});
const batchSize = 32;
const epochs = 50;
return await model.fit(inputs, labels, {
batchSize,
epochs,
shuffle: true
});
}
const shapeLength=7;
const convertListing = (listing) => ([
listing.buyingPrice,
listing.foreClosure ? 1 :0,
listing.leasehold ? 1 : 0,
listing.squareMeter,
listing.rooms,
listing.grossReturn,
listing.locationFactor.score
]);
const convertToTensor =(listings) => {
// Wrapping these calculations in a tidy will dispose any
// intermediate tensors.
return tf.tidy(() => {
// Step 1. Shuffle the data
tf.util.shuffle(listings);
// Step 2. Convert data to Tensor
const inputs = listings.map(listing =>
convertListing(listing)
);
const labels = listings.map(listing => listing.runningTime);
const inputTensor = tf.tensor2d(inputs, [inputs.length, shapeLength]);
const labelTensor = tf.tensor2d(labels, [labels.length, 1]);
return {
inputs: inputTensor,
labels: labelTensor,
}
});
};
const modelCreated = createModel();
export const doTraining = async () => {
const listings = await findExactMatch({
rented:false,
active:false,
zip:'10243',
}, OBJECT_TYPES.APARTMENTBUY);
const filteredListings = listings.filter(listing =>
listing.runningTime>=0
&& listing.buyingPrice>=0
&& listing.runningTime <100
&& listing.rooms>0
&& listing.squareMeter >0
&& listing.grossReturn >0
&& listing.locationFactor.score>0
);
const tensorData = convertToTensor(filteredListings);
const {inputs, labels} = tensorData;
// Train the model
await trainModel(modelCreated, inputs, labels);
console.log('Done Training');
filteredListings.map(listing=>{
console.log(listing.runningTime)
});
for(let i=0;i<10;i++){
console.log('Real running time',filteredListings[i].runningTime)
console.log('Predicted runningTime',await testModel(filteredListings[i],tensorData));
}
};
export const testModel = async (listing) => {
const input = convertListing(listing);
const inputData = tf.tensor2d([input], [1, shapeLength]);
const result = modelCreated.predict(inputData);
return result.dataSync()[0];
}
I'm using roughly 3000 listings. The problem is that the predicted running time is WAY off. What can I do to improve that? Is my model even correct? :)
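A likely culprit is feeding raw values into the network: buyingPrice is orders of magnitude larger than the 0/1 flags, and the raw labels go straight into a single linear unit, so 50 epochs at a 0.0001 learning rate barely move the weights. A sketch of min-max normalizing inputs and labels and un-normalizing the prediction, in the spirit of the official tfjs 2D-data tutorial; only convertToTensor and testModel change, and the extra bounds returned from convertToTensor are my own additions:
const convertToTensor = (listings) => {
  return tf.tidy(() => {
    tf.util.shuffle(listings);
    const inputs = listings.map(convertListing);
    const labels = listings.map((listing) => listing.runningTime);

    const inputTensor = tf.tensor2d(inputs, [inputs.length, shapeLength]);
    const labelTensor = tf.tensor2d(labels, [labels.length, 1]);

    // Per-column bounds for the features, scalar bounds for the label.
    const inputMax = inputTensor.max(0);
    const inputMin = inputTensor.min(0);
    const labelMax = labelTensor.max();
    const labelMin = labelTensor.min();

    return {
      inputs: inputTensor.sub(inputMin).div(inputMax.sub(inputMin)),
      labels: labelTensor.sub(labelMin).div(labelMax.sub(labelMin)),
      inputMax, inputMin, labelMax, labelMin
    };
  });
};

export const testModel = (listing, { inputMax, inputMin, labelMax, labelMin }) => {
  return tf.tidy(() => {
    const input = tf.tensor2d([convertListing(listing)], [1, shapeLength]);
    const normalized = input.sub(inputMin).div(inputMax.sub(inputMin));
    const prediction = modelCreated.predict(normalized);
    // Map the 0..1 output back to a real running time.
    return prediction.mul(labelMax.sub(labelMin)).add(labelMin).dataSync()[0];
  });
};
doTraining already passes tensorData as the second argument, so only the signature changes. Two caveats: if any feature is constant in the training data its max equals its min and the division yields NaN, so drop that column or add a small epsilon; and adding an activation (for example relu) to the hidden layer plus training for more epochs usually helps further.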