Store selected language in window.localStorage - javascript

I use i18next to localize HTML and I am trying to cache the selected language so it does not fall back to the default on page refresh, but I can't get it to work.
Here is my code:
<script src="https://unpkg.com/i18next/dist/umd/i18next.min.js"></script>
<script>
  function updateContent() {
    const elements = document.getElementsByClassName("i18nelement");
    for (let i = 0; i < elements.length; i++) {
      const element = elements[i];
      const k = element.getAttribute("data-i18n");
      element.innerHTML = i18next.t(k);
    }
  }

  async function i18Loader() {
    const langs = ["en", "ru"];
    const jsons = await Promise.all(
      langs.map((l) => fetch("src/i18/" + l + ".json").then((r) => r.json()))
    );
    const res = langs.reduce((acc, l, idx) => {
      acc[l] = { translation: jsons[idx] };
      return acc;
    }, {});
    await i18next.init({
      lng: 'en',
      debug: true,
      resources: res,
      fallbackLng: "en-US"
    });
    updateContent();
    i18next.on("languageChanged", () => {
      updateContent();
    });
    const langSelector = document.getElementById("langSelector");
    langSelector.removeAttribute("disabled");
    langSelector.addEventListener("change", (e) => {
      i18next.changeLanguage(e.target.value);
    });
  }

  i18Loader();
</script>
How can I store the selected language in localStorage?

I found the solution. The correct code is here:
async function i18Loader() {
  const langs = ["en", "ru"];
  const jsons = await Promise.all(
    langs.map((l) => fetch("src/i18/" + l + ".json").then((r) => r.json()))
  );
  const res = langs.reduce((acc, l, idx) => {
    acc[l] = { translation: jsons[idx] };
    return acc;
  }, {});
  await i18next
    .init({
      lng: localStorage.getItem("lan") || 'en',
      debug: true,
      resources: res,
      fallbackLng: "en-US",
      backend: {
        backendOptions: [{
          // can be either window.localStorage or window.sessionStorage. Default: window.localStorage
          store: typeof window !== 'undefined' ? window.localStorage : null
        }, {
          loadPath: '/src/i18/{{lng}}.json' // xhr load path for my own fallback
        }]
      }
    });

  function updateContent() {
    const elements = document.getElementsByClassName("i18nelement");
    for (let i = 0; i < elements.length; i++) {
      const element = elements[i];
      const k = element.getAttribute("data-i18n");
      element.innerHTML = i18next.t(k);
    }
  }

  updateContent();
  i18next.on("languageChanged", () => {
    updateContent();
  });
  const langSelector = document.getElementById("langSelector");
  langSelector.removeAttribute("disabled");
  langSelector.addEventListener("change", (e) => {
    i18next.changeLanguage(e.target.value);
    localStorage.setItem("lan", e.target.value);
  });
}

i18Loader();
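As far as I can tell, the backend block in the snippet above only takes effect if a backend plugin is registered with i18next.use(...); the parts that actually do the caching are localStorage.getItem("lan") at init time and localStorage.setItem("lan", ...) in the change handler. An alternative worth knowing about (a sketch, assuming the i18next-browser-languagedetector UMD build from unpkg) is to let the detector plugin read and cache the language for you:

<script src="https://unpkg.com/i18next/dist/umd/i18next.min.js"></script>
<script src="https://unpkg.com/i18next-browser-languagedetector/dist/umd/i18nextBrowserLanguageDetector.min.js"></script>
<script>
  // Sketch: the detector reads the cached language from localStorage on init,
  // and i18next.changeLanguage() updates the cache automatically.
  i18next
    .use(i18nextBrowserLanguageDetector)
    .init({
      fallbackLng: "en",
      resources: res, // the same resources object built in i18Loader()
      detection: {
        order: ["localStorage", "navigator"],
        caches: ["localStorage"],
        lookupLocalStorage: "lan" // localStorage key; the default is "i18nextLng"
      }
    });
</script>

With this setup the manual localStorage.setItem call in the change listener is no longer needed.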


Delete image read after running script

Good evening! I have a problem.
I am creating a collection of NFTs using this JS file.
What I want to change is this: once the script has read the images in the layers folder, created the final image, and written the metadata, the images that were read and used should be deleted from the source folder (${basePath}/layers).
const basePath = process.cwd();
const { NETWORK } = require(`${basePath}/constants/network.js`);
const fs = require("fs");
const sha1 = require(`${basePath}/node_modules/sha1`);
const { createCanvas, loadImage } = require(`${basePath}/node_modules/canvas`);
const buildDir = `${basePath}/build`;
const layersDir = `${basePath}/layers`;
const {
format,
baseUri,
description,
background,
uniqueDnaTorrance,
layerConfigurations,
rarityDelimiter,
shuffleLayerConfigurations,
debugLogs,
extraMetadata,
text,
namePrefix,
network,
solanaMetadata,
gif,
} = require(`${basePath}/src/config.js`);
const canvas = createCanvas(format.width, format.height);
const ctx = canvas.getContext("2d");
ctx.imageSmoothingEnabled = format.smoothing;
var metadataList = [];
var attributesList = [];
var dnaList = new Set();
const DNA_DELIMITER = "-";
const HashlipsGiffer = require(`${basePath}/modules/HashlipsGiffer.js`);
let hashlipsGiffer = null;
const buildSetup = () => {
if (fs.existsSync(buildDir)) {
fs.rmdirSync(buildDir, { recursive: true });
}
fs.mkdirSync(buildDir);
fs.mkdirSync(`${buildDir}/json`);
fs.mkdirSync(`${buildDir}/images`);
if (gif.export) {
fs.mkdirSync(`${buildDir}/gifs`);
}
};
const getRarityWeight = (_str) => {
let nameWithoutExtension = _str.slice(0, -4);
var nameWithoutWeight = Number(
nameWithoutExtension.split(rarityDelimiter).pop()
);
if (isNaN(nameWithoutWeight)) {
nameWithoutWeight = 1;
}
return nameWithoutWeight;
};
const cleanDna = (_str) => {
const withoutOptions = removeQueryStrings(_str);
var dna = Number(withoutOptions.split(":").shift());
return dna;
};
const cleanName = (_str) => {
let nameWithoutExtension = _str.slice(0, -4);
var nameWithoutWeight = nameWithoutExtension.split(rarityDelimiter).shift();
return nameWithoutWeight;
};
const getElements = (path) => {
return fs
.readdirSync(path)
.filter((item) => !/(^|\/)\.[^\/\.]/g.test(item))
.map((i, index) => {
return {
id: index,
name: cleanName(i),
filename: i,
path: `${path}${i}`,
weight: getRarityWeight(i),
};
});
};
const layersSetup = (layersOrder) => {
const layers = layersOrder.map((layerObj, index) => ({
id: index,
elements: getElements(`${layersDir}/${layerObj.name}/`),
name:
layerObj.options?.["displayName"] != undefined
? layerObj.options?.["displayName"]
: layerObj.name,
blend:
layerObj.options?.["blend"] != undefined
? layerObj.options?.["blend"]
: "source-over",
opacity:
layerObj.options?.["opacity"] != undefined
? layerObj.options?.["opacity"]
: 1,
bypassDNA:
layerObj.options?.["bypassDNA"] !== undefined
? layerObj.options?.["bypassDNA"]
: false,
}));
return layers;
};
const saveImage = (_editionCount) => {
fs.writeFileSync(
`${buildDir}/images/${_editionCount}.png`,
canvas.toBuffer("image/png")
);
};
const genColor = () => {
let hue = Math.floor(Math.random() * 360);
let pastel = `hsl(${hue}, 100%, ${background.brightness})`;
return pastel;
};
const drawBackground = () => {
ctx.fillStyle = background.static ? background.default : genColor();
ctx.fillRect(0, 0, format.width, format.height);
};
const addMetadata = (_dna, _edition) => {
let dateTime = Date.now();
let tempMetadata = {
name: `${namePrefix} #${_edition}`,
description: description,
image: `${baseUri}/${_edition}.png`,
dna: sha1(_dna),
edition: _edition,
date: dateTime,
...extraMetadata,
attributes: attributesList,
compiler: "CryptoGioconda Engine",
};
if (network == NETWORK.sol) {
tempMetadata = {
//Added metadata for solana
name: tempMetadata.name,
symbol: solanaMetadata.symbol,
description: tempMetadata.description,
//Added metadata for solana
seller_fee_basis_points: solanaMetadata.seller_fee_basis_points,
image: `image.png`,
//Added metadata for solana
external_url: solanaMetadata.external_url,
edition: _edition,
...extraMetadata,
attributes: tempMetadata.attributes,
properties: {
files: [
{
uri: "image.png",
type: "image/png",
},
],
category: "image",
creators: solanaMetadata.creators,
},
};
}
metadataList.push(tempMetadata);
attributesList = [];
};
const addAttributes = (_element) => {
let selectedElement = _element.layer.selectedElement;
attributesList.push({
trait_type: _element.layer.name,
value: selectedElement.name,
});
};
const loadLayerImg = async (_layer) => {
return new Promise(async (resolve) => {
const image = await loadImage(`${_layer.selectedElement.path}`);
resolve({ layer: _layer, loadedImage: image });
});
};
const addText = (_sig, x, y, size) => {
ctx.fillStyle = text.color;
ctx.font = `${text.weight} ${size}pt ${text.family}`;
ctx.textBaseline = text.baseline;
ctx.textAlign = text.align;
ctx.fillText(_sig, x, y);
};
const drawElement = (_renderObject, _index, _layersLen) => {
ctx.globalAlpha = _renderObject.layer.opacity;
ctx.globalCompositeOperation = _renderObject.layer.blend;
text.only
? addText(
`${_renderObject.layer.name}${text.spacer}${_renderObject.layer.selectedElement.name}`,
text.xGap,
text.yGap * (_index + 1),
text.size
)
: ctx.drawImage(
_renderObject.loadedImage,
0,
0,
format.width,
format.height
);
addAttributes(_renderObject);
};
const constructLayerToDna = (_dna = "", _layers = []) => {
let mappedDnaToLayers = _layers.map((layer, index) => {
let selectedElement = layer.elements.find(
(e) => e.id == cleanDna(_dna.split(DNA_DELIMITER)[index])
);
return {
name: layer.name,
blend: layer.blend,
opacity: layer.opacity,
selectedElement: selectedElement,
};
});
return mappedDnaToLayers;
};
/**
* In some cases a DNA string may contain optional query parameters for options
* such as bypassing the DNA isUnique check, this function filters out those
* items without modifying the stored DNA.
*
* @param {String} _dna New DNA string
* @returns new DNA string with any items that should be filtered, removed.
*/
const filterDNAOptions = (_dna) => {
const dnaItems = _dna.split(DNA_DELIMITER);
const filteredDNA = dnaItems.filter((element) => {
const query = /(\?.*$)/;
const querystring = query.exec(element);
if (!querystring) {
return true;
}
const options = querystring[1].split("&").reduce((r, setting) => {
const keyPairs = setting.split("=");
return { ...r, [keyPairs[0]]: keyPairs[1] };
}, []);
return options.bypassDNA;
});
return filteredDNA.join(DNA_DELIMITER);
};
/**
* Cleaning function for DNA strings. When DNA strings include an option, it
* is added to the filename with a ?setting=value query string. It needs to be
* removed to properly access the file name before Drawing.
*
* @param {String} _dna The entire newDNA string
* @returns Cleaned DNA string without querystring parameters.
*/
const removeQueryStrings = (_dna) => {
const query = /(\?.*$)/;
return _dna.replace(query, "");
};
const isDnaUnique = (_DnaList = new Set(), _dna = "") => {
const _filteredDNA = filterDNAOptions(_dna);
return !_DnaList.has(_filteredDNA);
};
const createDna = (_layers) => {
let randNum = [];
_layers.forEach((layer) => {
var totalWeight = 0;
layer.elements.forEach((element) => {
totalWeight += element.weight;
});
// number between 0 - totalWeight
let random = Math.floor(Math.random() * totalWeight);
for (var i = 0; i < layer.elements.length; i++) {
// subtract the current weight from the random weight until we reach a sub zero value.
random -= layer.elements[i].weight;
if (random < 0) {
return randNum.push(
`${layer.elements[i].id}:${layer.elements[i].filename}${
layer.bypassDNA ? "?bypassDNA=true" : ""
}`
);
}
}
});
return randNum.join(DNA_DELIMITER);
};
const writeMetaData = (_data) => {
fs.writeFileSync(`${buildDir}/json/_metadata.json`, _data);
};
const saveMetaDataSingleFile = (_editionCount) => {
let metadata = metadataList.find((meta) => meta.edition == _editionCount);
debugLogs
? console.log(
`Writing metadata for ${_editionCount}: ${JSON.stringify(metadata)}`
)
: null;
fs.writeFileSync(
`${buildDir}/json/${_editionCount}.json`,
JSON.stringify(metadata, null, 2)
);
};
function shuffle(array) {
let currentIndex = array.length,
randomIndex;
while (currentIndex != 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex--;
[array[currentIndex], array[randomIndex]] = [
array[randomIndex],
array[currentIndex],
];
}
return array;
}
const startCreating = async () => {
let layerConfigIndex = 0;
let editionCount = 1;
let failedCount = 0;
let abstractedIndexes = [];
for (
let i = network == NETWORK.sol ? 0 : 1;
i <= layerConfigurations[layerConfigurations.length - 1].growEditionSizeTo;
i++
) {
abstractedIndexes.push(i);
}
if (shuffleLayerConfigurations) {
abstractedIndexes = shuffle(abstractedIndexes);
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
while (layerConfigIndex < layerConfigurations.length) {
const layers = layersSetup(
layerConfigurations[layerConfigIndex].layersOrder
);
while (
editionCount <= layerConfigurations[layerConfigIndex].growEditionSizeTo
) {
let newDna = createDna(layers);
if (isDnaUnique(dnaList, newDna)) {
let results = constructLayerToDna(newDna, layers);
let loadedElements = [];
results.forEach((layer) => {
loadedElements.push(loadLayerImg(layer));
});
await Promise.all(loadedElements).then((renderObjectArray) => {
debugLogs ? console.log("Clearing canvas") : null;
ctx.clearRect(0, 0, format.width, format.height);
if (gif.export) {
hashlipsGiffer = new HashlipsGiffer(
canvas,
ctx,
`${buildDir}/gifs/${abstractedIndexes[0]}.gif`,
gif.repeat,
gif.quality,
gif.delay
);
hashlipsGiffer.start();
}
if (background.generate) {
drawBackground();
}
renderObjectArray.forEach((renderObject, index) => {
drawElement(
renderObject,
index,
layerConfigurations[layerConfigIndex].layersOrder.length
);
if (gif.export) {
hashlipsGiffer.add();
}
});
if (gif.export) {
hashlipsGiffer.stop();
}
debugLogs
? console.log("Editions left to create: ", abstractedIndexes)
: null;
saveImage(abstractedIndexes[0]);
addMetadata(newDna, abstractedIndexes[0]);
saveMetaDataSingleFile(abstractedIndexes[0]);
console.log(
`Created edition: ${abstractedIndexes[0]}, with DNA: ${sha1(
newDna
)}`
);
});
dnaList.add(filterDNAOptions(newDna));
editionCount++;
abstractedIndexes.shift();
} else {
console.log("DNA exists!");
failedCount++;
if (failedCount >= uniqueDnaTorrance) {
console.log(
`You need more layers or elements to grow your edition to ${layerConfigurations[layerConfigIndex].growEditionSizeTo} artworks!`
);
process.exit();
}
}
}
layerConfigIndex++;
}
writeMetaData(JSON.stringify(metadataList, null, 2));
};
module.exports = { startCreating, buildSetup, getElements };
I tried to use this code by inserting it into the main script, but it doesn't work:
try {
fs.unlinkSync(path)
//file removed
} catch(err) {
console.error(err)
}
Can anyone tell me how I can solve this?
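The fs.unlinkSync(path) snippet on its own can't work here, because path never holds the location of a layer image. The paths of the images that were just composited are available inside startCreating(): each entry of results (built by constructLayerToDna(newDna, layers)) carries a selectedElement.path. A sketch (untested) that deletes those source files right after an edition is saved:

// Sketch: place this inside startCreating(), right after
// saveMetaDataSingleFile(abstractedIndexes[0]).
results.forEach((layer) => {
  try {
    fs.unlinkSync(layer.selectedElement.path); // remove the used source image
    debugLogs ? console.log(`Deleted ${layer.selectedElement.path}`) : null;
  } catch (err) {
    console.error(err);
  }
});

Keep in mind that a later edition may select the same element again, and loadImage() would then fail because the file is gone. If elements can repeat, it is safer to collect the used paths in a Set and unlink them once, after the while loops finish.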

javascript recursive function memory leak

I am not good at English. I managed to write a working recursive function, but for some reason there is a memory leak. I wonder whether the problem is that there is no return statement. The purpose of this function is to walk objects, arrays, and all of their remaining properties.
How should I change the code if the missing return is the problem?
Thank you in advance.
I was able to track down the memory leak through the Google dev tools profiler.
function recursionProperty(prop, obj, fn) {
  if (Array.isArray(obj)) {
    obj.forEach(item => recursionProperty('files', item, fn));
  } else if (obj instanceof Object) {
    Object.keys(obj).forEach(prop => {
      const value = obj[prop];
      recursionProperty(prop, value, fn);
    });
  } else {
    fn(prop, obj);
  }
}

recursionProperty(null, {foo:'bar', baz: ['x','y']}, (prop, obj) => console.log(prop, obj));
My original code:
import _ from 'lodash';
import fs from 'fs';
import path from 'path';
import errors from '@feathersjs/errors';
import connections from '../../../connections';
import config from '../../../config';
/**
* @param req
* @param serviceItem
* @param query
* @returns {Promise<any>}
*/
const getServicePromise = async (req, serviceItem, query) => {
let serviceName = serviceItem;
if (typeof serviceItem !== 'string') {
serviceName = `datasets/${serviceItem.name}`;
}
return new Promise(async (resolve, reject) => {
let result;
let objResult;
try {
result = await req.app.service(serviceName).find(query);
} catch (e) {
result = null;
console.log(e);
}
// console.log(result);
if (result) {
if (typeof serviceItem !== 'string') {
objResult = { [serviceItem.name]: result.data };
} else {
objResult = { [serviceName]: result.data };
}
resolve(objResult);
} else if (result === null) {
objResult = { [serviceName]: [] };
resolve(objResult);
} else {
reject({
error: 'Not found data.'
});
}
});
};
/**
* Recursive function that finds file-path properties.
* Works even when objects, arrays, primitive types, and other types are mixed.
* @param prop
* @param obj
* @param fn
*/
function recursionProperty(prop, obj, fn) {
if (Array.isArray(obj)) {
obj.forEach(item => recursionProperty('files', item, fn));
} else if (obj instanceof Object) {
Object.keys(obj).forEach(prop => {
const value = obj[prop];
recursionProperty(prop, value, fn);
});
} else {
fn(prop, obj);
}
}
/**
* @param req
* @returns {Promise<{any}>}
*/
const getService = async req => {
const result = {};
const serverPath = [];
const { sheet, dataset, icon } = req.data;
const iconState = Object.prototype.hasOwnProperty.call(req.data, 'icon');
const sheetState = Object.prototype.hasOwnProperty.call(req.data, 'sheet');
const datasetState = Object.prototype.hasOwnProperty.call(req.data, 'dataset');
try {
// sheets
if (sheetState) {
const itemList = ['sheets'];
if (sheet.length === 0) {
const query = {
query: {
},
};
await Promise.all(itemList.map(serviceItem => getServicePromise(req, serviceItem, query))).then(data => {
data.forEach(item => {
Object.assign(result, item);
});
});
} else if (sheet.length > 0) {
const query = {
query: {
_id: {
$in: sheet,
},
},
};
await Promise.all(itemList.map(serviceItem => getServicePromise(req, serviceItem, query))).then(data => {
data.forEach(item => {
Object.assign(result, item);
});
});
} else {
result.sheets = [];
}
} else {
result.sheets = [];
}
// Get the file paths
if (sheet) {
const { sheets } = result;
// const filePath = [];
recursionProperty('files', sheets, (prop, value) => {
// Check here whether this is the desired field, then process it
if (prop === 'fullPath' && fs.existsSync(path.join(__dirname, '../../../../files', value))) {
// filePath.push(path.join(__dirname, '../../../../files', value));
serverPath.push(value);
}
});
// const deduplication = Array.from(new Set(serverPath));
// const deduplicationPath = await deduplicationFilePath(deduplication);
//
// Object.assign(result, { filePath: deduplicationPath });
} else {
// result.filePath = [];
}
// files
if (sheet) {
const deduplicationFiles = Array.from(new Set(serverPath));
if (deduplicationFiles.length > 0) {
const query = {
query: {
$sort: {
createdAt: -1,
},
fullPath: {
$in: deduplicationFiles,
},
}
};
const files = await req.app.service('files').find(query);
Object.assign(result, { files: files.data });
} else {
result.files = [];
}
} else {
result.files = [];
}
// dataset
if (datasetState) {
const query = {
query: {
// $limit: 100000
}
};
if (dataset.length === 0) {
const meta = await req.app.service('datasets/_meta_').find();
Object.assign(result, { _meta_: meta });
const db = await connections.getConnection(connections.DATASETS_DB);
const collectionNames = _.filter(await db.client.db(config.database_datasets.dbname).listCollections().toArray(), o => o.name !== '_meta_');
// collectionNames.forEach(str => {
// const detectA = iconvDetect.detect(Buffer.from(str.name));
// console.log('collection type', str.name, detectA);
// });
await Promise.all(meta.map(serviceItem => {
// const detectA = iconvDetect.detect(Buffer.from(serviceItem.key));
// console.log('meta type', serviceItem.key, detectA);
return getServicePromise(req, `datasets/${serviceItem.key}`, query);
})).then(data => {
Object.assign(result, { datasets: data });
});
} else if (dataset.length > 0) {
const metaQuery = {
query: {
$sort: {
createdAt: -1,
},
key: {
$in: dataset
}
}
};
const meta = await req.app.service('datasets/_meta_').find(metaQuery);
// console.log(meta);
Object.assign(result, { _meta_: meta });
await Promise.all(dataset.map(serviceItem => getServicePromise(req, `datasets/${serviceItem}`, query))).then(data => {
const d = Array.from(new Set(data));
const s = d.filter(item => item !== null);
if (d.length > 0) {
Object.assign(result, { datasets: s });
} else {
result.datasets = [];
result._meta_ = [];
}
});
} else {
result.datasets = [];
result._meta_ = [];
}
} else {
result.datasets = [];
result._meta_ = [];
}
if (iconState) {
const itemList = ['iconCategories', 'iconItems'];
const query = {};
if (icon.length === 0) {
await Promise.all(itemList.map(serviceItem => getServicePromise(req, serviceItem, query))).then(data => {
data.forEach(item => {
Object.assign(result, item);
});
});
}
} else {
result.iconCategories = [];
result.iconItems = [];
}
} catch (e) {
throw new errors.BadRequest('The data is invalid.', e);
}
return result;
};
export default getService;
There is most likely no memory leak in your code. Yes, recursive functions can be more memory-hungry than normal functions, because the call stack can grow very quickly, but remember that all functions implicitly return even if there is no return statement. (Imagine that there is always a "return undefined;" line at the end of each of your functions.)
When doing recursion, the call stack will grow until you reach the bottom of a recursion branch (no function returns until then). Once the end of a recursion branch is reached, which in your case happens whenever you hit the else block, the call stack "collapses" and all of the preceding calls return.
Memory will ultimately be freed by garbage collection as required.
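If the traversal still turns out to be too heavy on very deep or very large objects, the recursion can be replaced with an explicit stack, so the pending work lives on the heap instead of the call stack. A minimal sketch that mirrors recursionProperty (the visit order differs, but the same fn calls are made):

function iterateProperty(prop, obj, fn) {
  // Explicit stack instead of recursive calls.
  const stack = [{ prop, value: obj }];
  while (stack.length > 0) {
    const { prop: currentProp, value } = stack.pop();
    if (Array.isArray(value)) {
      value.forEach(item => stack.push({ prop: 'files', value: item }));
    } else if (value instanceof Object) {
      Object.keys(value).forEach(key => stack.push({ prop: key, value: value[key] }));
    } else {
      fn(currentProp, value);
    }
  }
}

iterateProperty(null, { foo: 'bar', baz: ['x', 'y'] }, (prop, value) => console.log(prop, value));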

Cannot read property 'vehicle_full' of undefined

When I load my app I get this JS exception. Not sure why!
Uncaught (in promise) TypeError: Cannot read property 'vehicle_full' of undefined at ActionCreators.js:?
export function loadPairDataActionCreator(address, refresh, topPartner, bottomPartner) {
return dispatch => fetch(address, returnHeaders())
.then(res => checkLogout(res))
.then(
data => {
// Change primary key from SHA to cName
if (data) {
let nd = [];
for (let p in data) {
const pair = data[p];
let tmp = {};
const pair_ids = Object.keys(pair);
for (let pi of pair_ids) {
tmp[pair[pi].vehicle_full] = pair[pi];
}
nd.push(tmp);
}
data = nd;
store.dispatch(currentPairs(data));
let vehicles = store.getState().vehicles,
keys = Object.keys(vehicles);
for (var i = 0; i < data.length; i++) {
const pair = data[i];
// pair ids
const [vehicle1, vehicle2] = Object.keys(pair);
// Add pairedWith field to each truck in the pair
store.dispatch(
addPairedVehicleActionCreator(
pair[vehicle2].vehicle_full, //<--- error here
vehicle1
))
store.dispatch(
addPairedVehicleActionCreator(
pair[vehicle1].vehicle_full,
vehicle2
))
}
store.dispatch(pairingComplete(true))
}
},
err => dispatch({
type: 'LOAD_PAIR_DATA_FAILURE',
err
})
)
.then(
() => {
if (refresh === "unpair") {
store.dispatch(changePageModeActionCreator({
mode: "READY_TO_UNPAIR",
rowIdentifier: store.getState().pageMode.rowIdentifier
}));
} else if (refresh === "pair") {
store.dispatch(changePageModeActionCreator({
mode: "READY_TO_PAIR",
rowIdentifier: topPartner,
rowIdentifierBottom: bottomPartner
}));
}
}
)
.then(
() => {
store.dispatch(removePairedVehicleActionCreator(topPartner)),
store.dispatch(removePairedVehicleActionCreator(bottomPartner))
}
)
}
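No answer was posted here, but judging from the code, the most likely cause is that Object.keys(pair) sometimes returns fewer than two keys, so vehicle2 is undefined and pair[vehicle2] is undefined, which is what makes the .vehicle_full access throw. A defensive sketch for that loop (the guard and the warning message are illustrative additions, not part of the original code):

for (var i = 0; i < data.length; i++) {
  const pair = data[i];
  const pairKeys = Object.keys(pair);
  // Guard: skip entries that do not contain two vehicles, otherwise
  // vehicle2 is undefined and pair[vehicle2].vehicle_full throws.
  if (pairKeys.length < 2) {
    console.warn('Skipping incomplete pair', pair);
    continue;
  }
  const [vehicle1, vehicle2] = pairKeys;
  store.dispatch(addPairedVehicleActionCreator(pair[vehicle2].vehicle_full, vehicle1));
  store.dispatch(addPairedVehicleActionCreator(pair[vehicle1].vehicle_full, vehicle2));
}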

FormHandler in the Bucket

So, I'm trying to set this string to get the URI of the image and the article, but I'm using positions and setting a new key to create a new entry... How can I set this form with the URI of the image? I haven't set up the getDownloadURL() method yet.
I already have the upload and it's working; now I need to store it in the bucket.
submitFormHandler = () => {
console.log("FormHandler: " + this.state.message)
let isFormValid = true;
let dataToSubmit = {};
const formCopy = this.state.form;
var user = firebase.auth().currentUser;
for (let key in formCopy) {
isFormValid = isFormValid && formCopy[key].valid;
dataToSubmit[key] = this.state.form[key].value;
}
if (isFormValid, user) {
this.setState({
loading: true
});
getTokens((value) => {
const imageRef = firebase.storage().ref('images/').child(imageSource.uri)
const dateNow = new Date();
const expiration = dateNow.getTime();
const form = {
...dataToSubmit,
uid: value[3][1]
}
if (expiration > value[2][1]) {
this.props.autoSignIn(value[1][1]).then(() => {
setTokens(this.props.User.userData, () => {
this.props.addArticle(form, this.props.User.userData.token).then(() => {
this.setState({ modalSuccess: true })
})
})
})
} else {
this.props.addArticle(form, value[0][1]).then(() => {
this.setState({ modalSuccess: true })
})
}
})
} else {
let errorsArray = [];
for (let key in formCopy) {
if (!formCopy[key].valid) {
errorsArray.push(formCopy[key].errorMsg)
}
}
this.setState({
loading: false,
upload: true,
hasErrors: true,
modalVisible: true,
errorsArray
})
}
}
showErrorsArray = (errors) => (
errors ?
errors.map((item, i) => (
<Text key={i} style={styles.errorItem}> - {item}</Text>
))
: null
)
clearErrors = () => {
this.setState({
hasErrors: false,
modalVisible: false,
errorsArray: []
})
}
resetScreen = () => {
const formCopy = this.state.form;
for (let key in formCopy) {
formCopy[key].valid = false;
formCopy[key].value = "";
}
this.setState({
modalSuccess: false,
hasErrors: false,
errorsArray: [],
loading: false
})
this.props.resetArticle();
}
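No answer was posted here either, but the usual pattern is to upload the file first, read back its download URL, and only then merge that URL into the form passed to addArticle. A sketch of the getTokens callback body (assumptions: react-native-firebase, where putFile() accepts a local file path; with the plain web SDK you would use put() with a Blob instead; the imageUrl field name is illustrative):

// Sketch: upload the image, then attach its download URL to the form.
const fileName = imageSource.uri.split('/').pop();
const imageRef = firebase.storage().ref('images').child(fileName);

imageRef
  .putFile(imageSource.uri)               // upload the local file to the bucket
  .then(() => imageRef.getDownloadURL())  // read back the public download URL
  .then(url => {
    const form = {
      ...dataToSubmit,
      uid: value[3][1],
      imageUrl: url                       // hypothetical field for the stored URI
    };
    return this.props.addArticle(form, value[0][1]);
  })
  .then(() => this.setState({ modalSuccess: true }))
  .catch(err => console.log(err));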

How to wrap FileSystemFileEntry.file into a function that returns Observable?

I'm trying to read files recursively from the dropped folder.
onDrop(event) {
event.preventDefault();
this.folderData = [];
this.filesData = [];
const items = event.dataTransfer.items;
for (let i = 0; i < items.length; i++) {
const item = items[i].webkitGetAsEntry();
if (item) {
this.scanFiles(item, this.folderData);
}
}
// send out data
// this.dropped.emit({ folderData: this.folderData, filesData: this.filesData });
}
private scanFiles(item, container: Array<any>) {
const nodeData = {
name: item.name,
isDirectory: item.isDirectory,
item: item,
children: []
};
container.push(nodeData);
if (item.isDirectory) {
const directoryReader = item.createReader();
directoryReader.readEntries(entries => {
if (entries) {
entries.forEach(entry => this.scanFiles(entry, nodeData.children));
}
});
} else if (item.isFile) {
// How to return an Observable array here?
item.file(file => {
file.fullPath = item.fullPath;
this.filesData.push(file);
});
}
}
According to MDN, FileSystemFileEntry.file returns the result in its callback.
https://developer.mozilla.org/en-US/docs/Web/API/FileSystemFileEntry/file
So before onDrop sends out the result, it has to wait until all the FileSystemFileEntry.file callbacks complete.
I want to use Observable.forkJoin to achieve this. But before that, how to wrap FileSystemFileEntry.file into a function that returns Observable?
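For reference, the callback itself can be wrapped like this (a minimal sketch with RxJS; bindCallback would also work, but an explicit new Observable makes the error path clearer):

import { Observable } from 'rxjs';

// Wrap the callback-style FileSystemFileEntry.file() into an Observable
// that emits the File once and then completes.
function fileEntryToObservable(entry) {
  return new Observable(observer => {
    entry.file(
      file => {
        observer.next(file);
        observer.complete();
      },
      err => observer.error(err)
    );
  });
}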
It turns out that a single forkJoin can't solve the problem. I finally completed this with a recursive forkJoin:
onDrop(event) {
event.preventDefault();
this.folderData = [];
this.filesData = [];
const items = event.dataTransfer.items;
const obs = [];
for (let i = 0; i < items.length; i++) {
const item = items[i].webkitGetAsEntry();
if (item) {
obs.push(new Observable<any>(observer => this.scanFiles(item, this.folderData, observer)));
}
}
forkJoin(obs).subscribe(e => {
this.dropped.emit({ folderData: this.folderData, filesData: this.filesData });
});
}
private scanFiles(item, container: Array<any>, observer: Subscriber<any>) {
const nodeData = {
name: item.name,
isDirectory: item.isDirectory,
item: item,
children: []
};
container.push(nodeData);
if (item.isDirectory) {
const directoryReader = item.createReader();
directoryReader.readEntries(entries => {
if (entries) {
if (entries.length === 0) {
observer.next();
observer.complete();
} else {
const subObs = entries.map(entry => new Observable<any>(innerObserver =>
this.scanFiles(entry, nodeData.children, innerObserver)));
forkJoin(subObs).subscribe(e => {
observer.next();
observer.complete();
});
}
} else {
observer.next();
observer.complete();
}
});
} else if (item.isFile) {
item.file(file => {
file.fullPath = item.fullPath;
this.filesData.push(file);
observer.next();
observer.complete();
});
}
}
For more detail, check https://github.com/ft115637850/ngx-folder-uploader.
