Read from 2 Different file to be 2 dimensional array Javascript - javascript

How to read two .txt files and turn these 2 files to a 2d array?
I already have a code like this :
// Read one text file and log its second line.
var fs = require('fs')
var file = './text1.txt'
fs.readFile(file, 'utf-8', (e, d) => {
    // Surface read errors instead of silently splitting an undefined `d`.
    if (e) throw e
    // `var` keeps textByLine from leaking as an implicit global.
    var textByLine = d.split('\r\n')
    console.log("test : " + textByLine[1])
})
source
I succeeded to store the file in a 1d array but now I have 2 files and I want to store them in a 2d array.
How to do this?
Thank you

You can declare a variable at the top holding an empty array; after you read each file, push the result into that variable, like this:
// Holds one inner array (of lines) per file read.
// NOTE: the original name `2dArray` is not a legal JavaScript identifier
// (identifiers cannot start with a digit), so it is renamed here.
const twoDArray = [];
// Reads `path` and pushes its lines onto twoDArray.
const fillArray = (path) => {
    fs.readFile(path, 'utf-8', (e, d) => {
        // Propagate read errors instead of pushing undefined data.
        if (e) throw e;
        twoDArray.push(d.split('\r\n')); // the result is already an array.
    });
};
after that you can call each file like this :
// read the files and push the result to the variable twoDArray
// (renamed from `2dArray`, which is not a legal identifier)
fillArray('./text1.txt');
fillArray('./text2.txt');
// NOTE: fs.readFile is asynchronous — twoDArray is only populated once the
// callbacks have fired, so indexing it immediately may find it still empty.
// you can read the 1st result of your 1st file array like this
const firstPartOfArray = twoDArray[0][0]; // text1 first result value
If you don't need the result files to be in order, I strongly recommend using an async function.
Also, you can use things like the fs-jetpack package to handle this, or glob.

Related

Getting a 'random' value from a module in node.js

Let's say I have a file that looks like this:
// This runs exactly once, when the module is first require()d; Node then
// caches the module, so the exported value never changes afterwards.
var random_nr = Math.floor(Math.random()*array.length);
var x = array[random_nr];
// do some things
// Exports the single value picked above, not a way to re-pick one.
exports.random_array_member = x
Now, if I 'require' this in another file, I will always get the same result as long as I don't restart my server, presumably because of caching?
What is the best way to run this code and get a random value, while not including the code into my main file?
The code you have shown is only executed once. The result from that code is then stored as a variable, ready to be exported to whatever file needs it.
Instead, you need to "call" the code at the moment you need a random variable:
exports.random_array_member = function(){
var random_nr = Math.floor(Math.random()*array.length);
return array[random_nr];
}
Now, instead of accessing exports.random_array_member, you call exports.random_array_member() in your other files.
Let's play with getters:
random.js
var array = [1, 2, 3, 4, 5];
module.exports = {
get random_array_member() {
return array[Math.floor(Math.random()*array.length)]
}
}
consumer.js
// consumer.js — each access to the getter may log a different value.
const r = require('./random')
for (let i = 0; i < 3; i += 1) {
    console.log(r.random_array_member)
}

Read a bunch of JSON files, transform them, and save them

I'm trying to achieve this with Gulp.
Read every .json file in a given directory including subdirectories.
Transform them in some way, for example add a new root level, etc.
Save them into a new directory keeping original structure.
The point where I'm lost is how to pipe reading/writing JSON to src.
I have the following skeleton now.
// Skeleton: read every .json file recursively, transform it (the missing
// step the question asks about), and write the result to "processed".
gulp.task("migratefiles", function () {
return gulp.src("files/**/*.json")
.pipe(/* WHAT HERE? */) // <- the transform step goes here
.pipe(gulp.dest("processed"));
});
There's a number of way you can do this:
(1) Use the gulp-json-transform plugin:
var jsonTransform = require('gulp-json-transform');

// Wrap each JSON document under a new "newRootLevel" root key.
gulp.task("migratefiles", function () {
    return gulp.src("files/**/*.json")
        .pipe(jsonTransform(function (json, file) {
            // Return the transformed document directly.
            return { "newRootLevel": json };
        }))
        .pipe(gulp.dest("processed"));
});
Pros:
Easy to use
Supports asynchronous processing (if you return a Promise)
Gives access to path of each file
Cons:
Only rudimentary output formatting
(2) Use the gulp-json-editor plugin:
var jeditor = require('gulp-json-editor');

// Same transformation via gulp-json-editor: nest the whole document
// under a new "newRootLevel" key.
gulp.task("migratefiles", function () {
    return gulp.src("files/**/*.json")
        .pipe(jeditor(function (json) {
            // Return the transformed document directly.
            return { "newRootLevel": json };
        }))
        .pipe(gulp.dest("processed"));
});
Pros:
Easy to use
Automatically recognizes the indentation your input files use (two spaces, four spaces, tabs etc.) and formats your output files accordingly
Supports various js-beautify options
Cons:
Doesn't seem to support asynchronous processing
Doesn't seem to have a way to access path of each file
(3) Do it manually (by directly accessing the vinyl file object using map-stream):
var map = require('map-stream');

// Manual approach: parse, transform, and re-serialize each vinyl file.
gulp.task("migratefiles", function () {
    return gulp.src("files/**/*.json")
        .pipe(map(function (file, done) {
            var json = JSON.parse(file.contents.toString());
            var transformedJson = {
                "newRootLevel": json
            };
            // Buffer.from() replaces the deprecated and unsafe
            // `new Buffer()` constructor.
            file.contents = Buffer.from(JSON.stringify(transformedJson));
            // done(err, file) passes the rewritten file downstream.
            done(null, file);
        }))
        .pipe(gulp.dest("processed"));
});
Pros:
Full control/access over everything
Supports asynchronous processing (through a done callback)
Cons:
Harder to use

Adding keys to an object inside readFile

I'm learning my way through node and gulp and trying to do something that there may already be a solution for but I'm doing it as a learning exercise. The idea is that I want to scan all the files in a directory, read the files and look for the gulp.task line, read in the task name and the comment above it. This information will be used to generate an object then sent to a file in order to make something readable by gulp-list.
I'm stuck trying to add items into the object during the reading of the file. Nothing I have tried so far enables me to add a key and value to the object.
Any help you can give would be great. Also if you know of another (potentially easier way) I would be really interested to hear. I've had a look at gulp-task-list but this does not seem to support the multiple file approach I want to use.
var gulp = require('gulp')
// NOTE(review): no `var` before `fs` — the missing semicolon/comma on the
// previous line means this assignment creates an implicit global.
fs = require('fs');
var path = './gulp/tasks';
var taskList = {};
// DESC: Rebuilds the list
gulp.task('build:list', function() {
// fs.readdir is asynchronous: its callback (and the nested readFile
// callbacks) run AFTER the console.log(taskList) at the bottom.
fs.readdir(path, function(err, files) {
if (err) throw err;
files.forEach(function (file) {
fs.readFile(path+'/'+file, function(err, data) {
if (err) throw err;
// `lines` is also an implicit global (no var/let/const).
lines = data.toString().split("\n");
lines.forEach(function (item, index, array) {
// Assignment inside the condition: `match` becomes an implicit
// global; the if-test is on the match result (null when no match).
if (match = item.match(/^gulp\.task\(\'(.*)\'/)) {
console.log(match[1]);
taskList[match[1]] = true;
}
})
});
})
})
// Runs before any of the async callbacks above — logs an empty {}.
console.log(taskList);
});
So I found a solution, I figured out that it's probably not possible to alter a variable out of scope while in an async function. I'm not entirely sure why but I am sure I will learn that over time unless anyone wants to point me in the right direction.
My solution, in full including writing out the JSON file
var gulp = require('gulp');
// Declared with `var` so these no longer leak as implicit globals.
var fs = require('fs');
var gutil = require('gulp-util');
var path = './gulp/tasks';
var taskList = {};

// Rebuilds the task list. Uses the synchronous fs APIs so taskList is
// fully populated before it is serialized below.
gulp.task('build:list', function() {
    var files = fs.readdirSync(path);
    files.forEach(function (file) {
        var contents = fs.readFileSync(path + '/' + file);
        var lines = contents.toString().split("\n");
        lines.forEach(function (item, index, array) {
            // Non-greedy capture of the task name; match is scoped locally
            // instead of being assigned inside the if-condition.
            var match = item.match(/^gulp\.task\(\'(.*?)\'/);
            if (match) {
                // Store the line above the task definition (its comment)
                // as the description; undefined when the task is on line 0.
                taskList[match[1]] = array[index - 1];
            }
        });
    });
    fs.writeFileSync('./tasks.json', JSON.stringify(taskList));
    gutil.log("Task list built");
});
The second solution I thought of might be a lot easier, I thought maybe I can read each file and concat all of the files into a single file which then might be able to replace the gulpfile.js file which then might allow me to use another tool to get all the task names and descriptions. Or possibly concat the files together and then instead of process multiple file I can process one. If I come up with one of these solutions I will update this.

Is it possible to combine two FileList objects?

I have a FileList object which holds previously uploaded documents. I'm trying to use another function to add more files to this collection, by using another FileList object, so I need to "append" the secondary FileList object onto the primary one. How would it be possible to achieve this?
You have to first convert the FileList objects to Arrays, after which you can simply concat the multiple arrays.
const joined = Array.from(fileListA).concat(Array.from(fileListB));
// Spread both (iterable) FileLists into one plain array of File objects.
const filesArray = [...filesList1, ...filesList2];
console.log(filesArray) // [File, File, File, File, File, ...]
The selected answer clarifies the path completely. A temp variable can also be used here, like the following:
// Convert the current FileList and the newly selected one to arrays,
// then merge them back into `files`.
var temp = files;
var existing = Array.prototype.slice.call(temp);
var added = Array.prototype.slice.call(event.target.files);
files = existing.concat(added);
Also in React for setting the files in the state the following code can be used inside handleInputChange function:
// Merge the previously stored files with the newly selected ones
// before writing them back into component state.
var temp = this.state.files;
var current = Array.prototype.slice.call(temp);
var incoming = Array.prototype.slice.call(event.target.files);
this.setState({
    files: current.concat(incoming)
})
var fileLists = [fileListA, fileListB];
// Flatten every FileList into one array of Files. Passing [] as the
// initial value fixes the edge case where fileLists has a single entry:
// without it, reduce would return the raw FileList unchanged.
var files = fileLists.reduce(function (acc, list) {
    return acc.concat(Object.values(list));
}, []);

Return complete file name if only know a prefix (substring)

Using JavaScript / jQuery, how can I get the complete file name of a file, when I only know its prefix?
For example:
The folder I'm browsing contains pic files:
001_PicA.jpg
002_PicB.jpg
004_PicC.jpg
007_PicD.jpg
008_PicE.jpg
Now let's say in my script I only have __002__ as information available. How could I get the complete file name (that is: 002_PicB.jpg)?
As others have said, it is not possible to do this directly. However, if the list of files is available as an array, then try the approach below.
Iterate over each item in the array and check for its occurrence using indexOf().
// Collect every file name containing the substring "002".
var fileName = ["001_PicA.jpg", "002_PicB.jpg", "003_PicC.jpg"];
var contains = [];
// Plain Array#forEach — no jQuery needed for a plain-array iteration.
fileName.forEach(function (name) {
    // Strict comparison; indexOf() returning -1 means "not found".
    if (name.indexOf("002") !== -1) {
        contains.push(name); // push the matching items to the array
    }
});
console.log(contains); // logs ["002_PicB.jpg"]
JSFiddle

Categories