My main file is a "file watcher" that's watching a folder using the "node-watch" module. For each file placed in the folder, it has to do a couple of MSSQL calls and then process the file accordingly.
The file watcher script looks like that:
const watch = require('node-watch');
const timestamp = require('./SHR_modules/timestamp');
const sql = require('./SHR_modules/sql');
const configuration = require('./SHR_modules/config');
const logger = require('./SHR_modules/logger');

// Watch ./Incoming recursively; each created/updated file is looked up via
// sql.readLoadSpec and processed. The handler is async, so several files can
// be in flight at once: all per-file state therefore lives in local consts.
// (The original used implicit globals `filePath`/`fileName`, which were
// shared between concurrent invocations and clobbered each other -- the
// cross-file corruption described in the question.)
watch('./Incoming', { recursive: true }, async function (evt, name) {
    if (evt == 'update') {
        logger.addLog(configuration.logFile.watcher, 'File found: ' + name);
        const startTime = await timestamp.dateTimeDelimited(); // capture time at processing start
        // Plain synchronous string work -- no await needed. Split off the
        // last backslash-delimited segment as the file name.
        const fileName = name.split('\\').pop();
        const filePath = name.slice(0, name.length - fileName.length);
        // readLoadSpec rejects on lookup failure; log and fall through with
        // `myLoad` undefined so we skip the success branch below.
        const myLoad = await sql.readLoadSpec(fileName, filePath).catch(err => {
            logger.addLog(configuration.logFile.error, 'Error while reading Load Specs for file: ' + name);
            logger.addLog(configuration.logFile.error, err);
        });
        if (myLoad) {
            console.log(evt);
            logger.addLog(configuration.logFile.watcher, 'Finished processing: ' + name);
        }
        const finishTime = await timestamp.dateTimeDelimited(); // capture time at processing finish
        // Log timings only for processed files; the original logged
        // `undefined` twice on the remove path.
        console.log(startTime);
        console.log(finishTime);
    } else {
        logger.addLog(configuration.logFile.watcher, 'File removed: ' + name);
    }
});
and the SQL function:
// Looks up the load specification(s) for a file: findSpecs() returns the spec
// IDs, then each ID's LoadSpec row and Mappings rows are fetched and folded
// into an array that the promise resolves with.
// NOTE(review): the `async` executor inside `new Promise` is an anti-pattern;
// if queryDB rejects, nothing catches it and this promise never settles.
var readLoadSpec = (fileName, filePath) => {
return new Promise(async (resolve, reject) => {
// NOTE(review): `specIDs` is assigned without var/let/const, making it an
// implicit GLOBAL shared by every concurrent call. A second file arriving
// while this call awaits below overwrites it -- this is the bug described
// in the surrounding text. Declare it with `const`/`let` to fix.
specIDs = await findSpecs(fileName, filePath)
.catch(err => {
reject(err);
console.log(fileName + ' not found')
});
// Guard: findSpecs rejection leaves specIDs undefined; skip processing then.
if (typeof specIDs !== 'undefined' && specIDs) {
console.log('FOUND!!!!');
var addLoadSpec = [];
for (let i = 0; i < specIDs.length; i++) {
console.log(specIDs);
// One query for the spec header, one for its column mappings.
var tempQuery1 = `select * from LoadSpec where LoadID = ${specIDs[i]}`;
var tempQuery2 = `select * from Mappings where LoadID = ${specIDs[i]} ORDER BY OutputColumn`;
console.log(specIDs); <========= code work sup to this line
const results = await queryDB(tempQuery1);
const resultsMapping = await queryDB(tempQuery2);
console.log(fileName);
console.log(results.recordset[0].OutputFileName);
console.log(specIDs);
// Input-format description assembled from the first LoadSpec row.
const inputFrmt = {
name: results.recordset[0].InputFormatName,
format: results.recordset[0].InputFormat,
delimiter: results.recordset[0].InputDelimiter,
headerRows: results.recordset[0].InputHeaderRows
};
.
.
.
console.log(results.recordset[0].OutputColumnCount);
addLoadSpec.push(loadSpec);
};
resolve(addLoadSpec);
};
});
};
So the code runs to the row I've marked (before the await for the query results) and then loops back to the beginning.
So all is fine if I drop a single file into the folder, but when multiple files arrive at more or less the same time, the specIDs var takes the value of another file before the code resumes after the query await part is completed. specIDs is just an array, so let's say for file 'a.csv' it's [1,2], for file 'b.csv' it's [3,4] and for file 'c.csv' it's undefined. So before it gets the answer from the DB, this loop:
specIDs = await findSpecs(fileName, filePath)
.catch(err => {
reject(err);
console.log(fileName + ' not found')
});
if (typeof specIDs !== 'undefined' && specIDs) {
console.log('FOUND!!!!');
var addLoadSpec = [];
for (let i = 0; i < specIDs.length; i++) {
gets a "cannot read property 'length' of undefined" error
How do I make it process files one by one?
Related
This code works, I just need help with argument passing.
Goal:
To pass arguments to promises while iterating through a for loop. The arguments to pass are named folder, subfolder, pastsearch. The promises are called p1 and p2
What the Code does
This code works. Each promise takes a search term, looks for it in
its file and returns "yes" or "non" depending on whether the term is in the file or not.
Promise.all prints a single line of yes/non to a file and
prints the total count of "yes" to another file.
CODE
const fs = require('fs');
const readline = require('readline')
// Loop-variable seeds: folder/subfolder/pastsearch are mutated by the loop
// further down and read by the promises p1/p2 when they are constructed.
const task_id = 1;
let folder = 1;
let subfolder = 1;
let pastsearch = 1;
// Search terms, one per loop iteration.
const myarray = [6567, 50105, 67637, 293697];
const mylen = myarray.length;
const myfiles = ['file_1.txt', 'file_2.txt'];
// Truncate both output files before appending results.
// NOTE(review): '/pathtomyfile/' is a placeholder directory path; writeFile
// will fail on it -- and the error argument is ignored in the callback.
fs.writeFile('/pathtomyfile/', '', function() {
console.log('done')
})
fs.writeFile('/pathtomyfile/', '', function() {
console.log('done')
})
// Scans myfiles[0] line by line; resolves { yesnon } = "yes" if any line
// equals `pastsearch`, "non" otherwise.
// NOTE(review): this promise is created ONCE, with the folder/subfolder/
// pastsearch values current at construction time; later reassignments of
// those variables have no effect on it (see the fix: wrap it in a function).
const p1 = new Promise((resolve, reject) => {
let lineCount = 0;
let v = 0;
let yesnon = "non";
let readStream = readline.createInterface({
input: fs.createReadStream('/pathtofile/round_' + task_id + '/threehundred_' + folder + '/' + subfolder + '/' + myfiles[0], 'utf8')
});
readStream.on("line", (line) => {
lineCount++;
if (line == pastsearch) {
yesnon = "yes";
v++;
}
});
// NOTE(review): readline.Interface emits 'close', not 'end' -- this handler
// presumably never fires; verify against the Node readline docs.
readStream.on('end', () => {
console.log('end');
readStream.destroy();
});
// 'close' fires when the underlying stream ends; settle the promise here.
readStream.on("close", () =>
resolve({
yesnon
})
)
});
// Same as p1, but scans myfiles[1]. Shares all the caveats noted on p1:
// constructed once with the then-current loop variables, and the 'end'
// handler presumably never fires ('close' is the readline event).
const p2 = new Promise((resolve, reject) => {
let readStream = readline.createInterface({
input: fs.createReadStream('/pathtofile/round_' + task_id + '/threehundred_' + folder + '/' + subfolder + '/' + myfiles[1], 'utf8')
});
let lineCount = 0;
let v = 0;
let yesnon = "non";
readStream.on("line", (line) => {
lineCount++;
if (line == pastsearch) {
yesnon = "yes";
v++;
}
});
readStream.on('end', () => {
console.log('end');
readStream.destroy();
});
readStream.on("close", () =>
resolve({
yesnon
}))
});
// Runs three search rounds and appends one yes/non line plus a yes-count per
// round to the output files.
// NOTE(review): p1 and p2 were created once, before this loop; every
// iteration awaits the SAME two (already settled) promises, so updating
// folder/subfolder/pastsearch here changes nothing. The promises must be
// wrapped in functions and invoked per iteration.
for (let h = 0; h < 3; h++) {
folder++
subfolder++
pastsearch = myarray[h];
Promise.all([p1, p2]).then((results) => {
// One space-joined "yes"/"non" token per file.
const output = results.map(({
yesnon
}) => yesnon).join(' ');
fs.appendFileSync('/pathtofile/plain_round' + task_id + '.txt', output + "\n", 'utf8');
const output2 = results.map(({
yesnon
}) => yesnon);
// Counts occurrences of countItem via the closed-over `count`.
let count = 0;
function countValues(array, countItem) {
array.forEach(itm => {
if (itm == countItem) count++;
});
return count;
}
const myresult34 = countValues(output2, "yes");
fs.appendFileSync('/pathtofile/round' + task_id + '.txt', myresult34 + "\n", 'utf8');
});
}
Note:
I am new to nodejs, mostly php experience, so I wrote this the best that I could from studying stackoverflow Q/A Posts.
Wrap your promises in a function that takes folder, subfolder, and pastsearch as arguments. For example with p1:
// Factory that builds the file-scan promise for one folder/subfolder/search
// term. Invoke it inside the loop so each iteration closes over fresh
// argument values instead of sharing a single pre-built promise.
// (Fixes the snippet's syntax: the arrow `=>` and the closing `);` of the
// Promise constructor were missing.)
const p1 = (folder, subfolder, pastsearch) => {
  return new Promise((resolve, reject) => {
    // `folder`, `subfolder` and `pastsearch` are available here
  });
};
Then invoke p1 inside Promise.all:
Promise.all([p1(folder, subfolder, pastsearch), p2]).then((results) => {
In JavaScript, this technique is called a closure.
I have a NodeJS script which is making thousands of REST API GET calls to SharePoint to retrieve all folders and files recursively. A problem I have encountered is once in a while I am receiving ECONNRESET errors and 404 Not Found for some folders with unsupported characters. This causes my code to stop and I lose my progress in retrieving all the files and folders. Is there something I can do to prevent these errors from stopping my code and just have it ignore the error and continue onto the next folder?
I thought catching the error and console logging it would suffice but this still ends the execution of the code. Thanks
//Imports and Declarations
const prompt = require("prompt-sync")({ sigint: true });
const { getCreds, pullFromSharePoint } = require('./helper');
const sprequest = require('sp-request');
var path = require('path');
// Authenticated sp-request client built from helper credentials.
let creds = getCreds();
let spr = sprequest.create(creds)
let SPUrl = 'url to sharepoint site';
// Shared mutable state: discovered files/folders and the folders already
// scanned (to avoid rescanning). `reiteration` switches path formatting for
// recursive calls; `processingReq` appears unused in the visible code.
let files = [];
let folders = [];
let scannedFolders = [];
let reiteration = false;
let processingReq = false;
//Prompt user for which folder to copy
let targetFolder = prompt("Folder to copy: ");
//Call function to get list of files and folders in specified folder
// getFolderContents(targetFolder);
processInfo([], targetFolder)
//Gets list of files/folders inside folder
function getFolderContents(targetFolder) {
return new Promise((resolve, reject) => {
logger.info('Scanning this folder: ' + targetFolder)
//Skip if folder name contains chars we dont support
if (targetFolder.includes("'")) {
targetFolder = targetFolder.replace(/'/g, "''")
}
//If reiteration = true, format the target folder
if (reiteration === true) {
targetFolder = targetFolder.replace('/sites/sharepointsite', '');
}
console.log('Scanning this folder: ' + targetFolder)
targetFolder = encodeURIComponent(targetFolder)
var url = `${SPUrl}/_api/web/GetFolderByServerRelativeUrl('Shared%20Documents/${targetFolder}')/Files?$filter=TimeLastModified gt datetime'2021-07-01T00:00:00'`;
var url2 = `${SPUrl}/_api/web/GetFolderByServerRelativeUrl('Shared%20Documents/${targetFolder}')?$expand=Folders`;
//THESE SPR CALLS ARE THE REST API CALLS TO SHAREPOINT RESULTING IN THE MOST 404 ERRORS, HOW TO IGNORE ERRORS FROM THESE?
// Call to get list of files
spr.get(url).then(response => {
console.log('Calling SP API to get files')
//Adds files to array
let fileRes = response.body.d.results;
for (let i = 0; i < fileRes.length; i++) {
files.push(fileRes[i].ServerRelativeUrl)
}
console.log(files.length)
}).catch(function (err) {
console.log('Fetch Error :-S', err);
});
//Call to get list of folders
spr.get(url2).then(response => {
//Adds folders to array
let folderRes = response.body.d.Folders.results;
for (let j = 0; j < folderRes.length; j++) {
folders.push(folderRes[j].ServerRelativeUrl)
}
//Push current folder read through to another array so we dont scan it multiple times
scannedFolders.push('/sites/sharepointsite/Shared Documents' + targetFolder);
resolve(folders);
}).catch(function (err) {
console.log('Fetch Error :-S', err);
});
})
}
//If folders exist in folders array scan them; once all folders have been scanned, send files to be copied
async function processInfo(folders, firstFolder) {
let firstRun = await getFolderContents(firstFolder);
let firstGone = false;
if (firstRun) {
if (firstRun.length > 0) {
for (let k = 0; k < firstRun.length; k++) {
await sleep(500);
//If folder has already been scanned, remove from array and skip to next iteration
if (scannedFolders.includes(firstRun[k])) {
if (k === 0) {
firstRun.splice(k, 1)
}
firstRun.splice(k, k)
continue;
}
//Else if folder has not been scanned, send it to be scanned
else {
if (firstRun[k].includes('%') || firstRun[k].includes('%')) {
console.log('skipping')
continue;
}
reiteration = true;
let foldersScanned = await getFolderContents(firstRun[k]).catch(function (err) {
console.log('Fetch Error :-S', err);
});;
//Send each file to pullFile() function to be downloaded
// if (foldersScanned && k == firstRun.length - 1) {
if (firstRun[k] == 10) {
do {
await pullFile(files[0]);
files.shift();
} while (files.length > 0)
}
}
}
}
}
console.log(files.length)
}
//Manipulate file string to get SP folder and File Name, then call the helper function to download the file
async function pullFile(file) {
let filename = file.replace(/^.*[\\\/]/, '')
let spFolder = file.replace('/sites/sharepointsite/', '')
spFolder = spFolder.replace(filename, '');
let downloadPath = path.join('./testfolder', spFolder)
const sppullContext = {
siteUrl: SPUrl, //SharePoint URL
creds: creds //Credentials
};
const sppullOptions = {
spRootFolder: spFolder, // The folder path for the file in SharePoint
dlRootFolder: downloadPath, // Where the file is saved locally
strictObjects: [filename], // Only download the filename specified as a query parameter
muteConsole: true
};
pullFromSharePoint(sppullContext, sppullOptions) //Pull file with filename from SharePoint
.then(() => {
return true;
}).catch(err => {
return err;
});
}
//Timer to wait specified amount of time
function sleep(ms) {
return new Promise((resolve) => {
setTimeout(resolve, ms);
});
}
Code from helper.js
//Imports and Declarations
var fs = require('fs');
const { SPPull } = require('sppull');
const winston = require('winston');
//Winston logger
// error-and-above goes to error.log; everything at 'info' level also lands
// in info.log. JSON format, tagged with the service name.
const logger = winston.createLogger({
level: 'info',
format: winston.format.json(),
defaultMeta: { service: 'user-service' },
transports: [
new winston.transports.File({ filename: 'error.log', level: 'error' }),
new winston.transports.File({ filename: 'info.log' }),
],
});
/**
* Returns the SharePoint credentials object to use.
*/
// Build and return the SharePoint credentials object.
// NOTE(review): credentials are hard-coded; consider environment variables.
function getCreds() {
const credentials = {
username: 'UN',
password: 'PW',
};
return credentials;
}
module.exports.getCreds = getCreds;
/**
* Pulls a file from SharePoint and then checks for errors.
*
 * @param sppullContext context object for sppull
 * @param sppullOptions options object for sppull
*/
/**
 * Pulls a file from SharePoint and then checks for errors.
 *
 * @param sppullContext context object for sppull
 * @param sppullOptions options object for sppull
 * @returns the SPPull result on success, or the error object on failure
 *          (failures are logged, never rethrown, so callers keep going)
 */
async function pullFromSharePoint(sppullContext, sppullOptions) {
// Return the chain so callers can actually await the download. The
// original dropped the promise, so the function resolved immediately
// (with undefined) while the transfer was still in flight.
return SPPull.download(sppullContext, sppullOptions)
.then((res) => {
logger.info(res)
return res
})
.catch((err) => {
logger.error(err)
return err
});
}
I have the following Node script:
var exec = require('child_process').exec;
// Downloads one URL by shelling out to wget.
// NOTE(review): `throw err` inside an async exec callback cannot be caught
// by any caller -- it becomes an uncaught exception and crashes the process.
// The function also offers no promise/callback, so completion cannot be
// awaited.
var download_file_wget = function(file_url) {
// compose the wget command
var wget = `wget ${file_url}`;
// execute wget using child_process' exec function
var child = exec(wget, function(err) {
if (err) throw err;
else console.log(`${file_url} downloaded`);
});
};
download_file_wget('http://placekitten.com/10/10');
It does successfully download 1 file. I need to asynchronously download 128 files at once. How can I do this with this code?
If the files requested are big consider use spawn instead of exec.
const http = require('http');
const exec = require('child_process').exec;
const DOWNLOAD_DIR = './downloads/';

// Random integer in [200, 299], used as a kitten-image dimension.
const generate_width_and_height = function() {
    const random = Math.floor((Math.random() * 100) + 200);
    console.log(random);
    return random
}

// Compose a placekitten URL with random width and height.
const create_file_url = function() {
    return "http://placekitten.com/" + generate_width_and_height() + "/" + generate_width_and_height()
}

// Indices 0..127 -- genuinely 128 entries. The original used
// Array.from(Array(127), ...), which yields only 127 elements despite the
// variable names promising 128.
const oneHundredTwentyEightElementsArray = Array.from({ length: 128 }, (_, x) => x);
const oneHundredTwentyEightUrlsArray = oneHundredTwentyEightElementsArray.map( _ => create_file_url())

// Download one URL into DOWNLOAD_DIR via wget; logs on success.
// NOTE(review): `throw err` in the async callback is uncatchable by callers.
const download_file_wget = function(file_url, file_number) {
    // extract the file name
    const file_name = "file_" + file_number
    // compose the wget command
    const wget = 'wget -P ' + DOWNLOAD_DIR + ' ' + file_url;
    // execute wget using child_process' exec function
    const child = exec(wget, function(err, stdout, stderr) {
        if (err) throw err;
        else console.log(file_name + ' downloaded to ' + DOWNLOAD_DIR);
    });
};

// exec() is asynchronous, so this loop spawns all 128 wget processes at once.
for (let index = 0; index < oneHundredTwentyEightElementsArray.length; index++) {
    const url = oneHundredTwentyEightUrlsArray[index];
    download_file_wget(url, index)
}
You can use Javascript Promises to download multiple files with node and wget.
First wrap your inner code in a promise:
// Download one URL via wget. Resolves 1 on success and 0 on failure --
// the original never settled the promise on error, so a single failed
// download left Promise.all (and any awaiting caller) hanging forever.
// Failures are logged and swallowed so a batch keeps going.
const downloadFile = (url) => {
  return new Promise((resolve) => {
    console.log(`wget ${url} --no-check-certificate`)
    exec(`wget ${url} --no-check-certificate`, function(err, stdout, stderr) {
      if (err) {
        console.log('ERR', err, url)
        resolve(0) // settle even on failure so Promise.all can complete
      } else {
        console.log('SUCCESS ' + url);
        resolve(1)
      }
    });
  })
}
Then use Promise.all to process all the downloads asynchronously:
// URLs to fetch. The trailing semicolon is required: without it, automatic
// semicolon insertion does NOT apply, and the IIFE's opening parenthesis on
// the next statement is parsed as a call on the array literal
// ("files is not a function" / "[...] is not a function").
const files = [
  'http://placekitten.com/10/10',
  'http://placekitten.com/10/10'
  // etc
];

// Kick off every download in parallel and wait for all of them to settle.
(async () => {
  await Promise.all(files.map(url => downloadFile(url)))
})()
I am still pretty new to this, so forgive me if I don't say this correctly. We have an array.reduce that calls a method with a returning promise that iterates through a list of files and posts results to the db. Everything was working great, until it ran into a field that had an apostrophe in it and then the db insert fails. This is the field value. 'Expected 100002822' to be 100002822.'
I tried adding a replaceAll on the field and now get an error in the array.reduce.
Here is the .reduce
console.log('Found test results in ' + matches.length + ' files. Parsing and posting to the database now...');
var startTime = moment();
var parser = new Parser();
// Sequentially parse each result file: reduce chains one parseResults call
// after another, starting from a resolved promise.
matches.reduce(function (p, val) {
return p.then(function () {
return parser.parseResults(val);
});
}, Promise.resolve()).then(function (finalResult) {
var endTime = moment();
var testDuration = moment.duration(endTime.diff(startTime));
console.log(chalk.blue('*** File parsing time: ' + testDuration.humanize() + ' ***'));
// All inserts succeeded only if the count matches the file count.
if (finalResult.insertSuccess == matches.length) {
var publishOut = {
totalFiles: matches.length,
totalTests: 0,
totalTestsSuccess: 0,
totalTestsFailed: 0
}
publishOut.totalTests += finalResult.totalTests;
publishOut.totalTestsSuccess += finalResult.testPassedCount;
publishOut.totalTestsFailed += finalResult.testFailedCount;
console.log(`Successfully inserted ${finalResult.insertSuccess} of ${publishOut.totalTests} test results.`);
// for (var i = 0; i < matches.length; i++) {
// var currentFile = `./testing/results/${matches[i]}`;
// fs.unlinkSync(currentFile);
// }
// NOTE(review): `resolve`/`reject` are not defined in this scope -- they
// only exist inside a Promise executor. Unless this snippet is wrapped in
// one (not visible here), these calls throw ReferenceError, which would
// itself surface as the rejection seen in the error handler below.
resolve(publishOut);
} else {
reject('Only ' + finalResult.insertSuccess + ' of ' + matches.length + ' successfully posted to the database');
}
}, function (err) {
// NOTE(review): reject() takes a single reason argument; the second
// argument (`err`, the actual error) is silently dropped here.
reject('error in reduce', err);
});
I have tried several different ways of using the replaceAll with the same failure. It hits this code from the array.reduce
}, function (err) {
reject('error in reduce', err);
});
And this is the called method. The added code causing the failure in the .reduce is this Message = expectation.message.replaceAll("'", "");
// Reads one protractor JSON result file, posts each TC-numbered spec result
// to the database, and resolves with the accumulated testResults counters.
protractorParser.prototype.parseResults = function (fileName) {
return new Promise((resolve, reject) => {
//console.log('In parseresults', fileName);
var currentFile = './testing/results/' + fileName
// Promisify only the file read; everything else chains off it.
json.readFile(currentFile, function (err, obj) {
if (err != null) {
console.log('error reading file', err);
reject(err);
} else {
resolve(obj);
}
});
}).then(function (obj) {
var results = [];
// One insert promise per spec that carries a TC number.
for (var suite in obj) {
var specs = obj[suite].specs;
for (let i = 0; i < specs.length; i++) {
const assert = specs[i];
const tcR = /TC[\d]+/;
const tc = assert.description.match(tcR);
let Passed = 1;
let Message = '';
let Stack = '';
testResults.totalTests++;
if (assert.failedExpectations.length) {
// Use the last failed expectation's message/stack for the DB row.
const expectation = assert.failedExpectations[assert.failedExpectations.length - 1];
Passed = 0;
// NOTE(review): String.prototype.replaceAll is ES2021 (Node 15+).
// On older Node this throws "replaceAll is not a function", which
// rejects this chain and surfaces as the reduce error described in
// the question -- confirm the runtime version.
Message = expectation.message.replaceAll("'", "");
Stack = expectation.stack.split('\n')[1].trim();
testResults.testFailedCount++
} else {
testResults.testPassedCount++
}
if (tc != null) {
const time = moment().utcOffset(config.get('settings.timeOffset')).format('YYYY-MM-DDTHH:mm:ss');
const promise = utility.TestDataManager.insertAutomationResults(tc[0], assert.description, Passed, process.env.testBuild, 'P', Message, Stack, 0, time, '');
results.push(promise.then(() => {
//fs.unlinkSync(currentFile);
testResults.insertSuccess++;
//console.log('insertSuccess', testResults.insertSuccess);
},
err => { console.log('… failed', err); throw err; }
));
} else {
console.log('no test case found for test: ' + assert.description + ' -- skipping');
// I don't think you want to `throw err` here, right?
}
}
}
// Resolve with the shared counters once every insert has settled.
return Promise.all(results).then(() => testResults);
});
};
I am trying to iterate through the JSON files generated by the protractor tests. I pull all the file names into an array and call a method that opens and parses each file, posts the results to the database, and passes back a passed/failed flag.
I have tried all the examples here
Make angular.forEach wait for promise after going to next object and still get the same results.
The method is actually called, but the results are not posted to the db. I have tested the parser.parseResults on an individual file and it successfully posted to the db, so it has to have something to do with the promise not resolving correctly.
Is it not possible to do something like this in the jasmine/protractor framework? Or do I have something wrong in the code?
I have included the code for my latest attempt.
Thank You
Christine
// Sequential chain: each file's parseResults starts only after the previous
// one's promise settles. Note finalResult is the value of the LAST
// parseResults call only, not an accumulation across files.
matches.reduce(function (p, val) {
console.log('val', val);
return p.then(function () {
return parser.parseResults(val);
});
}, Promise.resolve()).then(function (finalResult) {
console.log('finalResult = ', finalResult);
}, function (err) {
console.log('error in reduce',err);
});
parser.parseResults code
// Earlier version of parseResults: reads one protractor JSON result file and
// posts each TC-numbered spec to the database.
// NOTE(review): resolve/reject are invoked from inside the per-spec loops,
// so the promise settles on the FIRST insert to finish -- later inserts are
// not awaited -- and if obj has no suites/specs it never settles at all
// (exactly the problem the answer below the snippet describes).
protractorParser.prototype.parseResults = function (fileName) {
return new Promise((resolve, reject) => {
console.log('In parseresults', fileName);
json.readFile(fileName, function (err, obj) {
try {
if (err != null) {
console.log('error reading file',err);
reject(err);
}
console.log('obj - ',obj);
var results = [];
var Passed = 0;
var Message = '';
var Stack = '';
for (var suite in obj) {
var specs = obj[suite].specs;
console.log('spec - ', specs);
if (specs.length > 0) {
for (var i = 0; i < specs.length; i++) {
var assert = specs[i];
var tcR = new RegExp(/TC[\d]+/);
var tc = assert.description.match(tcR);
if (!assert.failedExpectations.length) {
Passed = 1;
}
else {
// Keep the message/stack of the last failed expectation.
assert.failedExpectations.forEach((expectation) => {
Message = expectation.message;
Stack = expectation.stack.split('\n')[1].trim();
})
Passed = 0;
}
if (tc != null) {
utility.TestDataManager.insertAutomationResults(tc[0], assert.description, Passed, process.env.testBuild,
'P', Message, Stack, 0, moment().utcOffset(config.get('settings.timeOffset')).format('YYYY-MM-DDTHH:mm:ss'), '')
.then(function (resp) {
resolve(Passed);
}, (err) => {
console.log('Posting to Database failed ', err);
reject(err);
});
} else {
console.log('no test case found for test: ' + assert.description + ' -- skipping');
// NOTE(review): `err` here is the (possibly null) readFile error,
// not a per-spec failure.
reject(err);
}
}
}
}
}
catch (err) {
console.log('rejecting opening file');
reject(err);
}
});
})
}
If there is not exactly one suite in the obj, with exactly one spec, then your promise is either resolved not at all or multiple times.
Avoid wrapping too many things in the new Promise constructor - always promisify on the smallest possible level, and use promise chaining afterwards.
// Corrected parseResults: promisify only the file read, then build one
// insert promise per TC-numbered spec and resolve when Promise.all settles.
protractorParser.prototype.parseResults = function (fileName) {
return new Promise((resolve, reject) => {
console.log('In parseresults', fileName);
// The new-Promise wrapper covers ONLY the callback-style readFile --
// the smallest possible promisification.
json.readFile(fileName, function (err, obj) {
if (err != null) {
console.log('error reading file', err);
reject(err);
} else {
resolve(obj);
}
});
}).then(function(obj) {
console.log('obj - ',obj);
var results = [];
for (var suite in obj) {
var specs = obj[suite].specs;
console.log('spec - ', specs);
for (let i = 0; i < specs.length; i++) {
const assert = specs[i];
const tcR = /TC[\d]+/;
const tc = assert.description.match(tcR);
let Passed = 1;
let Message = '';
let Stack = '';
if (assert.failedExpectations.length) {
// Use the last failed expectation for the recorded message/stack.
const expectation = assert.failedExpectations[assert.failedExpectations.length-1];
Passed = 0;
Message = expectation.message;
Stack = expectation.stack.split('\n')[1].trim();
}
if (tc != null) {
const time = moment().utcOffset(config.get('settings.timeOffset')).format('YYYY-MM-DDTHH:mm:ss');
const promise = utility.TestDataManager.insertAutomationResults(tc[0], assert.description, Passed, process.env.testBuild, 'P', Message, Stack, 0, time, '');
// Log insert failures but rethrow so Promise.all still rejects.
results.push(promise.catch(err => {
console.log('Posting to Database failed ', err);
throw err;
}));
} else {
console.log('no test case found for test: ' + assert.description + ' -- skipping');
// I don't think you want to `throw err` here, right?
}
}
}
// Settles once every queued insert has finished.
return Promise.all(results);
});
};