npm protractor xlsx file wait and retry - javascript

I am trying to run Protractor with multiCapabilities (around 30 browsers with different versions).
The test data is a single-sheet xlsx file that every run consumes. After each run, the consumed row in the xlsx is flagged as 'USED'.
I use exceljs to write the flag, but it throws an error if the file is already open/in use by another process. I handled the exception, but instead of failing I would like the other processes to wait and retry accessing the file.
Please suggest how multiple processes can read/write one xlsx file at the same time, where each process waits for any previous process to finish its access.
Write Function:
updateRunnerXLS: function (fileName) {
    var Excel = require('exceljs'); // Require the exceljs package
    var workbook = new Excel.Workbook(); // Create a workbook reader
    workbook.xlsx.readFile(fileName)
        .then(function (workbook) {
            var rowValues = []; // Initialize empty array
            var worksheet = workbook.getWorksheet('sheet2');
            var nameCol = worksheet.getColumn('M');
            nameCol.eachCell(function (cell, rowNumber) { // Loop through the cells
                if (cell.value == 'Y') { // Get all rows with Y
                    rowValues.push(rowNumber); // Fill the array with the row number
                }
            });
            var row = worksheet.getRow(rowValues[0]); // Get the first row from the array
            row.getCell('M').value = 'X'; // Update the first row that has a Y to X
            row.commit(); // Commit the row change
            workbook.xlsx.writeFile(fileName).then(function () {
                // done
            }).catch(function (err) {
                console.log('Handled the error - ' + err);
            });
        });
}
Read Function (simpler than reading with exceljs):
    var Parser = require('parse-xlsx');
    sheet = new Parser(testDataFolder + 'dataSheet.xlsx', 'sheet2');
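There is no accepted answer here, but since the failure surfaces as a rejected promise from exceljs, one common approach is to wait and retry the whole read-update-write cycle a few times before giving up. The sketch below is a suggestion, not part of the original question: the function name, the attempt count, and the 2-second delay are arbitrary choices, and it reuses the same exceljs calls as the write function above. Note that retrying only reduces collisions; it does not make concurrent access truly safe (two processes could still pick the same 'Y' row), so a lock file or a per-capability data split would be more robust.

    var Excel = require('exceljs');

    // Hypothetical retry wrapper: if reading or writing fails (e.g. the file is
    // held by another process), wait 2 seconds and try the whole cycle again.
    function updateRunnerXlsWithRetry(fileName, attemptsLeft) {
        attemptsLeft = attemptsLeft === undefined ? 5 : attemptsLeft;
        var workbook = new Excel.Workbook();
        return workbook.xlsx.readFile(fileName)
            .then(function (wb) {
                var worksheet = wb.getWorksheet('sheet2');
                var rowValues = [];
                worksheet.getColumn('M').eachCell(function (cell, rowNumber) {
                    if (cell.value == 'Y') {
                        rowValues.push(rowNumber);
                    }
                });
                var row = worksheet.getRow(rowValues[0]); // first unused row
                row.getCell('M').value = 'X';
                row.commit();
                return wb.xlsx.writeFile(fileName);
            })
            .catch(function (err) {
                if (attemptsLeft <= 0) {
                    throw err; // give up after the last attempt
                }
                console.log('File busy, retrying in 2s - ' + err);
                return new Promise(function (resolve) {
                    setTimeout(resolve, 2000); // wait before retrying
                }).then(function () {
                    return updateRunnerXlsWithRetry(fileName, attemptsLeft - 1);
                });
            });
    }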

Related

How to download a list of image URLs and rename the files with the values in the adjacent column in Google Sheets (Code.gs function)

I've made a script in Google Sheets (Code.gs) and it has been working fine: the download function downloads each image into my Google Drive folder. What I want is for the images to be renamed, as they are downloaded, with the text values in the adjacent column.
For example: FIRST COLUMN ==> "PICTURE URL", SECOND COLUMN ==> "TEXT_TO_BE_RENAMED".
I've made a download button; when I click it, it should download the image and also rename it with the value in the second column. An example is shown in the screenshot below, and I'm also sharing the sheet link:
[Here is a picture demonstrating what I want]
https://docs.google.com/spreadsheets/d/1jItuI2tQbpH4A5b9CFS9xWV3mNtnrV2jMRYPxEZSvlM/edit?usp=sharing
My code:
function downloadImage() {
    let sheet = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
    let lastRow = sheet.getLastRow();
    for (let i = 0; i < lastRow - 1; i++) {
        let folder = DriveApp.getFolderById("Folder_id_of_your_drive");
        let url = sheet.getRange(2 + i, 1).getValue();
        let image = SpreadsheetApp.newCellImage().setSourceUrl(url); // currently unused
        let blob = UrlFetchApp.fetch(url).getBlob();
        folder.createFile(blob); // file is created but never renamed
    }
}
I have literally tried everything to produce the result but unfortunately failed. I expect the images to be downloaded and renamed with the text values in the adjacent column.
You can check for errors and rename the files like this:
function downloadImage() {
    const folder = DriveApp.getFolderById('...folder ID goes here...');
    const sheet = SpreadsheetApp.getActiveSheet();
    const urls = sheet.getRange('A2:A').getDisplayValues().flat();
    const names = sheet.getRange('C2:C').getDisplayValues().flat();
    urls.forEach((url, index) => {
        if (!url) return;
        try {
            const blob = UrlFetchApp.fetch(url).getBlob();
            folder.createFile(blob).setName(names[index]);
        } catch (error) {
            console.log(error.message);
        }
    });
}
For additional information, see Folder.createFile() and the Blob class.
function downloadandrename() {
    const f = DriveApp.getFolderById('fid');
    const sh = SpreadsheetApp.getActiveSheet();
    const vs = sh.getRange('A2:C' + sh.getLastRow()).getDisplayValues();
    vs.forEach((r, i) => {
        if (r[0] && r[2]) {
            f.createFile(UrlFetchApp.fetch(r[0]).getBlob().setName(r[2]));
        }
    });
}

Create an array of arrays from file reader?

I need to convert an Excel file with multiple worksheets to JSON, and I found the following script to do so. However, it just console-logs each sheet, and I want an array where each element is a sheet. I have tried initializing an array and pushing oJS into it on every iteration of the forEach, but it doesn't work :(
function filePicked(oEvent) {
    // Get the file from the input
    var oFile = oEvent.target.files[0];
    var sFilename = oFile.name;
    // Create an HTML5 FileReader
    var reader = new FileReader();
    // Ready the event for when a file gets selected
    reader.onload = function (e) {
        var data = e.target.result;
        var cfb = XLS.CFB.read(data, {type: 'binary'});
        var wb = XLS.parse_xlscfb(cfb);
        // Loop over each sheet
        wb.SheetNames.forEach(function (sheetName) {
            // Obtain the current sheet as CSV
            var sCSV = XLS.utils.make_csv(wb.Sheets[sheetName]);
            var oJS = XLS.utils.sheet_to_row_object_array(wb.Sheets[sheetName]);
            $("#my_file_output").html(sCSV);
            console.log(oJS);
        });
    };
    // Tell JS to start reading the file.. You could delay this if desired
    fileOut += reader.readAsBinaryString(oFile);
    console.log(fileOut);
}
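Assuming the same js-xls helpers as in the script above, collecting the sheets works if the array is both filled and consumed inside the onload handler; the original snippet logs fileOut outside the handler, which runs before the file has been read (and readAsBinaryString has no return value to accumulate). A minimal sketch, with allSheets as a hypothetical name:

    reader.onload = function (e) {
        var data = e.target.result;
        var cfb = XLS.CFB.read(data, {type: 'binary'});
        var wb = XLS.parse_xlscfb(cfb);
        var allSheets = []; // one element per worksheet
        wb.SheetNames.forEach(function (sheetName) {
            allSheets.push(XLS.utils.sheet_to_row_object_array(wb.Sheets[sheetName]));
        });
        // allSheets is only complete here, inside onload;
        // logging it outside the handler runs before the file has been read
        console.log(allSheets);
    };
    reader.readAsBinaryString(oFile);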

CSV File Sender Information with App Script

I am trying to extract the details of emails (email address, subject, date) from my Gmail inbox that contain a CSV file as an attachment. So far I have only managed to extract the CSV file from a specific email address. Is there any way to get the information of all emails that have a CSV attachment? Any help would be appreciated.
function import() {
    var threads = GmailApp.search('in:inbox from:"example#gmail.com"');
    if (threads.length === 0)
        return;
    var messages = threads[0].getMessages();
    var message = messages[messages.length - 1];
    var attachment = message.getAttachments()[0];
    attachment.setContentType('text/csv');
    // Is the attachment a CSV file
    if (attachment.getContentType() === "text/csv") {
        var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet3");
        var csvData = Utilities.parseCsv(attachment.getDataAsString(), ",");
        // Remember to clear the content of the sheet before importing new data
        sheet.clearContents().clearFormats();
        sheet.getRange(1, 1, csvData.length, csvData[0].length).setValues(csvData);
        // GmailApp.moveMessageToTrash(message);
    }
}
var messages = threads[0].getMessages(); indicates that you are running your code only for the first thread.
If you want to retrieve attachments from all threads, you need to iterate over your threads with a loop.
To get the sender of a message you can use the getFrom() method.
The following sample retrieves the senders of the last message of every thread that has a CSV attachment. The senders are stored in an array:
function import() {
    var threads = GmailApp.search('in:inbox from:"example#gmail.com"');
    if (threads.length == 0)
        return;
    var senders = [];
    for (var i = 0; i < threads.length; i++) {
        var messages = threads[i].getMessages();
        var message = messages[messages.length - 1];
        var attachment = message.getAttachments()[0];
        attachment.setContentType('text/csv');
        // Is the attachment a CSV file
        if (attachment.getContentType() == "text/csv") {
            var sender = message.getFrom();
            senders.push(sender);
            // do whatever else you want to do
            ...
        }
    }
    Logger.log(senders);
}
This sample will iterate through the last message of each thread. If you want to iterate through all messages of all threads, you need to implement a second, nested loop.
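A minimal sketch of that nested loop, iterating over every message of every thread and checking each attachment; the function name and the CSV test (content type or .csv filename) are choices of this sketch rather than part of the original answer:

    function listCsvSenders() {
        var threads = GmailApp.search('in:inbox');
        var senders = [];
        for (var i = 0; i < threads.length; i++) {          // outer loop: threads
            var messages = threads[i].getMessages();
            for (var j = 0; j < messages.length; j++) {     // nested loop: every message
                var attachments = messages[j].getAttachments();
                for (var k = 0; k < attachments.length; k++) {
                    var isCsv = attachments[k].getContentType() === 'text/csv' ||
                        attachments[k].getName().toLowerCase().indexOf('.csv') !== -1;
                    if (isCsv) {
                        senders.push(messages[j].getFrom());
                        break; // one CSV hit per message is enough
                    }
                }
            }
        }
        Logger.log(senders);
    }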

Excel file gets corrupted after I write into it using ExcelJS

I have a function where I need to write a value into an Excel file at a specific row.
var Excel = require('exceljs');

setExcelData: function (sheetName, keyword, value123, callback) {
    var workbook = new Excel.Workbook();
    workbook.xlsx.readFile('example.xlsx').then(function () {
        var worksheet = workbook.getWorksheet(sheetName);
        var y = worksheet.getColumn(1).values;
        for (var i = 1; i <= y.length; i++) {
            var q = worksheet.getRow(i).values;
            if (q[1] == keyword) {
                worksheet.getRow(i).getCell(2).value = value123;
                workbook.xlsx.writeFile('example.xlsx');
                break;
            }
        }
    });
    callback();
},
First I read the file and find the row where the keyword is present in the first column. Then I try to write "value123" into the second column of the same row. But when I execute this function, the Excel file gets corrupted and I cannot open it anymore.
I faced the same issue on Ubuntu when the file had been modified in LibreOffice. So I created a document in Google Drive, as suggested in https://github.com/guyonroche/exceljs/issues/35#issuecomment-283440743, and downloaded it to perform the modification.
Below is the code that worked for me.
var Excel = require('exceljs');

async function excelOp() {
    let workbook = new Excel.Workbook();
    workbook = await workbook.xlsx.readFile('question_50508131.xlsx'); // replace question_50508131.xlsx with your file
    let worksheet = workbook.getWorksheet('Inventory'); // replace Inventory with your sheet name
    let columnValues = worksheet.getColumn(1).values;
    for (let row = 1; row <= columnValues.length; row += 1) {
        var rowValues = worksheet.getRow(row).values;
        if (rowValues[1] == 'Laptop') { // replace Laptop with the value you are looking for
            worksheet.getRow(row).getCell(2).value = 350; // replace 350 with the replacement value
            workbook.xlsx.writeFile('question_50508131.xlsx');
            break;
        }
    }
}
excelOp();
[Screenshot: pre-modification Excel data]
[Screenshot: post-modification Excel data]
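Separately from the corruption itself, the original setExcelData invokes callback() immediately, before the asynchronous read and write have finished, and writeFile is never awaited. A sketch of moving the callback into the promise chain, assuming the same signature as the question (the error-first callback convention is an assumption here):

    setExcelData: function (sheetName, keyword, value123, callback) {
        var Excel = require('exceljs');
        var workbook = new Excel.Workbook();
        workbook.xlsx.readFile('example.xlsx')
            .then(function () {
                var worksheet = workbook.getWorksheet(sheetName);
                var columnValues = worksheet.getColumn(1).values;
                for (var i = 1; i <= columnValues.length; i++) {
                    if (worksheet.getRow(i).values[1] == keyword) {
                        worksheet.getRow(i).getCell(2).value = value123;
                        // return the write promise so the callback fires only after the file is saved
                        return workbook.xlsx.writeFile('example.xlsx');
                    }
                }
            })
            .then(function () {
                callback();
            })
            .catch(function (err) {
                callback(err);
            });
    },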

TypeError: Cannot read property 'then' of undefined on Promise in Express route

I have a route in my MEEN app that accepts a file upload, then passes the data to a helper module that parses the file and stores it in an array. Once this helper module finishes, I want to pass the result off to another module that will process the returned array. For some reason, even though I have a return statement at the end of the helper, the then method is undefined.
route:
router.route('/tools/sku/reactivate').post(upload.single('csvdata'), function (req, res) {
    console.log('handing request over to helper.csv');
    csv.reader(req, res).then(sku.reactivate(data));
});
csv.reader:
var csv = require('csv-parse');
var multer = require('multer');
var fs = require('fs');

module.exports.reader = function (req, res) {
    // define array for holding csv data, in this case skus
    const csvArray = [];
    // max number of columns in csv
    const maxColumns = parseInt(req.body.maxColumns);
    // create an array of column headers to check chunks against so we don't parse headers
    let columnHeader = req.body.moduleTarget.split(',');
    // loop through to create an array inside the container array for each column
    for (var i = 0; i < maxColumns; i++) {
        csvArray.push([]);
    }
    // define filesystem readstream from uploaded file
    let readStream = fs.createReadStream(req.file.path).pipe(csv());
    // push csv data to csvArray, ignoring headers
    readStream.on('data', function (chunk) {
        // get number of keys in the data chunk
        let chunkLength = Object.keys(chunk).length;
        // check column count on csv; if greater than expected, throw error
        if (chunk[maxColumns]) {
            throw '[ERROR] More columns than expected in CSV, please fix and try again';
        } else {
            // loop through chunk keys and store each one in csvArray by index
            for (var i = 0; i < chunkLength; i++) {
                // if the chunk at this index doesn't equal the column header and exists, push to array
                if (chunk[i] !== columnHeader[i] && chunk[i]) {
                    csvArray[i].push(chunk[i]);
                }
            }
        }
    });
    // error handling
    readStream.on('error', function (err) {
        console.log('Error while reading file stream ' + err);
        res.json({message: err, errorType: '1'});
    });
    readStream.on('end', function () {
        console.log('finished reading csv returning array back to router to process next middleware');
        return csvArray;
    });
}
I get the error right after the console.log message in the readStream end listener.
From the Express website: https://expressjs.com/en/advanced/best-practice-performance.html
TL;DR: try wrapping the contents of your reader module inside a promise and return the promise.
...However, there are two caveats:
All your asynchronous code must return promises (except emitters). If a particular library does not return promises, convert the base object by using a helper function like Bluebird.promisifyAll().
Event emitters (like streams) can still cause uncaught exceptions. So make sure you are handling the error event properly; for example:
app.get('/', wrap(async (req, res, next) => {
    let company = await getCompanyById(req.query.id)
    let stream = getLogoStreamById(company.id)
    stream.on('error', next).pipe(res)
}))
Example:
module.exports.reader = function (req, res) {
    return new Promise((resolve, reject) => {
        // define array for holding csv data, in this case skus
        const csvArray = [];
        // max number of columns in csv
        const maxColumns = parseInt(req.body.maxColumns);
        // create an array of column headers to check chunks against so we don't parse headers
        let columnHeader = req.body.moduleTarget.split(',');
        ...
        // error handling: reject so the caller's .catch() can respond
        readStream.on('error', function (err) {
            console.log('Error while reading file stream ' + err);
            reject(err);
        });
        readStream.on('end', function () {
            console.log('finished reading csv, resolving with the array for the next middleware');
            resolve(csvArray); // resolve instead of return so the route's .then() receives the array
        });
    })
}
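Once reader returns a promise, the route also has to pass a callback to .then() rather than calling sku.reactivate(data) immediately (in the original route, data is undefined at call time). A sketch, reusing the sku.reactivate helper and the error JSON shape from the question; whether sku.reactivate sends the response itself is an assumption here:

    router.route('/tools/sku/reactivate').post(upload.single('csvdata'), function (req, res) {
        console.log('handing request over to helper.csv');
        csv.reader(req, res)
            .then(function (csvArray) {
                // runs only after the 'end' event has resolved the promise
                sku.reactivate(csvArray);
            })
            .catch(function (err) {
                res.json({message: String(err), errorType: '1'});
            });
    });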
