JavaScript event flow driving me up the wall

I have a JavaScript script which uses the Google Drive API. Sometimes it works flawlessly, and other times it works only in part.
Basically it retrieves a list of files from Google Drive, downloads the files in a browser, puts the files in another folder in Google Drive, and finally erases the files from their original folder.
Here is the portion of the script which carries out the above process:
function makeRequest() {
    console.log("make request");
    var request = gapi.client.drive.files.list({
        'q': "'0BxTSfcTBxwlXflNjeVRZeWFQbUIzcDJMeElER1pDVEZla0NNVjhpWUpGTzY1ZDVUTS0yTFE' in Parents" // get only the files in a specified folder
    });
    request.execute(function(resp) {
        var x = []; // revised list of files: only those whose title starts with #FHM#
        for (i = 0; i < resp.items.length; i++) {
            if (resp.items[i].title.substring(0, 5) == "#FHM#") {
                x.push([resp.items[i].title, resp.items[i].webContentLink, resp.items[i].id]);
            }
        }
        if (x.length == 0) {
            document.getElementById("downloadButton").value = "There are no files to download";
        }
        var originalFolder = "0BxTSfcTBxwlXflNjeVRZeWFQbUIzcDJMeElER1pDVEZla0NNVjhpWUpGTzY1ZDVUTS0yTFE"; // original Google Drive folder ID
        var processedFolder = "0BxTSfcTBxwlXfkVlYVRCdnBaMFRubWM4eUt5V0FYVEdIcHdjWDd6SFhYb3pPSjBZeFZ2T3M"; // Google Drive folder for processed files
        // loop through all files and trigger a download event for each
        for (i = 0; i < x.length; i++) {
            console.log("download loop " + i);
            var dlUrl = x[i][1];
            var fileIdentity = x[i][2];
            var fileTitle = x[i][0]; // only used in the console log
            downloadUrl(dlUrl);
        }
        // loop through the final list of files and trigger the function moveFile (i.e. link the file to a new folder location)
        for (i = 0; i < x.length; i++) {
            console.log("moveFile loop " + i);
            var dlUrl = x[i][1];
            fileIdentity = x[i][2];
            fileTitle = x[i][0]; // only used in the console log
            setTimeout(moveFile(processedFolder, originalFolder, fileIdentity, fileTitle), 1000);
        }
        // loop through the final list of files and trigger the function moveFile2 (i.e. the one that deletes the original parent folder)
        for (i = 0; i < x.length; i++) {
            console.log("moveFile2 loop " + i);
            var dlUrl = x[i][1];
            fileIdentity = x[i][2];
            fileTitle = x[i][0]; // only used in the console log
            setTimeout(moveFile2(processedFolder, originalFolder, fileIdentity, fileTitle), 1000);
        }
    });
}

function downloadUrl(url) {
    var iframe = document.createElement("iframe");
    iframe.src = url;
    iframe.style.display = "none";
    document.body.appendChild(iframe);
    console.log("download triggered");
}
function moveFile(processedFolder, originalFolder, fileIdentity, fileTitle) {
    // move the file to the new processed folder
    var body = {
        'id': processedFolder
    };
    var request = gapi.client.drive.parents.insert({
        'fileId': fileIdentity,
        'resource': body
    });
    request.execute(function(resp) {});
    console.log("file should be in new location : " + fileTitle);
}

function moveFile2(processedFolder, originalFolder, fileIdentity, fileTitle) {
    // this bit removes the file from the original folder
    var request = gapi.client.drive.parents.delete({
        'parentId': originalFolder,
        'fileId': fileIdentity
    });
    request.execute(function(resp) {});
    console.log("file should be removed from old location : " + fileTitle);
}
OK, so when I run this with 1 file to process, the console.log statements list the events as:
download loop 0
download triggered
moveFile loop 0
file should be in new location : XXXXX
moveFile2 loop 0
file should be removed from old location : XXXXX
So I think that tells me the order in which the events fired. Yet sometimes I don't get all the events completing as expected: I might have some downloads missed, some files will move, and some will still be in the original location.
If I run any one of the 3 for loops on its own it works flawlessly every time, but as soon as I run all 3 it starts to fall apart and I get all sorts of console errors: forbidden, GET request denied, deleted POST request, all sorts of stuff.
So.....
is this to do with the way JavaScript handles events?
do I need to somehow delay the firing of events until the previous for loop has finished? (I've tried a setTimeout for the 2nd and 3rd for loops but it doesn't seem to help)
or am I missing something fundamental about how JavaScript works?
It's as if adding a file to a folder before the download has physically arrived on my hard drive makes it fall apart; similarly, removing a parent folder before the move has finished also makes it fall apart. As far as I understand, Google Drive is not physically moving the file, it is simply relabelling it; the actual physical location in Google's cloud remains unchanged throughout.
help!!!!!

Too much code for me to read on my mobile, but there are a couple of obvious issues.
1/ The use of setTimeout is almost always wrong, since it introduces timing errors.
2/ You have an execute call with an empty callback function, followed by a console.log on the next line. The API call is only done within the callback, not by the next line of JavaScript.
So I'd say your guess that you've misunderstood the event handling is correct.
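For example, here is a minimal sketch (my own illustration, reusing the question's gapi Drive v2 calls) of chaining each file's steps inside the callbacks, so the old parent is only removed after the new parent has been confirmed:
function processFile(entry, processedFolder, originalFolder) {
    var fileTitle = entry[0], dlUrl = entry[1], fileIdentity = entry[2];
    downloadUrl(dlUrl); // the hidden-iframe download from the question
    gapi.client.drive.parents.insert({
        'fileId': fileIdentity,
        'resource': {'id': processedFolder}
    }).execute(function(insertResp) {
        console.log("file is in new location : " + fileTitle);
        // only remove the old parent once the new one is confirmed
        gapi.client.drive.parents.delete({
            'parentId': originalFolder,
            'fileId': fileIdentity
        }).execute(function(deleteResp) {
            console.log("file removed from old location : " + fileTitle);
        });
    });
}
The three loops then collapse into a single loop that calls processFile(x[i], processedFolder, originalFolder) for each entry, with no setTimeout needed.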

Related

Automating Photoshop with JS

I had to place videos (mp4 files) in one Photoshop document. I thought it would be easier to find a solution with png/jpg and then carry it over to mp4, but the fact is that Photoshop saves png/jpg and mp4 in different ways. So, despite the fact that there is an import solution, I have difficulties with exporting mp4 by code.
I have 2 arrays of mp4 files, and each mp4 from the first array needs to be overlaid on each one from the second and saved as mp4. I solved the import problem by loading a video into an open Photoshop file with this simple code:
function replaceContents(newFile) {
    var docRef = app.open(newFile);
    return docRef;
}

function importVideos(order_number) {
    var doc = app.activeDocument;
    var file = new File('E:/path/' + order_number + '.mp4');
    // open a new document with the needed video
    var docTemp = replaceContents(file);
    // copy the opened layer with the video from the new doc to my main doc
    var layer = docTemp.activeLayer.duplicate(doc.layerSets.getByName(color), ElementPlacement.PLACEATEND);
    // close the now-unnecessary doc
    docTemp.close(SaveOptions.DONOTSAVECHANGES);
    layer.name = order_number;
    return layer;
}
Here is the code for saving the videos; in doExport() the doc should be saved as a video.
function Saving(color) {
    var array1 = app.activeDocument.layerSets.getByName('s');
    var array2 = app.activeDocument.layerSets.getByName(color);
    for (i = 0; i < 5; i++) {
        array1.artLayers[i].visible = true;
        for (j = 0; j < 5; j++) {
            array2.artLayers[j].visible = true;
            doExport();
            array2.artLayers[j].visible = false;
        }
        array1.artLayers[i].visible = false;
    }
}
So, a new question: how can I export a video from Photoshop by code, with the ability to specify the file name and the save path?
P.S. If you do this through Actions, you can't pass input parameters such as the name of the saved file; the Action is recorded exactly as you performed it.
If you know how to create arguments for Actions, you are welcome!
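As a partial pointer for the png/jpg route mentioned above: Photoshop's scripting DOM has no documented video-render call as far as I know, but exportDocument does let a script control the output file name and save path for still images. A minimal sketch, assuming an open document (the function name and parameters are illustrative):
function exportPng(doc, folderPath, fileName) {
    var opts = new ExportOptionsSaveForWeb();
    opts.format = SaveDocumentType.PNG;
    opts.PNG8 = false; // 24-bit PNG
    var outFile = new File(folderPath + '/' + fileName + '.png');
    doc.exportDocument(outFile, ExportType.SAVEFORWEB, opts);
}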

Exceeded maximum execution time error in google apps script

I am trying to run this script in Google Apps Script. It recursively lists all files within a folder, along with their size, name, URL, etc., to a spreadsheet. There is nothing wrong with the script itself, but I am running it on a folder which has thousands of files, and Apps Script only allows a few minutes of maximum runtime, so each time, after a few minutes, I get the error saying the maximum execution time was exceeded.
Is there any workaround for this issue? I am fine even if I have to run this code somewhere outside of Google Apps Script, if that's the only way out, but then again I have been told it's not possible to execute this code outside of Apps Script.
function start() {
  var sheet = SpreadsheetApp.getActiveSheet();
  sheet.clear();
  sheet.appendRow(["Name", "Date", "Size", "URL", "Download", "Description", "Type", "Folder", "Folder Slug"]);
  var folders = DriveApp.getFolderById('FOLDER_ID');
  var folder = folders.getFolders();
  if (folder.hasNext()) {
    // 1. Retrieve the file list and put it in an array.
    // 2. Sort the array by file size.
    var list = processFolder(folder).sort((a, b) => a[2] < b[2] ? 1 : -1);
    // 3. Put the array in the spreadsheet.
    sheet.getRange(2, 1, list.length, list[0].length).setValues(list);
  } else {
    Browser.msgBox('Folder not found!');
  }

  function processFolder(folder, list = []) {
    while (folder.hasNext()) {
      var f = folder.next();
      var contents = f.getFiles();
      addFilesToSheet(contents, f, list);
      var subFolder = f.getFolders();
      processFolder(subFolder, list);
    }
    return list;
  }

  function addFilesToSheet(files, folder, list) {
    var folderName = folder.getName();
    while (files.hasNext()) {
      var file = files.next();
      list.push([
        file.getName(),
        file.getDateCreated(),
        Math.round(10 * file.getSize() / 1073741824) / 10, // size in GB, rounded to 1 decimal
        file.getUrl(),
        "https://docs.google.com/uc?export=download&confirm=no_antivirus&id=" + file.getId(),
        file.getDescription() || "",
        file.getMimeType(),
        folderName
      ]);
    }
  }
}
I have managed this problem in the past using the Continuous Execution Library by Patrick Martinent:
https://gist.github.com/patt0/8395003
The basic idea (sketched below) is to:
Set up your function so it can be terminated and resumed without issue
Set up a time-based trigger to re-run the function
Run it until it nears the execution timeout and exit gracefully
Allow the trigger to restart the function
Repeat until done, then remove the trigger
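A minimal sketch of that pattern (my own illustration, not the library itself; FOLDER_ID and appendFileRow are placeholders for your folder and per-file work), shown on a flat file iterator rather than the recursive walk above:
var MAX_RUNTIME_MS = 4.5 * 60 * 1000; // stop well before the execution limit

function resumableStart() {
  var startTime = Date.now();
  var props = PropertiesService.getScriptProperties();
  var token = props.getProperty('CONTINUATION_TOKEN');
  var files = token ?
      DriveApp.continueFileIterator(token) :
      DriveApp.getFolderById('FOLDER_ID').getFiles();
  while (files.hasNext()) {
    if (Date.now() - startTime > MAX_RUNTIME_MS) {
      // Save our place and schedule a re-run in one minute.
      props.setProperty('CONTINUATION_TOKEN', files.getContinuationToken());
      ScriptApp.newTrigger('resumableStart').timeBased().after(60 * 1000).create();
      return;
    }
    appendFileRow(files.next()); // stand-in for the per-file work above
  }
  // Done: clear the saved state and remove triggers pointing at this function.
  props.deleteProperty('CONTINUATION_TOKEN');
  ScriptApp.getProjectTriggers().forEach(function(t) {
    if (t.getHandlerFunction() === 'resumableStart') ScriptApp.deleteTrigger(t);
  });
}
To keep the recursive walk resumable you would also need to persist which folders remain to be visited, for example as a queue of folder IDs in script properties.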

Using Drive API / DriveApp to convert from PDFs to Google Documents

This problem has been successfully resolved. I am editing my post to document my experience for posterity and future reference.
The Task
I have 117 PDF files (average size ~238 KB) uploaded to Google Drive. I want to convert them all to Google Docs and keep them in a different Drive folder.
The Problem
I attempted to convert the files using Drive.Files.insert. However, under most circumstances, only 5 files could be converted this way before the function expired prematurely with this error
Limit Exceeded: DriveApp. (line #, file "Code")
where the referenced line is where the insert function is called. After calling this function for the first time, subsequent calls typically failed immediately with no additional Google Doc created.
Approach
I used 3 main ways to achieve my goal. One was using the Drive.Files.insert, as mentioned above. The other two involved using Drive.Files.copy and sending a batch of HTTP requests. These last two methods were suggested by Tanaike, and I recommend reading his answer below for more information. The insert and copy functions are from Google Drive REST v2 API, while batching multiple HTTP requests is from Drive REST v3.
With Drive.Files.insert, I experienced issues with execution limitations (explained in the Problem section above). One solution was to run the function multiple times, and for that I needed a way to keep track of which files had already been converted. I had two options for this: a spreadsheet or a continuation token. That gave me 4 different methods to test: the two just mentioned, batching HTTP requests, and calling Drive.Files.copy.
Because Team Drives behave differently from regular drives, I felt it necessary to try each of those methods twice: once with the folder containing the PDFs in a regular non-Team-Drive folder, and once with that folder under a Team Drive. In total, this means I had 8 variations to test.
These are the exact functions I used. Each of these was used twice, with the only variations being the ID of the source and destination folders (for reasons stated above):
Method A: Using Drive.Files.insert and a spreadsheet
function toDocs() {
  var sheet = SpreadsheetApp.openById(/* spreadsheet id */).getSheets()[0];
  var range = sheet.getRange("A2:E118");
  var table = range.getValues();
  var len = table.length;
  var resources = {
    title: null,
    mimeType: MimeType.GOOGLE_DOCS,
    parents: [{id: /* destination folder id */}]
  };
  var count = 0;
  var files = DriveApp.getFolderById(/* source folder id */).getFiles();
  while (files.hasNext()) {
    var blob = files.next().getBlob();
    var blobName = blob.getName();
    for (var i = 0; i < len; i++) {
      if (table[i][0] === blobName.slice(5, 18)) {
        if (table[i][4])
          break;
        resources.title = blobName;
        Drive.Files.insert(resources, blob); // Limit Exceeded: DriveApp. (line 51, file "Code")
        table[i][4] = "yes";
      }
    }
    if (++count === 10) {
      range.setValues(table);
      Logger.log("time's up");
    }
  }
}
Method B: Using Drive.Files.insert and a continuation token
function toDocs() {
  var folder = DriveApp.getFolderById(/* source folder id */);
  var sprop = PropertiesService.getScriptProperties();
  var contToken = sprop.getProperty("contToken");
  var files = contToken ? DriveApp.continueFileIterator(contToken) : folder.getFiles();
  var options = {
    ocr: true
  };
  var resource = {
    title: null,
    mimeType: null,
    parents: [{id: /* destination folder id */}]
  };
  while (files.hasNext()) {
    var blob = files.next().getBlob();
    resource.title = blob.getName();
    resource.mimeType = blob.getContentType();
    Drive.Files.insert(resource, blob, options); // Limit Exceeded: DriveApp. (line 113, file "Code")
    sprop.setProperty("contToken", files.getContinuationToken());
  }
}
Method C: Using Drive.Files.copy
Credit for this function goes to Tanaike -- see his answer below for more details.
function toDocs() {
  var sourceFolderId = /* source folder id */;
  var destinationFolderId = /* destination folder id */;
  var files = DriveApp.getFolderById(sourceFolderId).getFiles();
  while (files.hasNext()) {
    var res = Drive.Files.copy({parents: [{id: destinationFolderId}]}, files.next().getId(), {convert: true, ocr: true});
    Logger.log(res);
  }
}
Method D: Sending batches of HTTP requests
Credit for this function goes to Tanaike -- see his answer below for more details.
function toDocs() {
  var sourceFolderId = /* source folder id */;
  var destinationFolderId = /* destination folder id */;
  var files = DriveApp.getFolderById(sourceFolderId).getFiles();
  var rBody = [];
  while (files.hasNext()) {
    rBody.push({
      method: "POST",
      endpoint: "https://www.googleapis.com/drive/v3/files/" + files.next().getId() + "/copy",
      requestBody: {
        mimeType: "application/vnd.google-apps.document",
        parents: [destinationFolderId]
      }
    });
  }
  var cycle = 20; // Number of API calls at 1 batch request.
  for (var i = 0; i < Math.ceil(rBody.length / cycle); i++) {
    var offset = i * cycle;
    var body = rBody.slice(offset, offset + cycle);
    var boundary = "xxxxxxxxxx";
    var contentId = 0;
    var data = "--" + boundary + "\r\n";
    body.forEach(function(e) {
      data += "Content-Type: application/http\r\n";
      data += "Content-ID: " + ++contentId + "\r\n\r\n";
      data += e.method + " " + e.endpoint + "\r\n";
      data += e.requestBody ? "Content-Type: application/json; charset=utf-8\r\n\r\n" : "\r\n";
      data += e.requestBody ? JSON.stringify(e.requestBody) + "\r\n" : "";
      data += "--" + boundary + "\r\n";
    });
    var options = {
      method: "post",
      contentType: "multipart/mixed; boundary=" + boundary,
      payload: Utilities.newBlob(data).getBytes(),
      headers: {'Authorization': 'Bearer ' + ScriptApp.getOAuthToken()},
      muteHttpExceptions: true,
    };
    var res = UrlFetchApp.fetch("https://www.googleapis.com/batch", options).getContentText();
    // Logger.log(res); // If you use this, please remove the comment.
  }
}
What Worked and What Didn't
None of the functions using Drive.Files.insert worked. Every function using insert for conversion failed with this error:
Limit Exceeded: DriveApp. (line #, file "Code")
(line number replaced with a generic symbol). No further details or description of the error could be found. A notable variation was the one in which I used a spreadsheet and the PDFs were in a Team Drive folder; while all the other methods failed instantly without converting a single file, this one converted 5 before failing. However, when considering why this variation did better than the others, I think it was more of a fluke than anything related to the particular resources used (spreadsheet, Team Drive, etc.).
Using Drive.Files.copy and batch HTTP requests worked only when the source folder was a personal (non-Team Drive) folder. Attempting to use the copy function while reading from a Team Drive folder fails with this error:
File not found: 1RAGxe9a_-euRpWm3ePrbaGaX5brpmGXu (line #, file "Code")
(line number replaced with a generic symbol). The line being referenced is
var res = Drive.Files.copy({parents: [{id: destinationFolderId}]}, files.next().getId(), {convert: true, ocr: true});
Using batch HTTP requests while reading from a Team Drive folder does nothing: no Doc files are created and no errors are thrown. The function silently terminates without having accomplished anything.
Conclusion
If you wish to convert a large number of PDFs to Google Docs or text files, use Drive.Files.copy or send batches of HTTP requests, and make sure the PDFs are stored in a personal drive rather than a Team Drive.
Special thanks to @tehhowch for taking such an avid interest in my question and for repeatedly coming back to provide feedback, and to @Tanaike for providing code along with explanations that successfully solved my problem (with a caveat; read above for details).
You want to convert PDF files in a folder to Google Documents. The PDF files are in a folder of a Team Drive. You want to put the converted files in a folder of your Google Drive. If my understanding is correct, how about this method?
For the conversion from PDF to Google Document, files can be converted using not only Drive.Files.insert(), but also Drive.Files.copy(). The advantages of using Drive.Files.copy() are:
Although Drive.Files.insert() has a size limitation of 5 MB, Drive.Files.copy() can handle files over 5 MB.
In my environment, the process speed was faster than Drive.Files.insert().
For this method, I would like to propose the following 2 patterns.
Pattern 1 : Using Drive API v2
In this case, Drive API v2 of Advanced Google Services is used for converting files.
function myFunction() {
  var sourceFolderId = "/* source folder id */";
  var destinationFolderId = "/* dest folder id */";
  var files = DriveApp.getFolderById(sourceFolderId).getFiles();
  while (files.hasNext()) {
    var res = Drive.Files.copy({parents: [{id: destinationFolderId}]}, files.next().getId(), {convert: true, ocr: true});
    // Logger.log(res) // If you use this, please remove the comment.
  }
}
Pattern 2 : Using Drive API v3
In this case, Drive API v3 is used for converting files. And here, I used batch requests for this situation, because a batch request can carry up to 100 API calls in one HTTP request. By this, the issue of the API quota can be reduced.
function myFunction() {
  var sourceFolderId = "/* source folder id */";
  var destinationFolderId = "/* dest folder id */";
  var files = DriveApp.getFolderById(sourceFolderId).getFiles();
  var rBody = [];
  while (files.hasNext()) {
    rBody.push({
      method: "POST",
      endpoint: "https://www.googleapis.com/drive/v3/files/" + files.next().getId() + "/copy",
      requestBody: {
        mimeType: "application/vnd.google-apps.document",
        parents: [destinationFolderId]
      }
    });
  }
  var cycle = 100; // Number of API calls at 1 batch request.
  for (var i = 0; i < Math.ceil(rBody.length / cycle); i++) {
    var offset = i * cycle;
    var body = rBody.slice(offset, offset + cycle);
    var boundary = "xxxxxxxxxx";
    var contentId = 0;
    var data = "--" + boundary + "\r\n";
    body.forEach(function(e) {
      data += "Content-Type: application/http\r\n";
      data += "Content-ID: " + ++contentId + "\r\n\r\n";
      data += e.method + " " + e.endpoint + "\r\n";
      data += e.requestBody ? "Content-Type: application/json; charset=utf-8\r\n\r\n" : "\r\n";
      data += e.requestBody ? JSON.stringify(e.requestBody) + "\r\n" : "";
      data += "--" + boundary + "\r\n";
    });
    var options = {
      method: "post",
      contentType: "multipart/mixed; boundary=" + boundary,
      payload: Utilities.newBlob(data).getBytes(),
      headers: {'Authorization': 'Bearer ' + ScriptApp.getOAuthToken()},
      muteHttpExceptions: true,
    };
    var res = UrlFetchApp.fetch("https://www.googleapis.com/batch", options).getContentText();
    // Logger.log(res); // If you use this, please remove the comment.
  }
}
Note:
If the number of API calls in 1 batch request is too large (the current value is 100), please modify var cycle = 100.
If Drive API v3 cannot be used for Team Drives, please tell me. I can convert this for Drive API v2.
If the Team Drive is the cause of the issue in your situation, can you try this after copying the PDF files to your Google Drive?
Reference:
Batching Requests
If these are not useful for you, I'm sorry.
You can first of all fetch and store the IDs of all the files in a Google Sheet. Then you can process each file normally using its ID, and after you have processed it, mark that file as processed; before processing a file, check whether it has already been processed.
If there are many files, you can also store the row number up to which you have processed, and continue after that next time.
Then, finally, create a trigger to execute your function every 10 minutes or so, as shown below.
This way you can overcome the execution time limit for a single execution. API request quotas and the like will not be bypassed by this method.
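A minimal sketch of that bookkeeping (illustrative; SPREADSHEET_ID and convertFile are placeholders for your own sheet and per-file work), assuming file IDs in column A and a processed flag in column B:
function processNextBatch() {
  var sheet = SpreadsheetApp.openById('SPREADSHEET_ID').getSheets()[0];
  var rows = sheet.getDataRange().getValues();
  var startTime = Date.now();
  for (var i = 1; i < rows.length; i++) { // row 0 is the header
    if (rows[i][1] === 'yes') continue;                 // already processed
    if (Date.now() - startTime > 4 * 60 * 1000) return; // quit before the timeout
    convertFile(rows[i][0]);                  // e.g. the Drive.Files.copy call above
    sheet.getRange(i + 1, 2).setValue('yes'); // mark this file as done
  }
}
Pair this with a time-driven trigger every 10 minutes; once every row is marked, the function becomes a no-op and the trigger can be removed.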

Execution Timeout for Google Apps Script

I'm currently writing a script that copies a folder, and all of its sub-folders and files, of a very large directory from one location in Google Drive to another. This is being used for archiving purposes by the company I am employed by.
My issue is that the size and quantity of the files I am trying to archive are WAY too large to handle in the 5 minute execution time given by Google. I am trying to keep this script as a standalone web app, however I am happy to extend it as needed.
My second issue is that I need the script to run over and over again until the folders have been copied, but once it is finished I need it to stop. I was originally going to use triggers, however time-based triggers alone are inappropriate for the task I am trying to fulfil.
In short, I need my script to run until the task is completed, automatically restarting itself and avoiding execution time errors.
The full code is included below.
// Global Variables
var classroomFolderName = "Classroom";
var archiveFolderName = "Archive";

function doGet() {
  var classroomFolder = DriveApp.getFoldersByName(classroomFolderName).next();
  var archiveFolder = DriveApp.getFoldersByName(archiveFolderName).next();
  start(archiveFolder);
}

function getFolder(folderName) {
  var foldercount = 0;
  // Selects all folders named exactly as the parameter is given
  var folder = DriveApp.getFoldersByName(folderName);
  while (folder.hasNext()) {
    var folders = folder.next();
    foldercount++;
  }
  // Throws errors if number of folders != 1
  if (foldercount < 1) {
    throw 1;
  } else if (foldercount > 1) {
    throw 2;
  } else {
    return folder;
  }
}

function start(archiveFolder) {
  var sourceFolder = classroomFolderName;
  var targetFolder = "New Archive";
  var source = DriveApp.getFoldersByName(sourceFolder);
  var target = archiveFolder.createFolder(targetFolder);
  if (source.hasNext()) {
    copyFolder(source.next(), target);
  }
}

function copyFolder(source, target) {
  var folders = source.getFolders();
  var files = source.getFiles();
  while (files.hasNext()) {
    var file = files.next();
    file.makeCopy(file.getName(), target);
  }
  while (folders.hasNext()) {
    var subFolder = folders.next();
    var folderName = subFolder.getName();
    var targetFolder = target.createFolder(folderName);
    copyFolder(subFolder, targetFolder);
  }
}
This is something I came across; you may find some value in it:
https://ctrlq.org/code/20016-maximum-execution-time-limit
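The linked page uses the same pause-and-resume idea discussed elsewhere on this page. One way to make the recursive copy above resumable is to replace the recursion with an explicit queue of {sourceId, targetId} folder pairs kept in script properties, so a later run can pick up where the last one stopped. A minimal sketch (my own illustration, not the linked code; seed the queue once with the Classroom folder's ID and the new archive folder's ID):
function copyStep() {
  var props = PropertiesService.getScriptProperties();
  var queue = JSON.parse(props.getProperty('QUEUE') || '[]');
  var deadline = Date.now() + 4 * 60 * 1000; // stay under the limit
  // Each queued pair is processed whole, so a huge single folder can still overrun.
  while (queue.length > 0 && Date.now() < deadline) {
    var pair = queue.shift();
    var source = DriveApp.getFolderById(pair.sourceId);
    var target = DriveApp.getFolderById(pair.targetId);
    var files = source.getFiles();
    while (files.hasNext()) {
      var file = files.next();
      file.makeCopy(file.getName(), target);
    }
    var folders = source.getFolders();
    while (folders.hasNext()) {
      var sub = folders.next();
      queue.push({
        sourceId: sub.getId(),
        targetId: target.createFolder(sub.getName()).getId()
      });
    }
  }
  if (queue.length > 0) {
    props.setProperty('QUEUE', JSON.stringify(queue));
    ScriptApp.newTrigger('copyStep').timeBased().after(60 * 1000).create(); // run again shortly
  } else {
    props.deleteProperty('QUEUE'); // finished: stop rescheduling
  }
}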

Nodejs/Javascript Getting Process Memory of any process

I am looking for a way of getting the memory usage of any running process.
I am building a web application. I have a server (through Node.js), my file app.js, and an agent sending information to app.js through the server.
I would like to find a way to get the memory usage of any process (in order to then send this information to the agent).
Do you have any idea how I can do this? I have searched on Google but I haven't found my answer :/
Thank you
PS: I need a Windows-compatible solution :)
Windows
For Windows, use tasklist instead of ps.
In the example below, I use the ps Unix program, so it is not Windows-compatible.
Here, the %MEM is the 4th element of each finalProcess iteration. On Windows, the memory usage is the 5th element.
var myFunction = function(processList) {
    // here, your code
};

var parseProcess = function(err, stdout, stderr) {
    var lines = stdout.split("\n"),
        finalProcess = [];
    // the 1st line is a header row
    // on Windows you would start at index 2 instead
    for (var i = 1; i < lines.length; i++) {
        finalProcess.push(cleanArray(lines[i].split(" ")));
    }
    console.log(finalProcess);
    // callback to another function
    myFunction(finalProcess);
};

var getProcessList = function() {
    var exec = require('child_process').exec;
    exec('ps aux', parseProcess.bind(this));
};

// thx http://stackoverflow.com/questions/281264/remove-empty-elements-from-an-array-in-javascript
function cleanArray(actual) {
    var newArray = [];
    for (var i = 0; i < actual.length; i++) {
        if (actual[i]) {
            newArray.push(actual[i]);
        }
    }
    return newArray;
}

getProcessList();
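For the Windows side, a hedged sketch using tasklist (the /FO CSV /NH flags ask for comma-separated output with no header row, which is easier to parse than fixed-width columns; note that tasklist reports memory as a working-set size in KB, not a percentage):
var exec = require('child_process').exec;

exec('tasklist /FO CSV /NH', function(err, stdout) {
    if (err) throw err;
    var processes = stdout.trim().split('\r\n').map(function(line) {
        // each line looks like: "name","pid","session name","session#","mem usage"
        var cols = line.split('","').map(function(c) { return c.replace(/"/g, ''); });
        return { name: cols[0], pid: cols[1], memUsage: cols[4] };
    });
    console.log(processes);
});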
