I have been stumped on this for a while. I am fairly new to Google Apps Script and wanted to see if there is a way to make this happen. So far, I've tried a few methods within Google Sheets but can't seem to get it working.
The code below does give me an output of all the data; however, each entry nested in data.custom_fields[x] contains multiple key/value pairs separated by ",". I would like to filter out the other keys and use only whatever follows "display_value=". The display_value= is not always in the same position, so I have to search for it.
I am assuming some kind of if statement would be used here.
An example of the object is:
{type=x, resource_subtype=x, created_by={name=x, gid=x, resource_type=x}, display_value=Cool Value, description=x, enabled=x, resource_type=custom_field, gid=x, enum_options=[x.lang.Object;x, enum_value={x}, name=x}
I've tried the split function as well, but I'm not sure how to filter out the values I need.
function Users() {
var options = {
"headers" : {
"Authorization": "API Key here"
}
}
var response = UrlFetchApp.fetch("URL here", options);
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheets = ss.getSheets();
var sheet = ss.getSheetByName("Tab Name here"); // specific sheet name getSheetByName(""); alternatively use ss.getActiveSheet()
var dataAll = JSON.parse(response.getContentText()); //
var dataSet = dataAll.data; // "data" is the key containing the relevant objects
var rows = [],
data;
for (i = 0; i < dataSet.length; i++) {
data = dataSet[i];
rows.push([
data.gid,
data.name,
data.permalink_url,
data.due_on,
data.custom_fields[1],
data.custom_fields[2],
data.custom_fields[4],
data.custom_fields[5],
data.custom_fields[6],
data.custom_fields[7],
data.custom_fields[8],
data.custom_fields[9],
]); //your JSON entities here
}
// [row to start on], [column to start on], [number of rows], [number of columns]
dataRange = sheet.getRange(2, 1, rows.length, 12);
dataRange.setValues(rows);
}
Thank you in advance!
Example Image of JSON imported data
Although they appear to be separated by commas, that is only how they're displayed in the log. Because you're using JSON.parse, you're working with an Object, not a string.
Because data.custom_fields is an array of objects, you can access the property/key values as data.custom_fields[x].display_value (see the sketch after the links below).
Learn More:
JSON.parse()
Accessing Object Properties
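For example, inside the existing loop the custom-field columns could be built like this (a sketch, not the asker's exact code; the small helper guards against entries that have no display_value):
function getDisplayValue(field) {
  // Return the display_value of a custom field object, or '' if it is missing.
  return field && field.display_value ? field.display_value : '';
}

// ...inside the existing for loop:
rows.push([
  data.gid,
  data.name,
  data.permalink_url,
  data.due_on,
  getDisplayValue(data.custom_fields[1]),
  getDisplayValue(data.custom_fields[2]),
  getDisplayValue(data.custom_fields[4]),
  getDisplayValue(data.custom_fields[5]),
  getDisplayValue(data.custom_fields[6]),
  getDisplayValue(data.custom_fields[7]),
  getDisplayValue(data.custom_fields[8]),
  getDisplayValue(data.custom_fields[9]),
]);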
If you want to extract display_value, try
let myVal = myData.match(/(?<=display_value=)[^,]+/g)[0]
I guess that myData would be data.custom_fields[5], so replace it with
data.custom_fields[5].match(/(?<=display_value=)[^,]+/g)[0]
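One caveat: match() returns null when the pattern is not found, so indexing the result with [0] will then throw. A slightly more defensive sketch (assuming myData is a string in the format shown in the question):
var m = String(myData).match(/(?<=display_value=)[^,]+/);
var myVal = m ? m[0] : ''; // fall back to an empty string when display_value is absent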
I have a script that should create a PDF file from a Google Form submission and grab the data to be inserted as an object. However, I am using the replaceText method to make the changes to the doc, and I'm getting the following error.
Exception: Invalid argument: replacement
at Create_PDF(Code:37:8)
at After_Submit(Code:13:19)
It is supposed to replace the values in the generated doc file, and it worked when I used namedValues. However, now that I'm using the range instead, it doesn't seem to work.
function After_Submit(e){
var range = e.range;
var row = range.getRow(); //get the row of newly added form data
var sheet = range.getSheet(); //get the Sheet
var headers = sheet.getRange(1, 1, 1, 129).getValues().flat(); //get the header names from the first row (129 columns)
var data = sheet.getRange(row, 1, 1, headers.length).getValues(); //get the values of newly added form data + formulated values
var values = {}; // create an object
for( var i = 0; i < headers.length; i++ ){
values[headers[i]] = data[0][i]; //add elements to values object and use headers as key
}
Logger.log(values);
const pdfFile = Create_PDF(values);
sendEmail(e.namedValues['Email Address to Receive File '][0],pdfFile);
}
function sendEmail(email,pdfFile){
GmailApp.sendEmail(email, "Subject", "Files Attached", {
attachments: [pdfFile],
name: "From Email"
});
}
function Create_PDF(values) {
const PDF_folder = DriveApp.getFolderById("ID_1");
const TEMP_FOLDER = DriveApp.getFolderById("ID_2");
const PDF_Template = DriveApp.getFileById('ID_3');
const newTempFile = PDF_Template.makeCopy(TEMP_FOLDER);
const OpenDoc = DocumentApp.openById(newTempFile.getId());
const body = OpenDoc.getBody();
console.log(body);
body.replaceText("{{Timestamp}}", values['Timestamp'][0]);
body.replaceText("{{Location}}", values['Location'][0]);
body.replaceText("{{Item1}}", values['Item1'][0]);
body.replaceText("{{Item2}}", values['Item2'][0]);
body.replaceText("{{Itme3}}", values['Item3'][0]);
body.replaceText("{{e1}}", values['e1'][0]);
body.replaceText("{{e2}}", values['e2'][0]);
body.replaceText("{{e3}}", values['e3'][0]);
body.replaceText("{{e4}}", values['e4'][0]);
body.replaceText("{{e5}}", values['e5'][0]);
body.replaceText("{{e6}}", values['e6'][0]);
body.replaceText("{{e7}}", values['e7'][0]);
body.replaceText("{{e8}}", values['e8'][0]);
body.replaceText("{{e9}}", values['e9'][0]);
body.replaceText("{{e10}}", values['e10'][0]);
body.replaceText("{{e11}}", values['e11'][0]);
body.replaceText("{{e12}}", values['e12'][0]);
body.replaceText("{{e13}}", values['e13'][0]);
body.replaceText("{{e14}}", values['e14'][0]);
body.replaceText("{{e15}}", values['e15'][0]);
body.replaceText("{{e16}}", values['e16'][0]);
body.replaceText("{{e17}}", values['e17'][0]);
body.replaceText("{{e18}}", values['e18'][0]);
body.replaceText("{{e19}}", values['e19'][0]);
body.replaceText("{{e20}}", values['e20'][0]);
body.replaceText("{{e21}}", values['e21'][0]);
body.replaceText("{{e22}}", values['e22'][0]);
body.replaceText("{{e23}}", values['e23'][0]);
body.replaceText("{{e24}}", values['e24'][0]);
body.replaceText("{{e25}}", values['e25'][0]);
body.replaceText("{{e26}}", values['e26'][0]);
body.replaceText("{{e27}}", values['e27'][0]);
body.replaceText("{{e28}}", values['e28'][0]);
body.replaceText("{{e29}}", values['e29'][0]);
body.replaceText("{{e30}}", values['e30'][0]);
body.replaceText("{{e31}}", values['e31'][0]);
body.replaceText("{{e32}}", values['e32'][0]);
body.replaceText("{{e33}}", values['e33'][0]);
body.replaceText("{{e34}}", values['e34'][0]);
body.replaceText("{{e35}}", values['e35'][0]);
body.replaceText("{{e36}}", values['e36'][0]);
body.replaceText("{{e37}}", values['e37'][0]);
body.replaceText("{{e38}}", values['e38'][0]);
body.replaceText("{{e39}}", values['e39'][0]);
body.replaceText("{{H1}}", values['H1'][0]);
body.replaceText("{{H2}}", values['H2'][0]);
body.replaceText("{{H3}}", values['H3'][0]);
body.replaceText("{{H4}}", values['H4'][0]);
body.replaceText("{{H5}}", values['H5'][0]);
body.replaceText("{{H6}}", values['H6'][0]);
body.replaceText("{{H7}}", values['H7'][0]);
body.replaceText("{{H8}}", values['H8'][0]);
body.replaceText("{{H9}}", values['H9'][0]);
body.replaceText("{{H10}}", values['H10'][0]);
body.replaceText("{{H11}}", values['H11'][0]);
body.replaceText("{{H12}}", values['H12'][0]);
body.replaceText("{{H13}}", values['H13'][0]);
body.replaceText("{{H14}}", values['H14'][0]);
OpenDoc.saveAndClose();
const BLOBPDF = newTempFile.getAs(MimeType.PDF);
const pdfFile = PDF_folder.createFile(BLOBPDF).setName("FLHA");
console.log("The file has been created ");
return pdfFile;
}
Your question was how to loop through an object and replace text.
This creates an object from Sheet0:
Sheet0:
one    | pattern
two    | this is the pattern
three  | pattern pattern
four   | nothing
five   | nothing
Code:
function replacepattern() {
const ss = SpreadsheetApp.getActive();
const sh = ss.getSheetByName('Sheet0');
const vs = sh.getRange(1,1,sh.getLastRow(), 2).getValues();
//creating object from spreadsheet
let obj = {pA:[]};
vs.forEach(r =>{
obj[r[0]]=r[1];
obj.pA.push(r[0]);
});
Logger.log(JSON.stringify(obj));
let oA = obj.pA.map(p => [obj[p].replace(/pattern/g,'replacement')]);//doing the replacement in an object
sh.getRange(1,sh.getLastColumn() + 1,oA.length, oA[0].length).setValues(oA);//outputting the replaced string in the next column
Logger.log(JSON.stringify(oA));
}
Sheet0 after running once:
one    | pattern             | replacement
two    | this is the pattern | this is the replacement
three  | pattern pattern     | replacement replacement
four   | nothing             | nothing
five   | nothing             | nothing
This is related to my answer here.
The error Exception: Invalid argument: replacement at Create_PDF(Code:37:8) at After_Submit(Code:13:19) is caused by the null value of values['Timestamp'][0]. If you try to print the data type of values['Timestamp'], it will return type object; since that object does not have a value at index 0, it will return a null value.
For entries of type String, adding [0] returns only the first character of the string. For example, if you have the string "Test", adding [0] to it will return "T".
To fix that, just remove the [0] from all of the body.replaceText(..., values['...'][0]) entries.
OR
Loop through the values object by replacing the body.replaceText entries in your code with this:
for (const key in values) {
body.replaceText("{{"+key+"}}", values[key]);
}
Example usage:
Form inputs:
Output:
Reference:
JavaScript for..in
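If some submissions leave a header without a value (which is what triggers the Invalid argument error in the first place), a slightly more defensive variant of the loop might look like this (a sketch, not the only way to handle it):
for (const key in values) {
  const value = values[key];
  // replaceText() rejects null/undefined replacements, so fall back to an empty string.
  body.replaceText("{{" + key + "}}", value == null ? "" : String(value));
}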
I'm trying to use Google Apps Script to take a CSV from Google Drive and put it into BigQuery. When I upload, I get this error:
"Error while reading data, error message: Error detected while parsing row starting at position: 560550. Error: Data between close double quote (") and field separator."
I've tried looking at that byte position of the file, and it's way outside the bounds of the CSV (it only goes to ~501500 bytes).
Here's a link to the CSV that I'm using which is a scrape of a website: https://drive.google.com/file/d/1k3cGlTSA_zPQCtUkt20vn6XKiLPJ7mFB/view?usp=sharing
Here's my relevant code:
function csvToBigQuery(exportFolder, csvName, bqDatasetId){
try{
//get most recent export from Screaming Frog
var mostRecentFolder = [];
while(exportFolder.hasNext()){
var folder = exportFolder.next();
var lastUpdated = folder.getLastUpdated();
if(mostRecentFolder.length == 0)
mostRecentFolder = [folder.getLastUpdated(),folder.getId()];
else if(lastUpdated > mostRecentFolder[0])
mostRecentFolder = [lastUpdated, folder.getId()];
}
var folderId = mostRecentFolder[1];
var file = DriveApp.getFolderById(folderId).getFilesByName(csvName + '.csv').next();
if(!file)
throw "File doesn't exist";
//get csv and add date column.
//getBlob().getDataAsString().replace(/(["'])(?:(?=(\\?))\2[\s\S])*?\1/g, function(e){return e.replace(/\r?\n|\r/g, ' ')})
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
Logger.log(rows);
var numColumns = rows[0].length;
rows.forEach(function(row){
row[numColumns] = date;
});
rows[0][numColumns] = 'Date';
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
let csvData = csvRows.map(values => values.join(',')).join('\n');
//log(csvData)
var blob = Utilities.newBlob(csvData, 'application/octet-stream');
//create job for inserting to BQ.
var loadJob = {
configuration: {
load: {
destinationTable: {
projectId: bqProjectId,
datasetId: bqDatasetId,
tableId: csvName
},
autodetect: true, // Infer schema from contents.
writeDisposition: 'WRITE_APPEND',
}
}
};
//append to table in BQ.
BigQuery.Jobs.insert(loadJob, bqProjectId, blob);
}catch(e){
Logger.log(e);
}
}
Modification points:
From your error message, I thought that there might be parts which are not enclosed by double quotes. So I checked your CSV data: when every match of \"(|.+?)\" is replaced with an empty string using the following script, it turns out that row 711 still has a value left over.
function sample() {
var id = "###"; // File ID of your CSV file.
// This is your script.
var file = DriveApp.getFileById(id);
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
var numColumns = rows[0].length;
var date = "sample";
rows.forEach(function(row){
row[numColumns] = date;
});
rows[0][numColumns] = 'Date';
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
let csvData = csvRows.map(values => values.join(',')).join('\n');
// I added below script for checking your CSV data.
var res = csvData.replace(/\"(|.+?)\"/g, "");
DriveApp.createFile("sample.txt", res);
}
Row 711 is as follows.
"https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello\","text/html; charset=utf-8","200","OK","Non-Indexable","Canonicalised","Lip Shield Trio - Restores, Protects + Water-resistant – Supergoop!","67","595","Moisturizing lip protection made from antioxidant-rich coconut, avocado, and grape seed oil.","92","576","","0","Lip Shield Trio","15","Lip Shield Trio","15","Why We Love It","14","Ingredients","11","","","","https://supergoop.com/products/lip-shield-trio","","","","","451488","754","1.686","5","","12","4","0.590","205","80","8","5","","","","","f6d1476960d22b1c5964581e161bdd49","0.064","","","","","HTTP/1.1","https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello%5C"
From this value, I found that \" is used in "https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello\". I think this is the reason for your issue.
So in order to avoid this issue, how about the following modification?
Modified script:
From:
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
To:
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""').replace(/\\"/g, '')));
or
From:
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
To:
var rows = Utilities.parseCsv(file.getBlob().getDataAsString().replace(/\\/g, ''));
With this modification, I could confirm that the file size is reduced by 2 bytes compared with your original script. Also, when the check script above is run on the CSV data produced by the modified script, I could confirm that no rows have leftover values.
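To illustrate the point with a minimal, hypothetical value (a field that ends with a backslash, as in row 711):
function demo() {
  var value = 'https://example.com/page\\'; // hypothetical parsed cell value that ends with a single backslash
  var asIs = JSON.stringify(value).replace(/\\"/g, '""');   // leaves a stray backslash before the closing quote
  var cleaned = JSON.stringify(value.replace(/\\/g, ''));   // backslash removed first, so the field stays properly quoted
  Logger.log(asIs);    // "https://example.com/page\""
  Logger.log(cleaned); // "https://example.com/page"
}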
I am learning how to code, so sorry if this is too basic, but I am running into trouble here:
I've been trying to invoke the Google Natural Language API to give me information about 210 rows of my Google Spreadsheet (the whole table has 211 rows). I would like to save the results in one JSON file.
I am trying to run a loop with the code below, but the JSON file only contains the information corresponding to the first row. I also tried putting the DriveApp.createFile line of code inside the loop, but then I get many JSON files, each one with the information corresponding to a single row. What I would like is one JSON file with the information from all 210 rows.
I would appreciate your help, please.
function analyzeText() {
var client = "Spreadsheet_ID";
var query = SpreadsheetApp.openById(client).getSheetByName("1. Template");
var result = SpreadsheetApp.openById(client).getSheetByName("Teste - Natural Language API");
var lrow = query.getLastRow();
for(var i=2; i<=211;i++)
{
var text = query.getRange(i,211).getValue()
var requestUrl = [
'https://language.googleapis.com/v1beta2/documents:analyzeEntities?key=',
'API_KEY_XXXXXXXXXXXXXXXXXXX'
].join("");
var data = {
"document": {
"language": "en-us",
"type": "PLAIN_TEXT",
"content": text
},
"encodingType": "UTF8"
};
var options = {
method : "POST",
contentType: "application/json",
payload : JSON.stringify(data)
};
var response = UrlFetchApp.fetch(requestUrl, options);
var data = JSON.parse(response);
}
DriveApp.createFile('response3.json', response, MimeType.PLAIN_TEXT);
}
Instead of the approach you are taking (a for loop combined with getValue(), which is a slow method to call repeatedly), I would suggest you consider the one in this code:
function analyzeText() {
var clientId = "your-sheet-id";
var ss = SpreadsheetApp.openById(clientId);
var templateSheet = ss.getSheetByName("1. Template");
// .getRange(row, column, numRows) -> From the first row and col, take the next 4 rows
// Modify these arguments depending in where you want to start and how many rows you want
var data = templateSheet.getRange(1, 1, 4).getValues();
// You will get an array 2D, using join you will able to get an string from
// all the elements in that array
var text = data.join();
var requestUrl = [
'https://language.googleapis.com/v1beta2/documents:analyzeEntities?key=',
'API_KEY_XXXXXXXXXXXXXXXXXXX'
].join("");
// Now text will have all your cell values and you only need to do one request
var data = {
"document": {
"language": "en-us",
"type": "PLAIN_TEXT",
"content": text
},
"encodingType": "UTF8"
};
var options = {
method : "POST",
contentType: "application/json",
payload : JSON.stringify(data)
};
var response = UrlFetchApp.fetch(requestUrl, options);
var data = JSON.parse(response);
DriveApp.createFile('response3.json', response, MimeType.PLAIN_TEXT);
}
In this way, you only need to make one request, which will be faster than running your loop 211 times. I would also recommend you check:
Apps Script Quotas: Running your code as you have it gives you more chances of hitting these quotas.
Best Practices: These give you a better idea of why I suggest avoiding the getValue() method in a loop.
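If you do need one entity result per row (rather than a single combined request), a possible sketch is to collect every response in an array and write one JSON file at the end; note that 210 UrlFetchApp calls still count against the quotas mentioned above (hypothetical sheet ID and column, adjust to your layout):
function analyzeEachRow() {
  var ss = SpreadsheetApp.openById("your-sheet-id"); // hypothetical ID
  var sheet = ss.getSheetByName("1. Template");
  // Same column the question reads (211), rows 2-211.
  var texts = sheet.getRange(2, 211, 210, 1).getValues().flat();
  var requestUrl = 'https://language.googleapis.com/v1beta2/documents:analyzeEntities?key=API_KEY_XXXXXXXXXXXXXXXXXXX';
  var results = texts.map(function(text) {
    var payload = {
      document: { language: "en-us", type: "PLAIN_TEXT", content: text },
      encodingType: "UTF8"
    };
    var response = UrlFetchApp.fetch(requestUrl, {
      method: "post",
      contentType: "application/json",
      payload: JSON.stringify(payload)
    });
    return JSON.parse(response.getContentText());
  });
  // One file containing an array with all 210 responses.
  DriveApp.createFile('response3.json', JSON.stringify(results), MimeType.PLAIN_TEXT);
}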
Background:
I'm trying to upload an individual row of data from a Google Sheet and append it to a BigQuery table.
Method: I've been using https://developers.google.com/apps-script/advanced/bigquery to do this, but instead of a file of data as in the example, I am using my own sheet and data from a specific row:
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name);
var lastRow = currentSheet.getLastRow()
var lastC = currentSheet.getLastColumn()
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
"rows" is the row of data to be imported to BQ. I've tried a multitude of things, and according to another StackOverflow question, "rowsCSV" makes the 2D array of values CSV.
var rowsCSV = rows.join("\n");
var data = rowsCSV.getBlob().setContentType('application/octet-stream');
Problem: Every time I run the function, I get the error "Cannot find function getBlob in object Blob. " or, "Cannot convert Array to (class)[][]", or "Cannot find function getBlob in object Tue May 16 2017 00:00:00 GMT+0200 (CEST),58072.4,,,,,,,,,,,test ", where the last bit ("Tue May..") is the actual data of the row.
What am I doing wrong here?
There is no getBlob method on an array. You will have to use Utilities.newBlob() to get your blob from a string. You can find the documentation here.
var rowsCSV = rows.join("\n");
var blob = Utilities.newBlob(rowsCSV, "text/csv")
Logger.log(blob.getDataAsString())
var data = blob.setContentType('application/octet-stream');
Equivalently, you can do this:
var rowsCSV = rows.join("\n");
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream')
For anyone else viewing this, Jack Brown's answer is correct; you just need to change
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
to
var rows = currentSheet.getRange(2,1,lastRow,lastC).getValues();
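Putting both corrections together, a sketch might look like this (it reuses the variable names from the question; one small tweak to the range height is noted in the comment, since the data starts at row 2):
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name); // "name" is the sheet name used in the question
var lastRow = currentSheet.getLastRow();
var lastC = currentSheet.getLastColumn();
// Rows 2..lastRow; using lastRow - 1 here avoids picking up one blank row past the data.
var rows = currentSheet.getRange(2, 1, lastRow - 1, lastC).getValues();
var rowsCSV = rows.join("\n");
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream');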
Based on the correction given by @JackBrown, I have edited my code; however, I am unable to push data to BigQuery. The code below creates the table but doesn't push the values.
/**
* Loads a CSV into BigQuery
*/
function loadCsv() {
// Replace this value with the project ID listed in the Google
// Cloud Platform project.
var projectId = 'master-ad-data';
// Create a dataset in the BigQuery UI (https://bigquery.cloud.google.com)
// and enter its ID below.
var datasetId = 'DataImportFromSpreadsheet';
// Sample CSV file of Google Trends data conforming to the schema below.
// https://docs.google.com/file/d/0BwzA1Orbvy5WMXFLaTR1Z1p2UDg/edit
var csvFileId = '17kYH6hP2RlsFeUmwM1v6WOgm2FKrwLTXWDhA2ZLISN8';
var name = 'Sheet1';
// Create the table.
var tableId = 'pets_' + new Date().getTime();
var table = {
tableReference: {
projectId: projectId,
datasetId: datasetId,
tableId: tableId
},
schema: {
fields: [
{name: 'CampaignLabels', type: 'STRING'},
{name: 'ImpressionShare', type: 'INTEGER'}
]
}
};
table = BigQuery.Tables.insert(table, projectId, datasetId);
Logger.log('Table created: %s', table.id);
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name);
var lastRow = currentSheet.getLastRow()
var lastC = currentSheet.getLastColumn()
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
var rowsCSV = rows.join("\n");
Logger.log("Check This"+" "+rowsCSV);
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream')
// Create the data upload job.
var job = {
configuration: {
load: {
destinationTable: {
projectId: projectId,
datasetId: datasetId,
tableId: tableId
},
skipLeadingRows: 1
}
}
};
job = BigQuery.Jobs.insert(job, projectId, data);
Logger.log('Load job started. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', projectId);
}