Apps Script, convert a Sheet range to Blob - javascript

Background:
I'm trying to upload an individual row of data from a Google Sheet and append it to a BigQuery table.
Method: I've been using https://developers.google.com/apps-script/advanced/bigquery to do this, but instead of a file of data as in the example, I am using my own sheet with data from a specific row:
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name);
var lastRow = currentSheet.getLastRow()
var lastC = currentSheet.getLastColumn()
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
"rows" is the row of data to be imported to BQ. I've tried a multitude of things, and according to another StackOverflow question, "rowsCSV" makes the 2D array of values CSV.
var rowsCSV = rows.join("\n");
var data = rowsCSV.getBlob().setContentType('application/octet-stream');
Problem: Every time I run the function, I get the error "Cannot find function getBlob in object Blob. " or, "Cannot convert Array to (class)[][]", or "Cannot find function getBlob in object Tue May 16 2017 00:00:00 GMT+0200 (CEST),58072.4,,,,,,,,,,,test ", where the last bit ("Tue May..") is the actual data of the row.
What am I doing wrong here?

There is no getBlob method on an array. You will have to use Utilities.newBlob() to build your blob from a string. You can find the documentation for it here:
var rowsCSV = rows.join("\n");
var blob = Utilities.newBlob(rowsCSV, "text/csv")
Logger.log(blob.getDataAsString())
var data = blob.setContentType('application/octet-stream');
Equivalently, you can do this:
var rowsCSV = rows.join("\n");
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream')
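For completeness, a minimal sketch of how the blob is then handed off to BigQuery, mirroring the load-job pattern from the linked loadCsv sample; job and projectId are assumed to be configured as in that sample and are not defined here.
var rowsCSV = rows.join("\n");
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream');
// Start an asynchronous load job; the returned object carries the job ID.
var loadJob = BigQuery.Jobs.insert(job, projectId, data);
Logger.log('Load job started: ' + loadJob.jobReference.jobId);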

For anyone else viewing this, Jack Brown's answer is correct; you just need to change
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
to
var rows = currentSheet.getRange(2,1,lastRow,lastC).getValues();

Based on the correction given by #JackBrown I have edited my code; however, I am unable to push data to BigQuery. The code below creates the table but doesn't push any values.
/**
* Loads a CSV into BigQuery
*/
function loadCsv() {
// Replace this value with the project ID listed in the Google
// Cloud Platform project.
var projectId = 'master-ad-data';
// Create a dataset in the BigQuery UI (https://bigquery.cloud.google.com)
// and enter its ID below.
var datasetId = 'DataImportFromSpreadsheet';
// Sample CSV file of Google Trends data conforming to the schema below.
// https://docs.google.com/file/d/0BwzA1Orbvy5WMXFLaTR1Z1p2UDg/edit
var csvFileId = '17kYH6hP2RlsFeUmwM1v6WOgm2FKrwLTXWDhA2ZLISN8';
var name = 'Sheet1';
// Create the table.
var tableId = 'pets_' + new Date().getTime();
var table = {
tableReference: {
projectId: projectId,
datasetId: datasetId,
tableId: tableId
},
schema: {
fields: [
{name: 'CampaignLabels', type: 'STRING'},
{name: 'ImpressionShare', type: 'INTEGER'}
]
}
};
table = BigQuery.Tables.insert(table, projectId, datasetId);
Logger.log('Table created: %s', table.id);
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name);
var lastRow = currentSheet.getLastRow()
var lastC = currentSheet.getLastColumn()
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
var rowsCSV = rows.join("\n");
Logger.log("Check This"+" "+rowsCSV);
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream')
// Create the data upload job.
var job = {
configuration: {
load: {
destinationTable: {
projectId: projectId,
datasetId: datasetId,
tableId: tableId
},
skipLeadingRows: 1
}
}
};
job = BigQuery.Jobs.insert(job, projectId, data);
Logger.log('Load job started. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', projectId);
}
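Since the sample only logs that the load job started, one way to see why no rows arrive is to poll the job until it finishes and log any error BigQuery reports. A minimal sketch, assuming the BigQuery advanced service is enabled and that jobId is taken from the object returned by BigQuery.Jobs.insert (e.g. job.jobReference.jobId):
function waitForLoadJob(projectId, jobId) {
  var job = BigQuery.Jobs.get(projectId, jobId);
  while (job.status.state !== 'DONE') {
    Utilities.sleep(1000);
    job = BigQuery.Jobs.get(projectId, jobId);
  }
  if (job.status.errorResult) {
    // The errors array usually explains why rows were not appended.
    Logger.log(JSON.stringify(job.status.errors));
  } else {
    Logger.log('Load job finished successfully.');
  }
}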

Related

Filtering google sheets import data before import

I have a script that imports data from a Google Sheet. Before importing the data into the new sheet I would like to filter it. Column 2 lists our cost centers, and all cost centers starting with '41' should be filtered and imported into the sheet. Right now I'm a little bit stuck on how to filter by column 2 where the string starts with '41'.
// Gets the active sheet.
var sheet = SpreadsheetApp.getActiveSheet();
// Gets a different spreadsheet from Drive using
// the spreadsheet's ID.
var employeeActuals = SpreadsheetApp.openById(
"1yL_0eB9b6CQLOshjPglDA-MnP2HZdLeIrKh4DO-qN0c"
);
// Gets the sheet, data range, and values of the
// spreadsheet stored in employeeActuals.
var employeeActualsSheet = employeeActuals.getSheetByName("Overview")
var range = employeeActualsSheet.getDataRange();
var rangeValues = range.getValues();
var databasis = rangeValues.filter(function(Item){return Item[1] === String().startsWith(["41"]) })
Logger.log(databasis);
Try something like this:
function myfunk() {
const sheet = SpreadsheetApp.getActiveSheet();
const ss = SpreadsheetApp.openById(gobj.globals.testsourceid);
const esh = ss.getSheetByName("Sheet1")
const range = esh.getDataRange();
const vs = range.getValues();
let data = vs.filter(e => e[0].startsWith('17'))
Logger.log(data);
}
Data:
COL1    COL2    COL3
10,1    10,2    10,3
11,1    11,2    11,3
12,1    12,2    12,3
13,1    13,2    13,3
14,1    14,2    14,3
15,1    15,2    15,3
16,1    16,2    16,3
17,1    17,2    17,3
18,1    18,2    18,3
19,1    19,2    19,3
20,1    20,2    20,3
output:
12:09:45 PM Notice Execution started
12:09:45 PM Info [[17,1, 17,2, 17,3]]
12:09:47 PM Notice Execution completed
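For the original use case (cost centers in column 2 starting with '41'), a minimal end-to-end sketch along the same lines; the destination is assumed to be the active sheet, since the question does not say where the filtered rows should go.
function importFilteredCostCenters() {
  var source = SpreadsheetApp.openById("1yL_0eB9b6CQLOshjPglDA-MnP2HZdLeIrKh4DO-qN0c");
  var values = source.getSheetByName("Overview").getDataRange().getValues();
  // Column 2 (index 1) holds the cost centers; coerce to string before testing.
  var filtered = values.filter(function (row) {
    return String(row[1]).startsWith("41");
  });
  if (filtered.length === 0) return;
  var dest = SpreadsheetApp.getActiveSheet();
  // Append the filtered rows below any existing content.
  dest.getRange(dest.getLastRow() + 1, 1, filtered.length, filtered[0].length).setValues(filtered);
}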
The getValues() method returns a two-dimensional array, so in order to filter out the values you will also need a loop to do the filtering properly:
var filterVals = [];
for (let i = 0; i < rangeValues.length; i++) {
var values = rangeValues[i].filter(element => element.toString().startsWith('41'));
filterVals.push(values);
}
let databasis = filterVals.filter(element => element.toString() != "")
Logger.log(databasis);
Reference
Apps Script Range Class getValues().
You can filter data before importing. Google Sheets allows doing that with a QUERY formula or a FILTER formula.
The flow will be employeeActualsSheet -> filtered employeeActualsSheet -> import data.
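For instance, a QUERY over IMPORTRANGE in the destination sheet can do the filtering at import time; the source URL placeholder and the A:Z range below are illustrative.
=QUERY(IMPORTRANGE("SOURCE_SPREADSHEET_URL", "Overview!A:Z"), "select * where Col2 starts with '41'", 0)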
Question - what is your data destination?

Creating a Function to Process an RSS Feed in Google Sheets

I am trying to create a function that I can import into Google Sheets to view the latest bills from this website. The problem I am having is that when I create only one variable to be appended to the Google Sheet, this code works and appends the first cell. But when I create multiple variables using the same logic for different parts of the XML file this link points to, it gives me this error, even when I create completely different variables for the original document and root: TypeError: Cannot read property 'getValue' of null. Would anyone be able to show me what I am doing wrong so that all of these items can be appended to the Google Sheet, and show me a way to loop over all these items?
function getData() {
//get the data from boardgamegeek
var url = 'https://legis.delaware.gov/rss/RssFeeds/IntroducedLegislation';
var xmlrss = UrlFetchApp.fetch(url).getContentText();
var document = XmlService.parse(xmlrss);
var root = document.getRootElement();
//Clear out existing content
var sheet = SpreadsheetApp.getActiveSheet();
var rangesAddressesList = ['A:F'];
sheet.getRangeList(rangesAddressesList).clearContent();
//set variables to data from rss feed
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheets()[0];
var title = root.getChild('channel').getChild('item').getChild('title').getText();
var session = root.getChild('channel').getChild('item').getChild('derss:legislativeSession').getText();
var link = root.getChild('channel').getChild('item').getChild('link').getText();
var category = root.getChild('channel').getChild('item').getChild('category').getText();
var description = root.getChild('channel').getChild('item').getChild('description').getText();
var pubDate = root.getChild('channel').getChild('item').getChild('pubDate').getText();
sheet.appendRow(["Session", "Title", "Category", "Pub Date", "Description", "Link"]);
sheet.appendRow([session, title, category, pubDate, description, link]);
}
I believe your goal is as follows.
You want to retrieve the values of legislativeSession, title, category, pubDate, description, and link, in that order, using Google Apps Script.
You want to put the retrieved values into a Google Spreadsheet.
Modification points:
In derss:legislativeSession, derss is the namespace, so in this case the namespace is required when retrieving the child element (see the short note after the modified script).
Looking at your XML data, there are many item tags, but your script only tries to retrieve the first item.
When the values of all items are retrieved, calling appendRow in a loop makes the process cost high.
When the above points are reflected in your script, it becomes as follows.
Modified script:
function getData() {
var url = 'https://legis.delaware.gov/rss/RssFeeds/IntroducedLegislation';
var xmlrss = UrlFetchApp.fetch(url).getContentText();
// Set the object for retrieving values in order.
var retrieveNames = {legislativeSession: "Session", title: "Title", category: "Category", pubDate: "PubDate", description: "Description", link: "Link"};
// Parse XML data.
var document = XmlService.parse(xmlrss);
var root = document.getRootElement();
// Retrieve items.
var item = root.getChild('channel').getChildren("item");
// Retrieve name space of "derss".
var derssNs = root.getChild('channel').getNamespace("derss");
// By retrieving values from each item, create an array for putting values to Spreadsheet.
var values = item.reduce((ar, e) => ar.concat(
[Object.keys(retrieveNames).map(k => e.getChild(...(k == "legislativeSession" ? [k, derssNs] : [k])).getText())]
), [Object.values(retrieveNames)]);
// Put the created array to Spreadsheet.
var sheet = SpreadsheetApp.getActiveSheet();
var rangesAddressesList = ['A:F'];
sheet.getRangeList(rangesAddressesList).clearContent();
sheet.getRange(1, 1, values.length, values[0].length).setValues(values);
}
This modified script supposes that the active sheet is the first sheet. If your actual situation is different, please modify the above script accordingly.
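As a side note, the same namespace handling applies if you only want the first item, which is what the original script attempted. A small sketch reusing root and derssNs from the modified script above:
var firstItem = root.getChild('channel').getChild('item');
var session = firstItem.getChild('legislativeSession', derssNs).getText();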
References:
XML Service
reduce()

Error: Data between close double quote (") and field separator

I'm trying to use Google Apps Script to take a CSV from Google Drive and put it into BigQuery. When I upload, I get this error:
"Error while reading data, error message: Error detected while parsing row starting at position: 560550. Error: Data between close double quote (") and field separator."
I've tried looking at that byte position of the file and it's way outside the bounds of the CSV (it only goes to ~501500 bytes).
Here's a link to the CSV that I'm using which is a scrape of a website: https://drive.google.com/file/d/1k3cGlTSA_zPQCtUkt20vn6XKiLPJ7mFB/view?usp=sharing
Here's my relevant code:
function csvToBigQuery(exportFolder, csvName, bqDatasetId){
try{
//get most recent export from Screaming Frog
var mostRecentFolder = [];
while(exportFolder.hasNext()){
var folder = exportFolder.next();
var lastUpdated = folder.getLastUpdated();
if(mostRecentFolder.length == 0)
mostRecentFolder = [folder.getLastUpdated(),folder.getId()];
else if(lastUpdated > mostRecentFolder[0])
mostRecentFolder = [lastUpdated, folder.getId()];
}
var folderId = mostRecentFolder[1];
var file = DriveApp.getFolderById(folderId).getFilesByName(csvName + '.csv').next();
if(!file)
throw "File doesn't exist";
//get csv and add date column.
//getBlob().getDataAsString().replace(/(["'])(?:(?=(\\?))\2[\s\S])*?\1/g, function(e){return e.replace(/\r?\n|\r/g, ' ')})
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
Logger.log(rows);
var numColumns = rows[0].length;
rows.forEach(function(row){
row[numColumns] = date;
});
rows[0][numColumns] = 'Date';
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
let csvData = csvRows.map(values => values.join(',')).join('\n');
//log(csvData)
var blob = Utilities.newBlob(csvData, 'application/octet-stream');
//create job for inserting to BQ.
var loadJob = {
configuration: {
load: {
destinationTable: {
projectId: bqProjectId,
datasetId: bqDatasetId,
tableId: csvName
},
autodetect: true, // Infer schema from contents.
writeDisposition: 'WRITE_APPEND',
}
}
};
//append to table in BQ.
BigQuery.Jobs.insert(loadJob, bqProjectId, blob);
}catch(e){
Logger.log(e);
}
}
Modification points:
From your error message, I suspected that there might be parts that are not properly enclosed in double quotes. So I checked your CSV data by replacing every match of \"(|.+?)\" with "" using the following script, and found that row 711 still has a leftover value.
function sample() {
var id = "###"; // File ID of your CSV file.
// This is your script.
var file = DriveApp.getFileById(id);
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
var numColumns = rows[0].length;
var date = "sample";
rows.forEach(function(row){
row[numColumns] = date;
});
rows[0][numColumns] = 'Date';
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
let csvData = csvRows.map(values => values.join(',')).join('\n');
// I added below script for checking your CSV data.
var res = csvData.replace(/\"(|.+?)\"/g, "");
DriveApp.createFile("sample.txt", res);
}
Row 711 is as follows.
"https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello\","text/html; charset=utf-8","200","OK","Non-Indexable","Canonicalised","Lip Shield Trio - Restores, Protects + Water-resistant – Supergoop!","67","595","Moisturizing lip protection made from antioxidant-rich coconut, avocado, and grape seed oil.","92","576","","0","Lip Shield Trio","15","Lip Shield Trio","15","Why We Love It","14","Ingredients","11","","","","https://supergoop.com/products/lip-shield-trio","","","","","451488","754","1.686","5","","12","4","0.590","205","80","8","5","","","","","f6d1476960d22b1c5964581e161bdd49","0.064","","","","","HTTP/1.1","https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello%5C"
From this value, I found that \" is used at the end of "https://supergoop.com/products/lip-shield-trio/?utm_source=Gorgias&utm_medium=CustomerCare&utm_campaign=crosssellhello\". I think this is the reason for your issue.
So in order to avoid this issue, how about the following modification?
Modified script:
From:
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""')));
To:
let csvRows = rows.map(values =>values.map(value => JSON.stringify(value).replace(/\\"/g, '""').replace(/\\"/g, '')));
or
From:
var rows = Utilities.parseCsv(file.getBlob().getDataAsString());
To:
var rows = Utilities.parseCsv(file.getBlob().getDataAsString().replace(/\\/g, ''));
With this modification, I could confirm that the output file size is reduced by 2 bytes compared with your original script. Also, when the check script above is run against the CSV data produced by the modified script, no rows have leftover values.
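For intuition, a tiny standalone sketch (the field values are made up) showing the kind of field that triggers the error, a backslash right before a closing quote, and how stripping backslashes, as in the second fix, lets it parse cleanly.
function demoStrayBackslash() {
  // Raw CSV line equivalent to row 711's problem field: "...\","text/html"
  var bad = '"https://example.com/?a=1\\","text/html"';
  var fixed = bad.replace(/\\/g, '');
  Logger.log(Utilities.parseCsv(fixed)[0]); // [https://example.com/?a=1, text/html]
}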

Appending a new row with only formulas using Sheets API v4

I'm pretty new to the Sheets API and am running into a lot of bugs.
I would like to append a new row to a sheet based on the last row. This would include copying the format and pasting the formulas with an autofill, but not the values.
Here is what I've come up with using Apps Script.
I'm sure I'm not doing this the best way, but for the moment I've:
retrieved formulas from a range - SUCCESS
tried using an autoFill request to populate the next row with 10 columns (just a try) - FAILED
I've commented out the getFormulas-style request and show below what I have for the moment with the autoFill request.
I get the following error:
Invalid JSON payload received. Unknown name "source_and_destination" at 'requests[0]': Cannot find field.
function insertNewRow(){
var ssId = "my_spreadsheet_id"
/*var params = {
ranges: ['Feuille1!21:21'],
valueRenderOption: 'FORMULA'
};
var values = Sheets.Spreadsheets.Values.batchGet(ssId, params);
var valueRange = Sheets.newValueRange();
valueRange.majorDimension = "ROWS";
valueRange.values = values.valueRanges[0].values;
Logger.log(values.valueRanges[0].values[0].length);
valueRange.range= 'Feuille1!22:22'
//var req = Sheets.Spreadsheets.Values.update(valueRange , ssId, 'Feuille1!22:22', {
// valueInputOption: 'USER_ENTERED'
//})*/
var AFreq = Sheets.newAutoFillRequest();
AFreq.range = Sheets.newGridRange();
AFreq.range.startRowIndex = 1;
AFreq.range.startColumnIndex = 0;
AFreq.range.endRowIndex = 2;
AFreq.range.endColumnIndex = 10;
AFreq.range.sheetId = 0;
AFreq.sourceAndDestination = Sheets.newSourceAndDestination();
AFreq.sourceAndDestination.dimension = "ROWS";
AFreq.sourceAndDestination.fillLength = 10;
AFreq.sourceAndDestination.source = Sheets.newGridRange();
AFreq.sourceAndDestination.source.startRowIndex = 0;
AFreq.sourceAndDestination.source.startColumnIndex = 0;
AFreq.sourceAndDestination.source.endColumnIndex = 10
AFreq.sourceAndDestination.source.endRowIndex = 1;
AFreq.sourceAndDestination.source.sheetId = 0;
var req = Sheets.newBatchUpdateSpreadsheetRequest();
req.requests = [AFreq];
Sheets.Spreadsheets.batchUpdate(req, ssId);
}
Tell me if I'm wrong, but I thought about separating the tasks into multiple requests:
grab the formulas
insert a new row
copy/paste the preceding format to the new row
paste the formulas
Am I going in the right direction?
Any help is greatly appreciated.
Issues:
Request object is missing in Request body.
AutoFillRequest has two union area fields, whereas exactly one is acceptable.
Empty range selection in GridRange.
Solution:
Fix the syntax errors mentioned above
Use a plain JSON request body to identify such errors more easily
Sample Script:
function autoFill() {
var ssId = 'my_spreadsheet_id';
var req = {//request body
requests: [ //requests array
{//request Object
autoFill: { //autoFill request
//range OR sourceAndDestination;
//equal to selecting Sheet1!A1:J10 and clicking autoFill from menu
range: {//GridRange
sheetId: 0,
startRowIndex: 0,
endRowIndex: 10, //end exclusive
startColumnIndex: 0,
endColumnIndex: 10,
},
},
},
],
};
Sheets.Spreadsheets.batchUpdate(req, ssId);
}
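If you specifically want the source-row-plus-fill-length form the question was attempting, an equivalent sketch using the sourceAndDestination union field (exactly one of range or sourceAndDestination may be set); the sheetId and indexes reuse the illustrative values above.
function autoFillFromSourceRow() {
  var ssId = 'my_spreadsheet_id';
  var req = {
    requests: [
      {
        autoFill: {
          sourceAndDestination: {
            source: { // the existing row whose formulas and format are copied
              sheetId: 0,
              startRowIndex: 0,
              endRowIndex: 1,
              startColumnIndex: 0,
              endColumnIndex: 10,
            },
            dimension: 'ROWS', // fill downward from the source row
            fillLength: 1, // extend by one new row
          },
        },
      },
    ],
  };
  Sheets.Spreadsheets.batchUpdate(req, ssId);
}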
References:
RequestBody
AutoFillRequest
GridRange

Read Yahoo or Google Finance API data into javascript object for use in google visualization chart

I am quite a novice at JavaScript and web programming in general.
I'm trying to make a basic chart of stock prices using the Google Visualization Chart API.
My data is historical market data from either Yahoo Finance or Google Finance.
How do I read this finance data into a JavaScript object?
For example, using Python I can read this data into a dictionary or DataFrame object to store it before generating a chart from that object. I want to do the same with JS / jQuery / etc.
Here is a basic example of the code I'm trying to make work, where I'm reading in a JSON response, in this case from Google Finance, for one day of stock price info:
function drawVisualization() {
// How to get the stock data into my DataTable.?
var dataTable = google.visualization.arrayToDataTable([
// [ { "id": "694653" ,"t" : "GOOG" ,"e" : "NASDAQ" ,"l" : "1,133.16" ,"l_fix" : "1133.16" ,"l_cur" : "1,133.16" ,"s": "0" ,"ltt":"1:11PM EST" ,"lt" : "Jan 7, 1:11PM EST" ,"c" : "+15.84" ,"c_fix" : "15.84" ,"cp" : "1.42" ,"cp_fix" : "1.42" ,"ccol" : "chg" } ]
], true);
// Draw the chart.
var chart = new google.visualization.CandlestickChart(document.getElementById('visualization'));
chart.draw(dataTable, {legend:'none', width:600, height:400});
}
So my question is: how do I store the stock data in JavaScript, and how do I pass that data into google.visualization.arrayToDataTable()?
Do I pass the URL directly? Or store the JSON object the URL returns in a variable and then pass that into google.visualization.arrayToDataTable()?
Assuming you have already loaded the data into JavaScript, you need to parse it into an appropriate format for the Visualization API DataTable (as the format implied by your code is not compatible). You can parse it like this:
var financeData = /* finance data, assumed in this code to be an array of objects */;
var parsedData = [];
// create row of column headers
parsedData.push(['ID', 'ticker', 'exchange' /* ... filling out all columns you want to use */]);
// parse financeData
for (var i = 0; i < financeData.length; i++) {
parsedData.push([financeData[i].id, financeData[i].t, financeData[i].e /* ... filling out all columns you want to use */]);
}
var dataTable = google.visualization.arrayToDataTable(parsedData);
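From there, the DataTable can be handed to the question's original drawing code, assuming the columns you parsed match what the chosen chart type expects (a CandlestickChart, for example, needs numeric low/open/close/high columns).
var chart = new google.visualization.CandlestickChart(document.getElementById('visualization'));
chart.draw(dataTable, {legend: 'none', width: 600, height: 400});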
[edit - example code for building a DataTable from Yahoo Finance data source using jQuery AJAX]
function drawTable () {
// use jQuery to make an AJAX request for data
$.ajax({
url: 'http://finance.yahoo.com/webservice/v1/symbols/allcurrencies/quote?format=json',
dataType: 'jsonp',
success: function (json) {
var data = new google.visualization.DataTable();
data.addColumn('string', 'Name');
data.addColumn('string', 'Symbol');
data.addColumn('number', 'Price');
data.addColumn('date', 'UTC Time');
// parse the JSON into the DataTable
for (var i = 0; i < json.list.resources.length; i++) {
var name = json.list.resources[i].resource.fields.name;
var symbol = json.list.resources[i].resource.fields.symbol;
var price = parseFloat(json.list.resources[i].resource.fields.price);
var dateTimeArr = json.list.resources[i].resource.fields.utctime.split('T');
var dateArr = dateTimeArr[0].split('-');
var year = dateArr[0];
var month = dateArr[1] - 1; // subtract 1 to make compatible with javascript dates
var day = dateArr[2];
var timeArr = dateTimeArr[1].split(/[:\+]/);
var hour = timeArr[0];
var minute = timeArr[1];
var second = timeArr[2];
data.addRow([name, symbol, price, new Date(year, month, day, hour, minute, second)]);
}
var table = new google.visualization.Table(document.querySelector('#table_div'));
table.draw(data);
}
});
}
google.load('visualization', '1', {packages:['table'], callback: drawTable});
see it working here: http://jsfiddle.net/asgallant/dSWe7/
