Google Earth Engine real-time data - JavaScript

I am new to Google Earth Engine.
I have a program for atmosphere monitoring.
I have managed to pull historical data. However, I can't pull recent data (e.g. the last couple of days, today, yesterday); I get 'memory limit exceeded' or (Collection.toList: The value of 'count' must be positive. Got: 0.)
Since the dataset is labelled 'Near-Real-Time', does that mean the data is updated several times a day? My final goal is to pull data in (near) real time so that I can visualize it with some other program. Is that possible?
Any advice will be helpful!
Thanks in advance!
var pt = ee.Geometry.Point(23.72801716860135, 37.984115295446216);
Map.centerObject(pt, 14);
Map.addLayer(pt);

var startDate = '2023-02-09';
var endDate = '2023-02-10';

var data = ee.ImageCollection('ECMWF/CAMS/NRT')
    .select('total_aerosol_optical_depth_at_550nm_surface')
    .filterDate(startDate, endDate);
print(data);

var allDates = ee.List(data.aggregate_array('system:time_start'));
var allDatesSimple = allDates.map(function(date) {
  return ee.Date(date).format().slice(0, 10);
});
//print(allDatesSimple);

// Reduce the region and get the band value at the point.
var getTemp = function(image) {
  var value_tmmx = ee.Image(image)
      .reduceRegion(ee.Reducer.first(), pt)
      .get('total_aerosol_optical_depth_at_550nm_surface');
  return ee.Number(value_tmmx).multiply(0.1);
};

var count = data.size();
var tmmx_list = data.toList(count).map(getTemp);
print("tmmx_list", tmmx_list);

var paired = allDatesSimple.zip(tmmx_list);
print("paired", paired);

var myFeatures = ee.FeatureCollection(paired.map(function(el) {
  el = ee.List(el); // cast every element of the list
  return ee.Feature(null, {
    'date': ee.String(el.get(0)),
    'aerosol': ee.Number(el.get(1))
  });
}));
print(myFeatures);

// Export features, specifying corresponding names.
Export.table.toDrive(myFeatures,
    "export_aerosol", // my task
    "GEE_Folder",     // my export folder
    "aerosol2",       // file name
    "CSV");

Related

Retrieving List of Transactions with Square API

I'm attempting to return a list of all transactions on my Square account for a given location; however, when I run my code I get back an empty JSON object from the Transactions API. I'm looking for a correct example of how to pull a list of all transactions and read/save them with the Transactions API in JavaScript.
Here's my code:
var SquareConnect = require('square-connect');
var defaultClient = SquareConnect.ApiClient.instance;
var oauth2 = defaultClient.authentications.oauth2;
oauth2.accessToken = "ACCESS-TOKEN";
var TransactionsApi = new SquareConnect.TransactionsApi();
var listTransactions = TransactionsApi.listTransactions(locationId);
console.log(JSON.stringify(listTransactions));
The return is simply:
>>> {}
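A hedged guess at the cause: in the square-connect SDK these API methods return a Promise, so stringifying the return value immediately prints {} before the request has completed. Resolving the Promise (locationId below is a placeholder you must supply) would look roughly like this:
var SquareConnect = require('square-connect');
var defaultClient = SquareConnect.ApiClient.instance;
defaultClient.authentications.oauth2.accessToken = "ACCESS-TOKEN";

var transactionsApi = new SquareConnect.TransactionsApi();
var locationId = "YOUR-LOCATION-ID"; // placeholder

// listTransactions returns a Promise; log the result once it resolves.
transactionsApi.listTransactions(locationId).then(function(data) {
  console.log(JSON.stringify(data.transactions, null, 2));
}, function(error) {
  console.error(error);
});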

Apps Script, convert a Sheet range to Blob

Background:
I'm trying to upload an individual row of data from a Google Sheet and append it to a BigQuery table.
Method: I've been using https://developers.google.com/apps-script/advanced/bigquery to do this, but instead of a file of data as in the example, I am using my own sheet with data from a specific row:
var file = SpreadsheetApp.getActiveSpreadsheet();
var currentSheet = file.getSheetByName(name);
var lastRow = currentSheet.getLastRow()
var lastC = currentSheet.getLastColumn()
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
"rows" is the row of data to be imported to BQ. I've tried a multitude of things, and according to another StackOverflow question, "rowsCSV" makes the 2D array of values CSV.
var rowsCSV = rows.join("\n");
var data = rowsCSV.getBlob().setContentType('application/octet-stream');
Problem: Every time I run the function, I get the error "Cannot find function getBlob in object Blob. " or, "Cannot convert Array to (class)[][]", or "Cannot find function getBlob in object Tue May 16 2017 00:00:00 GMT+0200 (CEST),58072.4,,,,,,,,,,,test ", where the last bit ("Tue May..") is the actual data of the row.
What am I doing wrong here?
There is no getBlob method on an array. You will have to use Utilities.newBlob() to create your blob from a string. You can find the documentation here.
var rowsCSV = rows.join("\n");
var blob = Utilities.newBlob(rowsCSV, "text/csv")
Logger.log(blob.getDataAsString())
var data = blob.setContentType('application/octet-stream');
Equivalently you can do this
var rowsCSV = rows.join("\n");
var data = Utilities.newBlob(rowsCSV, 'application/octet-stream')
For anyone else viewing this, Jack Brown's answer is correct; you just need to change
var rows = currentSheet.getRange(2,1,1,lastC).getValues();
to
var rows = currentSheet.getRange(2,1,lastRow,lastC).getValues();
Based on the correction given by @JackBrown I have edited my code; however, I am unable to push data to BigQuery. The code below creates the table but doesn't push values.
/**
 * Loads a CSV into BigQuery.
 */
function loadCsv() {
  // Replace this value with the project ID listed in the Google
  // Cloud Platform project.
  var projectId = 'master-ad-data';
  // Create a dataset in the BigQuery UI (https://bigquery.cloud.google.com)
  // and enter its ID below.
  var datasetId = 'DataImportFromSpreadsheet';
  // Sample CSV file of Google Trends data conforming to the schema below.
  // https://docs.google.com/file/d/0BwzA1Orbvy5WMXFLaTR1Z1p2UDg/edit
  var csvFileId = '17kYH6hP2RlsFeUmwM1v6WOgm2FKrwLTXWDhA2ZLISN8';
  var name = 'Sheet1';

  // Create the table.
  var tableId = 'pets_' + new Date().getTime();
  var table = {
    tableReference: {
      projectId: projectId,
      datasetId: datasetId,
      tableId: tableId
    },
    schema: {
      fields: [
        {name: 'CampaignLabels', type: 'STRING'},
        {name: 'ImpressionShare', type: 'INTEGER'}
      ]
    }
  };
  table = BigQuery.Tables.insert(table, projectId, datasetId);
  Logger.log('Table created: %s', table.id);

  var file = SpreadsheetApp.getActiveSpreadsheet();
  var currentSheet = file.getSheetByName(name);
  var lastRow = currentSheet.getLastRow();
  var lastC = currentSheet.getLastColumn();
  var rows = currentSheet.getRange(2, 1, 1, lastC).getValues();
  var rowsCSV = rows.join("\n");
  Logger.log("Check This" + " " + rowsCSV);
  var data = Utilities.newBlob(rowsCSV, 'application/octet-stream');

  // Create the data upload job.
  var job = {
    configuration: {
      load: {
        destinationTable: {
          projectId: projectId,
          datasetId: datasetId,
          tableId: tableId
        },
        skipLeadingRows: 1
      }
    }
  };
  job = BigQuery.Jobs.insert(job, projectId, data);
  Logger.log('Load job started. Check on the status of it here: ' +
      'https://bigquery.cloud.google.com/jobs/%s', projectId);
}
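Two things are worth checking here (both are assumptions, since this follow-up was never answered above): the range already starts at row 2, so skipLeadingRows: 1 then discards the only data row you send; and load jobs run asynchronously, so a failure will not show up in Logger. A minimal sketch for inspecting the job afterwards:
// Hypothetical helper: look up a load job and log its state and first error, if any.
function checkLoadJob(projectId, jobId) {
  var job = BigQuery.Jobs.get(projectId, jobId);
  Logger.log('Job state: %s', job.status.state);
  if (job.status.errorResult) {
    Logger.log('Job error: %s', job.status.errorResult.message);
  }
}

// Usage, right after BigQuery.Jobs.insert(...):
// var insertedJob = BigQuery.Jobs.insert(job, projectId, data);
// Utilities.sleep(3000); // give the job a moment; it may still report RUNNING
// checkLoadJob(projectId, insertedJob.jobReference.jobId);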

Page is not displaying JSON object properties

My code is not printing the object's properties to the console, although it displays the object itself just fine. Why am I not able to access the information in this object?
Here is my code:
// APOD
(function Apod() {
  var api_key = 'NNKOjkoul8n1CH18TWA9gwngW1s1SmjESPjNoUFo';
  var url = 'https://api.nasa.gov/planetary/apod' + "?api_key=" + api_key;
  var apodRequest = new XMLHttpRequest();
  var apodDATA = "";
  apodRequest.onreadystatechange = function() {
    apodRequest.onload = function() {
      var responseObject = apodRequest.response;
      apodDATA = responseObject;
      $("document").ready(function() {
        $("#apodimage").attr("src", responseObject.hdurl);
      });
      console.log(responseObject.url);
    };
  };
  apodRequest.open("GET", url, true);
  apodRequest.send(null);
}());
Here is the JSON "object" that displays fine from the responseObject variable (its properties are giving me undefined):
{
"date": "2016-11-06",
"explanation": "A mere 20,000 light-years from the Sun lies NGC 3603, a resident of the nearby Carina spiral arm of our Milky Way Galaxy. NGC 3603 is well known to astronomers as one of the Milky Way's largest star-forming regions. The central open star cluster contains thousands of stars more massive than our Sun, stars that likely formed only one or two million years ago in a single burst of star formation. In fact, nearby NGC 3603 is thought to contain a convenient example of the massive star clusters that populate much more distant starburst galaxies. Surrounding the cluster are natal clouds of glowing interstellar gas and obscuring dust, sculpted by energetic stellar radiation and winds. Recorded by the Hubble Space Telescope, the image spans about 17 light-years. Follow APOD on: Facebook, Google Plus, Instagram, or Twitter",
"hdurl": "http://apod.nasa.gov/apod/image/1611/ngc3603_hubble_3885.jpg",
"media_type": "image",
"service_version": "v1",
"title": "Starburst Cluster in NGC 3603",
"url": "http://apod.nasa.gov/apod/image/1611/ngc3603_hubble_960.jpg"
}
What you get from the server is probably just a string, not an object.
You can parse the JSON string and convert it to an object using JSON.parse:
var obj = JSON.parse(responseObject);
console.log(obj.url);
You can check the type of the variable using typeof. So if you print console.log(typeof responseObject), you'll get "string". If it was an object, you'd get "object".
Also, since you are already using jQuery, consider making the AJAX request with jQuery itself; it would be more elegant. Read the documentation here.
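For reference, a minimal sketch of that jQuery approach, reusing the URL and the #apodimage selector from the question ($.getJSON parses the response for you, so no manual JSON.parse is needed):
var api_key = 'NNKOjkoul8n1CH18TWA9gwngW1s1SmjESPjNoUFo';
var url = 'https://api.nasa.gov/planetary/apod' + '?api_key=' + api_key;

$(document).ready(function() {
  // jQuery parses the JSON response before handing it to the callback.
  $.getJSON(url, function(data) {
    console.log(data.url);
    $('#apodimage').attr('src', data.hdurl);
  });
});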
Use JSON.parse to convert your response to JSON, because your request is returning a string.
Note: do not use $("document").ready() inside the AJAX response.
It's working fine for me:
(function Apod() {
  var api_key = 'NNKOjkoul8n1CH18TWA9gwngW1s1SmjESPjNoUFo';
  var url = 'https://api.nasa.gov/planetary/apod' + "?api_key=" + api_key;
  var apodRequest = new XMLHttpRequest();
  var apodDATA = "";
  apodRequest.onreadystatechange = function() {
    apodRequest.onload = function() {
      var responseObject = apodRequest.response;
      apodDATA = responseObject;
      // Parse the response string first, then read its properties.
      var json = JSON.parse(responseObject);
      $("#apodimage").attr("src", json.hdurl);
      console.log(json.url);
    };
  };
  apodRequest.open("GET", url, true);
  apodRequest.send(null);
}());

Organic Search Segment in Google Analytics API for Apps Script

I'm trying to get all organic entrances for a single URI. I filtered for ga:pagePath==uri and tried to use the segment ga:organicSearches. However, the segment doesn't seem to work! I get the following error: "Invalid value 'ga:organicSearches' for segment parameter". Any ideas on how to fix this?
Here is my function:
function getEntrancesForUri(uri) {
  var endDate = '2016-01-26';
  var startDate = '2015-12-28';
  var profileId = xxxxxxxx;
  var tableId = 'ga:' + profileId;
  var optArgs = {
    'filters': 'ga:pagePath==' + uri,
    'segment': 'ga:organicSearches'
  };
  var result = Analytics.Data.Ga.get(
      tableId,
      startDate,
      endDate,
      'ga:entrances',
      optArgs
  );
  if (result) {
    return result;
  } else {
    return 0;
  }
}
That is not how you construct a segment. Also, ga:organicSearches is a metric, and you probably want to segment by a dimension.
You can use a dynamic segment as described here, which would probably look like this:
sessions::condition::ga:medium==organic
This segments out sessions that have arrived via an organic search.
Alternatively you can create your segment in the GA interface and find the segment id via the Query Explorer, and use that in your query. Testing your queries in the Query Explorer is a good idea in any case, since you get instant feedback and sometimes even a useful error message.
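Applied to the function in the question, only the segment line changes (a sketch; everything else stays as above):
var optArgs = {
  'filters': 'ga:pagePath==' + uri,
  // Segment on the ga:medium dimension instead of the ga:organicSearches metric.
  'segment': 'sessions::condition::ga:medium==organic'
};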

BIRT: access content of a dataset from beforeFactory

I'm trying (desperately) to access the content of a dataset by script in the beforeFactory event.
The task at hand is to create design elements from a linked library and place them in a certain cell of a grid. Everything works fine except for the "place them in a certain cell of a grid" part.
The information about which element is to be created and where it is to be placed is available in a dataset (dsDesignInformation) with three columns: targetRow, targetColumn, targetContent. targetContent contains a string which is used to find an element in the library.
For example: there is a grid placed on the body (grdMasterGrid), with two rows and two columns. If dsDesignInformation contains a row like (1, 1, "testObjectName"), I want to create the element "testObject" from a linked library and place it at the intersection of row 1 and column 1 of my grdMasterGrid.
The code for creating and placing the element:
importPackage(org.eclipse.birt.report.model.api);

var myLibraryHandle = reportContext.getDesignHandle().getLibrary("myLibraryName");
var myElementFactory = reportContext.getDesignHandle().getElementFactory();

// should be the object name as defined in dsDesignInformation
var myTargetElementHandle = myLibraryHandle.findElement("testObjectName");
var myCreatedElementHandle = myElementFactory.newElementFrom(myTargetElementHandle, "someUniqueElementName");

var myMasterGridHandle = reportContext.getDesignHandle().findElement("grdMasterGrid");
// should be target coordinates as defined in dsDesignInformation
var myTargetCellHandle = myMasterGridHandle.getCell(1, 1);
myTargetCellHandle.getContent().add(myCreatedElementHandle);
This works like a charm when used with hard-coded target information and placed in the beforeFactory of the report design.
I do, however, need to access the contents of dsDesignInformation and pass them on to the script above. So far (4 days in) I have had zero (as in null) success.
I would be glad for any help or ideas on the topic.
Regards,
maggutz
It is possible to do this, but with some severe restrictions.
The main restriction is: You cannot use your DataSource and your DataSet directly.
Instead, you'll have to copy them and work with the copy.
Don't ask me why this is, because I don't know. But I learned it the hard way during hours and days of trying...
The next restriction is: You cannot access report parameter values, unfortunately. This is not a problem if your query works without parameters.
Otherwise, you'll have to find a way to access the parameter value anyhow. Depending on how your report is integrated into the app, you could try writing the value into the appContext before calling BIRT, for example.
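For example, reading such a value back in the beforeFactory event might look like this (a minimal sketch; "customerId" is a made-up key that must match whatever the calling application puts into the app context):
// Assumes the caller did appContext.put("customerId", value) before running the report.
var customerId = reportContext.getAppContext().get("customerId");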
Here is a fragment of working code (in the beforeFactory event) to show you how to work around this limitation:
importPackage(Packages.org.eclipse.birt.report.model.api);
importPackage(Packages.org.eclipse.birt.data.engine.api);
importPackage(Packages.org.eclipse.birt.data.engine.api.querydefn);
importPackage(Packages.org.eclipse.birt.data.engine.core);

var myconfig = reportContext.getReportRunnable().getReportEngine().getConfig();
var de = DataEngine.newDataEngine(myconfig, null);

// This is the existing data source.
var dsrc = reportContext.getDesignHandle().findDataSource("lisa");

// We create a new DataSource which is only to be used in this event.
var odaDataSource = new OdaDataSourceDesign("Test Data Source");

// Now we copy the relevant properties from the existing DataSource to the new one.
var dbUrl = dsrc.getProperty("odaURL").toString();
var dbUsr = dsrc.getProperty("odaUser").toString();
var dbPwd = dsrc.getProperty("odaPassword").toString();
var dbDrv = dsrc.getProperty("odaDriverClass").toString();
odaDataSource.setExtensionID("org.eclipse.birt.report.data.oda.jdbc");
odaDataSource.addPublicProperty("odaURL", dbUrl);
odaDataSource.addPublicProperty("odaDriverClass", dbDrv);
odaDataSource.addPublicProperty("odaUser", dbUsr);
odaDataSource.addPublicProperty("odaPassword", dbPwd);
// log.info("odaURL=" + dbUrl); // Only if you have a logging framework at hand

// Now create a new DataSet and set its query etc.
// I suppose that it is possible to copy the properties from an existing DataSet instead.
// However, I didn't try that.
var odaDataSet = new OdaDataSetDesign("Test Data Set");
odaDataSet.setDataSource(odaDataSource.getName());
odaDataSet.setExtensionID("org.eclipse.birt.report.data.oda.jdbc.JdbcSelectDataSet");

// This is the SQL query (in my application).
// You'll have to modify this as needed.
odaDataSet.setQueryText(" select STEDA.TEDA_ID, STBST.LANGTEXT" +
    " from STEDA, STBST" +
    " where STEDA.ZUSATZ_1 = 'MATRIX'" +
    " and STBST.TBST_ID = STEDA.TEDA_ID");

// Tell the DataEngine about the new objects.
de.defineDataSource(odaDataSource);
de.defineDataSet(odaDataSet);

// Now execute the query:
// This seems overly complicated, but hey: it works.
var queryDefinition = new QueryDefinition();
queryDefinition.setDataSetName(odaDataSet.getName());
queryDefinition.setAutoBinding(true);
var pq = de.prepare(queryDefinition);
var qr = pq.execute(null);

var rowcount = 0;
var elementFactory = reportContext.getDesignHandle().getElementFactory();
var ri = qr.getResultIterator();

// Our application is using the query to generate a layout structure
// into an (already existing) placeholder element "Layout MATRIX".
var containerGrid = reportContext.getDesignHandle().findElement("Layout MATRIX");

// Iterate through the query results.
while (ri.next()) {
  // Get the actual values of the query output columns.
  var tedaId = ri.getString("TEDA_ID");
  var langtext = ri.getString("LANGTEXT");
  // log.info("langtext: " + langtext);
  rowcount++;
  // Do something with the current result row.
  ... myModifyLayout(containerGrid, tedaId, langtext); ...
}

// Cleanup
ri.close();
qr.close();
de.shutdown();

// You may want to save the modified design file while developing.
// That way you can check the results in the Report Designer.
if (false) {
  reportContext.getDesignHandle().saveAs("c:/temp/modified.rptdesign");
}
