I was able to connect to this API using the following JavaScript code:
function get_info() {
  const fs = require('fs');
  const request = require('request');

  // Read the macaroon file and base64-encode it for the request header.
  var macaroonFile = '/Path_to_Folder/access.macaroon';
  var abc = fs.readFileSync(macaroonFile);
  var macaroon = Buffer.from(abc).toString("base64");
  console.log(macaroon);

  let options = {
    url: 'https://localhost:2000/v1/getinfo',
    // Work-around for self-signed certificates.
    rejectUnauthorized: false,
    json: true,
    headers: {
      'macaroon': macaroon,
    },
  };

  request.get(options, function(error, response, body) {
    console.log(body);
  });
}
However, when I adapt this code to Google Apps Script as below, I keep getting this error:
Exception: Bad request: https://localhost:2000/v1/getinfo.
function getinfo() {
  var url = 'https://localhost:2000/v1/getinfo';
  // ↓ access.macaroon encoded to base64.
  var macaroon = 'AgELYy1saWdodG5pbmcCN1RodSBOb3YgMTIgMjByMCAyMToyNjozOCBHTVQAMDkwMCAoSmBwYW4gU3RhbmRhcmQgVGltZSkAAAYg0lCjv2MeZJQ20NeB+L92W0mGHER92YGxEpTgYPaIct0=';
  var options = {};
  options.headers = {"Authorization": "Basic " + macaroon};
  var response = UrlFetchApp.fetch(url, options);
  var json = response.getContentText();
  return json;
}
Can someone help me figure out what the problem could be? Thank you in advance.
I'm trying to send an image from my front-end script to my server.
Front-end script:
var img_data = canvas.toDataURL('image/jpeg'); // contains the screenshot as a base64 data URL
// Insert here POST request to send image to server
And I'm trying to accept the data on the backend and store it in req.files so that I can access it like this:
const get_image = (req, res) => {
  const File = req.files.File.tempFilePath;
};
How can I send the image to the server and then read it back like in the example above?
Your img_data is a base64 string, which you can send to the server directly in a POST request,
e.g.
await fetch('/api/path', {
  method: 'POST',
  headers: { "content-type": "application/json" },
  body: JSON.stringify({ file: img_data })
});
On your backend, you can convert this string to binary and save it to a file.
var fs = require('fs');

// Assumes an Express app with a JSON body parser whose size limit is large
// enough for base64-encoded images, e.g. app.use(express.json({ limit: '10mb' })).
app.post('/api/path', async (req, res) => {
  const img = req.body.file;
  // Split the data URL into its MIME subtype and base64 payload.
  var regex = /^data:.+\/(.+);base64,(.*)$/;
  var matches = img.match(regex);
  var ext = matches[1];
  var data = matches[2];
  var buffer = Buffer.from(data, 'base64'); // file buffer
  // ... do whatever you want with the buffer
  fs.writeFileSync('imagename.' + ext, buffer); // if you do not need to save to file, you can skip this step
  // ... return res to client
});
You have to convert it to a Blob first and then append it to a FormData object. The form becomes the body of the request that you send to the server.
canvas.toBlob(function(blob) {
  var form = new FormData(),
      request = new XMLHttpRequest();
  form.append("image", blob, "filename.png");
  request.open("POST", "/upload", true);
  request.send(form);
}, "image/png");
My goal is to insert the result of a POST request into BigQuery using a Google Cloud Function.
My problem is that the Cloud Function is inserting null values into my table, so it is not able to read the parameters from the POST request.
Here is my POST request:
<script>
  var event_category = 'action';
  var event_name = 'click';
  var page_url = 'test';
  var request = new XMLHttpRequest();
  request.open('POST', 'url to my cloud function');
  request.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8');
  params = {
    event_category: event_category,
    event_name: event_name,
    page_url: page_url
  };
  request.send(JSON.stringify(params));
  console.log(JSON.stringify(params));
</script>
Code of the Google Cloud Function:
const bigquery = require('@google-cloud/bigquery')();

var result = "";

function insertHandler(err, apiResponse) {
  result.writeHead("204",
    "No Content",
    {
      "access-control-allow-origin": "urlofmywebsite",
      "access-control-allow-methods": "GET, POST, OPTIONS",
      "access-control-allow-headers": "content-type, accept",
      "access-control-max-age": 10, // Seconds.
      "content-length": 0
    }
  );
  return result.end();
}

exports.loadBQ = function loadBQ(req, res) {
  result = res;
  var dataset = bigquery.dataset("google_analytics_test");
  var table = dataset.table("google_tag_manager");
  var event_category;
  var event_name;
  var page_url;
  try {
    event_category = req.body.event_category;
    event_name = req.body.event_name;
    page_url = req.body.page_url;
    var row = {
      json: {
        event_category: event_category,
        event_name: event_name,
        page_url: page_url
      }
    };
    var options = {
      raw: true
    };
    table.insert(row, options, insertHandler);
  } catch (e) {
    insertHandler(true, false);
  }
};
As said earlier, the problem resides in reading the POST request from the Google Cloud Function; I am not sure how to get past that.
Instead of req.body, use req.query, and pass the variables through the URL. For an HTTP-triggered Google Cloud Function, that looks like this: https://YOUR_REGION-YOUR_PROJECT_ID.cloudfunctions.net/FUNCTION_NAME?event_category=CATEGORY&event_name=EVENT&page_url=URL
When I tried to reproduce this using your code, the variables were not being assigned, so they ended up undefined; after changing req.body to req.query, I was able to get the proper values.
Here is the example code for the Google Cloud Function:
var event_category;
var event_name;
var page_url;
event_category = req.query.event_category;
event_name = req.query.event_name;
page_url = req.query.page_url;
console.log("===VALUES===");
console.log(event_category);
console.log(event_name);
console.log(page_url);
After that, you should see the values logged in the Google Stackdriver Logging page.
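The answer above only shows the function side; a minimal client-side sketch to match, assuming the same three variables from the question (encodeURIComponent guards against unsafe characters in the values):
// Build the query string by hand and send it in the URL instead of the body.
var params = 'event_category=' + encodeURIComponent(event_category) +
             '&event_name=' + encodeURIComponent(event_name) +
             '&page_url=' + encodeURIComponent(page_url);
var request = new XMLHttpRequest();
request.open('POST', 'url to my cloud function' + '?' + params);
request.send();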
There is a way to process an actual POST without having to resort to GET; see https://cloud.google.com/functions/docs/writing/http#writing_http_helloworld-nodejs:
const escapeHtml = require('escape-html');

/**
 * Responds to an HTTP request using data from the request body parsed according
 * to the "content-type" header.
 *
 * @param {Object} req Cloud Function request context.
 * @param {Object} res Cloud Function response context.
 */
exports.helloContent = (req, res) => {
  let name;

  switch (req.get('content-type')) {
    // '{"name":"John"}'
    case 'application/json':
      name = req.body.name;
      break;

    // 'John', stored in a Buffer
    case 'application/octet-stream':
      name = req.body.toString(); // Convert buffer to a string
      break;

    // 'John'
    case 'text/plain':
      name = req.body;
      break;

    // 'name=John' in the body of a POST request (not the URL)
    case 'application/x-www-form-urlencoded':
      name = req.body.name;
      break;
  }

  res.status(200).send(`Hello ${escapeHtml(name || 'World')}!`);
};
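Note that the original snippet sends a JSON.stringify body under an application/x-www-form-urlencoded header, so neither branch parses it as intended. A minimal client-side sketch that would hit the application/json branch above (an assumption about the intended fix):
var request = new XMLHttpRequest();
request.open('POST', 'url to my cloud function');
// Send the header that matches the JSON.stringify body.
request.setRequestHeader('Content-Type', 'application/json');
request.send(JSON.stringify({
  event_category: 'action',
  event_name: 'click',
  page_url: 'test'
}));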
I'm trying to save the JSON from the GET request to an object and then upload the object to the Cloudant DB.
Does anyone know what I'm doing wrong?
var request = require("request");
var EventEmitter = require("events").EventEmitter;
var body = new EventEmitter();
var sample = cloudant.db.use('sample')
request("http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?cs=IBM&hc=1000&rs=1001", function(error, response, data) {
body.data = data;
body.emit('update');
sample.insert({ crazy: true }, body.data, function(err, body, header{
// hmm
});
console.log('hmm');
});
You have a malformed URL for the request, and the code for inserting into the Cloudant database is written incorrectly:
var request = require("request");
var EventEmitter = require("events").EventEmitter;
var body = new EventEmitter();
var sample = cloudant.db.use('sample')
request("http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?cs=IBM&hc=1000&rs=1001", function(error, response, data) {
body.data = data;
body.emit('update');
//implement code for inserting in cloudant db for homework
});
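For reference, a minimal sketch of what that insert could look like, assuming the nano-style Cloudant client where insert takes the document first and the callback last (the field name response_data is a made-up example):
request("http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?cs=IBM&hc=1000&rs=1001", function(error, response, data) {
  // Wrap the response text in a document and insert it.
  sample.insert({ response_data: data }, function(err, body, header) {
    if (err) {
      return console.log('insert failed: ' + err.message);
    }
    console.log('inserted document with id ' + body.id);
  });
});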
I am trying to implement this (.NET) example in Node.js using request and iconv-lite (it's an HTTP web service that requires URL requests to be encoded in ISO-8859-1):
WebClient wc = new WebClient();
var encoding = Encoding.GetEncoding("iso-8859-1");
var url = new StringBuilder();
url.Append("https://url.com?");
url.Append("&param=" + HttpUtility.UrlEncode(foo, encoding));
wc.Encoding = encoding;
return wc.DownloadString(url.ToString());
The problem is with encoding the URL (it doesn't work). I am trying to do the same GET request, in which the URL must be encoded as ISO-8859-1. However, by doing something like this:
var request = require('request');
var iconv = require('iconv-lite');

var options = {
  url: iconv.encode(url, 'ISO-8859-1').toString(),
  method: 'get',
  headers: {
    'Content-Type': 'application/x-www-form-urlencoded; charset=ISO-8859-1'
  }
};

request(options, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    console.log(body);
  } else {
    console.log('err: ' + error);
  }
});
It's still not sent as an ISO-8859-1 string. Any clues on how to get this to work like the .NET example?
I think the issue is decoding the response, not encoding the URL. Try this:
var https = require('https'),
    qs = require('querystring'),
    iconv = require('iconv-lite');

// ...

var url = 'https://url.com/?' + qs.stringify({ param: 'foo' });
https.get(url, function(res) {
  if (res.statusCode === 200) {
    var bufs = [], bufsize = 0;
    res.on('data', function(data) {
      bufs.push(data);
      bufsize += data.length;
    }).on('end', function() {
      // Collect the raw bytes, then decode them from ISO-8859-1.
      var buffer = Buffer.concat(bufs, bufsize),
          body = iconv.decode(buffer, 'iso-8859-1');
      console.log('Body: ' + body);
    });
    return;
  } else
    console.log('Non-OK status code: ' + res.statusCode);
  res.resume();
});
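If the service also needs the query parameters percent-encoded as ISO-8859-1 bytes, the way HttpUtility.UrlEncode(foo, encoding) does in the .NET example, qs.stringify alone won't do it (it encodes UTF-8). A minimal sketch of a latin1 percent-encoder using iconv-lite (the function name is made up for illustration):
var iconv = require('iconv-lite');

// Percent-encode a string using its ISO-8859-1 byte values.
function urlEncodeLatin1(str) {
  var buf = iconv.encode(str, 'iso-8859-1');
  var out = '';
  for (var i = 0; i < buf.length; i++) {
    var b = buf[i];
    // Leave unreserved ASCII characters (A-Z, a-z, 0-9, - . _ ~) as-is.
    if ((b >= 0x30 && b <= 0x39) || (b >= 0x41 && b <= 0x5A) ||
        (b >= 0x61 && b <= 0x7A) || b === 0x2D || b === 0x2E ||
        b === 0x5F || b === 0x7E) {
      out += String.fromCharCode(b);
    } else {
      out += '%' + (b < 16 ? '0' : '') + b.toString(16).toUpperCase();
    }
  }
  return out;
}

// e.g. urlEncodeLatin1('café') === 'caf%E9'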
I'm using Node's request module.
The response I get is gzipped or otherwise encoded.
How can I:
1. Build the request so the response is not encoded?
2. Decode the response?
The data is coming from http://api.stackexchange.com.
var zlib = require('zlib');
var stackRequest = require('request');

var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';

stackRequest({
  method: 'GET',
  uri: 'http://api.stackexchange.com/2.1/questions?key=' + apikey +
       '&site=stackoverflow&fromdate=' + fromdate + '&order=desc&' +
       'sort=activity&tagged=' + tagged + '&filter=default'
}, function(err, response, body) {
  console.log(response.body); // How can I decode this?
});
The encoding has nothing to do with request. Stack Overflow's API always returns GZip-encoded data, as explained in the API documentation. You need to use Node's zlib module to unzip the contents. This is a simple example:
var zlib = require('zlib');

// Note: for gunzip to work, the request must be made with `encoding: null`
// so that `body` arrives as a Buffer rather than a string.

// Other code
, function(err, response, body) {
  zlib.gunzip(body, function(err, data) {
    console.log(data);
  });
});
The main downside of this approach is that it forces the request module to buffer the entire response content into one Buffer as body. Instead, you should normally use Node's stream system to pipe the data from the request directly through the unzipping library, so that you use less memory. You'll still need to join the parts together to parse the JSON, but it is still better.
var zlib = require('zlib');
var request = require('request');

var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';

var compressedStream = request('http://api.stackexchange.com/2.1/questions?' +
    'key=' + apikey + '&site=stackoverflow&fromdate=' + fromdate +
    '&order=desc&sort=activity&tagged=' + tagged + '&filter=default');

// Pipe the compressed response straight through a gunzip stream.
var decompressedStream = compressedStream.pipe(zlib.createGunzip());

var chunks = [];
decompressedStream.on('data', function(chunk) {
  chunks.push(chunk);
});
decompressedStream.on('end', function() {
  var body = Buffer.concat(chunks);
  var data = JSON.parse(body);
  // Do your thing
});
First, set Accept-Encoding: identity as a request header. If Stack Exchange still doesn't send the data as regular UTF-8, then it's a bug on their end.
Secondly, you want to set the encoding to UTF-8 so the response isn't a buffer.
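For what it's worth, the request module can also handle the decompression itself: it accepts a gzip option that sets the accept-encoding header and transparently unzips the response body. A minimal sketch against the same Stack Exchange endpoint as above:
var request = require('request');

request({
  url: 'http://api.stackexchange.com/2.1/questions?site=stackoverflow',
  gzip: true,  // advertise and transparently decode gzip/deflate
  json: true   // parse the JSON body
}, function(err, response, body) {
  if (err) return console.error(err);
  // The Stack Exchange API wraps results in an `items` array.
  console.log(body.items.length + ' questions');
});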