I am somewhat new to coding and recently created this script in order to pull data from Zoom and push it to Google Drive via API. I am trying to deploy this as a Google Cloud Function, but when running it there and console logging each step in the process, it seems like the uploadFile function, specifically the drive.files.create method, is being skipped. Every other step is being console logged, but neither err nor res is being logged after the drive.files.create method. Google Cloud Functions does not show any errors; instead it shows OK and that the function took 1500ms to execute. It works fine on my local machine; I am only having issues in Cloud Functions. Any suggestions on how to get this to act right would be super helpful. Thank you!
const axios = require("axios");
require("dotenv").config();
const stream = require("stream");
const request = require("request");
const { google } = require("googleapis");
const KEYFILEPATH = "./credentials.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];
const auth = new google.auth.GoogleAuth({
keyFile: KEYFILEPATH,
scopes: SCOPES,
});
let today = new Date().toISOString();
let zoomAccessToken;
let zoomDownloadUrl;
///////////////////////////////////////////////////////////////// Searching for latest Town Hall recording in Google.
const searchFile = async (auth) => {
const service = google.drive({ version: "v3", auth });
const files = [];
try {
const res = await service.files.list({
corpora: "drive",
includeItemsFromAllDrives: true,
supportsAllDrives: true,
driveId: "XXXXXXXXXXXXXXXX",
q: '"XXXXXXXXXXXXXXX" in parents',
fields: "nextPageToken, files(id, name)",
spaces: "drive",
});
Array.prototype.push.apply(files, res.data.files);
const filesArray = res.data.files;
const filesName = filesArray.map((x) => x.name).sort().reverse()[0];
console.log(filesName);
return filesName;
} catch (err) {
throw err;
}
};
///////////////////////////////////////////////////////////////// Get Zoom OAuth access token.
const getAccessToken = async () => {
return axios({
method: "post",
url: `https://zoom.us/oauth/token?grant_type=account_credentials&account_id=${process.env.ZOOM_ACCOUNT_ID}`,
headers: {
Authorization: "Basic" +new Buffer.from(process.env.ZOOM_CLIENT_ID + ":" + process.env.ZOOM_CLIENT_SECRET).toString("base64"),
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's data.
const getRecordingData = async () => {
const token = await getAccessToken();
zoomAccessToken = await token.data.access_token;
return axios({
method: "get",
url: "https://api.zoom.us/v2/meetings/XXXXXXXXX/recordings",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${zoomAccessToken}`,
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's date.
const getRecordingDate = async () => {
const recording = await getRecordingData();
const lastRecordingDate = await recording.data.start_time;
const recordingDateFormatted = `${lastRecordingDate.substring(0,4)}.${lastRecordingDate.substring(5, 7)}.${lastRecordingDate.substring(8,10)} - Town Hall.mp4`;
return recordingDateFormatted;
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's download link.
const zoomDownloadLink = async () => {
const recording = await getRecordingData();
zoomDownloadUrl = `${recording.data.recording_files[0].download_url}?access_token=${zoomAccessToken}`;
return zoomDownloadUrl;
};
///////////////////////////////////////////////////////////////// Upload data from latest Town Hall recording's download link to Google Drive.
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
///////////////////////////////////////////////////////////////// Compares Town Hall files in Google Drive and Zoom. If different, run uploadFile function.
exports.townHall = async () => {
const townHallFile = await searchFile(auth);
const lastRecordingDate = await getRecordingDate();
if (townHallFile != lastRecordingDate) {
await uploadFile(auth);
} else {
console.log("No Recording Today");
}
};
Your uploadFile function kicks off drive.files.create with a callback and then returns immediately, without waiting for the response. The Cloud Function's promise therefore resolves while the upload is still in flight, the runtime shuts the instance down, and the callback never fires, which is why neither err nor res gets logged.
So to fix that, you just need to await the result of the API call and return it,
like:
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
// Await the returned promise (no callback), so uploadFile's own promise
// only resolves once the upload has actually completed.
const res = await drive.files.create({
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
});
console.log("File Id: ", res.data.id);
return res;
};
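Note that with the googleapis client, passing a node-style callback means the method does not return a promise, so awaiting it would have no effect; that is why the snippet above drops the callback and awaits the promise form instead. If you would rather keep the callback style, here is a minimal sketch wrapping it in a Promise yourself (createFile is a hypothetical helper name, not part of the googleapis API):

const createFile = (drive, fileMetadata, media) =>
  new Promise((resolve, reject) => {
    // Callback form: settle the wrapper promise when the upload finishes.
    drive.files.create(
      { resource: fileMetadata, media: media, fields: "id", supportsAllDrives: true },
      (err, res) => (err ? reject(err) : resolve(res.data.id))
    );
  });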
Also, something important when calling APIs inside a Cloud Function is the timeout. Check that your Cloud Function's timeout is long enough for the API call to respond; the default is only 60 seconds.
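For example, if you deploy with the gcloud CLI, the --timeout flag controls this; something like gcloud functions deploy townHall --trigger-http --timeout=540s (the trigger and value here are just an illustration for your setup).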
Also, from the caller you can explicitly wait on the returned promise (this only helps if uploadFile itself returns a promise, as above):
const result = uploadFile(auth);
const _response = await Promise.all([result]);
I use the Firestore REST API in Next.js getServerSideProps to fetch a Firestore doc. It works as expected, but every 5:30 minutes getServerSideProps gets retriggered without reloading or navigating (is this only on the dev environment?), and then the result of the REST API is simply
[ { readTime: '2022-10-28T14:24:01.348248Z' } ]
The document key is missing and no data is present, which breaks the server function (the app misbehaves without showing an error).
The fetching function looks like this:
const { GoogleToken } = require('gtoken');
const { documentToJson } = require('./helpers');
const getConfig = require('next/config').default;
const FIRESTORE = getConfig().serverRuntimeConfig.firestore;
export async function fetchWebsitePropsByPath(path: string) {
const body = JSON.stringify({
structuredQuery: {
from: [{ collectionId: 'websites' }],
where: {
compositeFilter: {
op: 'AND',
filters: [
{
fieldFilter: {
field: {
fieldPath: 'path',
},
op: 'ARRAY_CONTAINS',
value: {
stringValue: path,
},
},
},
],
},
},
limit: 1,
},
});
// Authenticate with Google
const gtoken = new GoogleToken({
key: FIRESTORE.key,
email: FIRESTORE.email,
scope: ['https://www.googleapis.com/auth/datastore'], // or space-delimited string of scopes
eagerRefreshThresholdMillis: 5 * 60 * 1000,
});
const getToken = () =>
new Promise((resolve, reject) => {
gtoken.getToken((err, token) => {
if (err) {
reject(err);
}
resolve(token);
});
});
const token: any = await getToken();
let headers = new Headers();
headers.append('Authorization', 'Bearer ' + token.access_token);
const res = await fetch(`${FIRESTORE.api}:runQuery`, {
method: 'POST',
headers,
body: body,
});
const rawData = await res.json();
const id = rawData[0].document.name.split('/').pop();
const docData = documentToJson(rawData[0].document.fields);
docData.id = id;
return docData;
}
I would like to know whether I can prevent the refetching every 5:30 minutes (if it is not specific to the dev environment), and why the REST API returns nothing here.
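For what it's worth, a runQuery response containing only a readTime entry is what Firestore returns when no document matches the query, so a defensive guard before dereferencing would at least keep the server function from breaking (a minimal sketch against the code above):

const rawData = await res.json();
// When no document matches, entries carry only readTime and no document field.
if (!rawData[0] || !rawData[0].document) {
  return null; // or whatever empty-result handling fits getServerSideProps
}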
Reading through this resource, https://docs.metamask.io/guide/provider-migration.html#summary-of-breaking-changes, it seems it is still possible to interact with a contract using window.ethereum,
e.g. (taken from the above link):
const transactionHash = await ethereum.request({
method: 'eth_sendTransaction',
params: [
{
to: '0x...',
'from': '0x...',
value: '0x...',
// And so on...
},
],
});
but I cannot figure out how to connect to the contract.
What do I replace this with?
contract = await new window.web3.eth.Contract(ABI,ADDRESS);
I am using the web3 library via this resource:
https://cdn.jsdelivr.net/npm/web3@latest/dist/web3.min.js
My simple script is below:
var account = null;
var contract = null;
const ABI = "the abi is here"
const ADDRESS = "contract address is here";
async function asyncCall() {
console.log('async');
if (window.ethereum) {
try {
const accounts = await window.ethereum.request({ method: 'eth_requestAccounts' });
account = accounts[0];
document.getElementById('wallet-address').textContent = account;
// this is the bit i'm stuck on currently
contract = await new window.web3.eth.Contract(ABI,ADDRESS);
//
var mintButton = document.getElementById('mint');
mintButton.addEventListener("click", function(){
console.log('minting');
contract.methods.mint(account).send({from:account,value:"100"});
});
var totalsupply = await contract.methods.totalsupply().call();
document.getElementById('ttt').textContent = totalsupply;
} catch (error) {
if (error.code === 4001) {
console.log('User rejected request');
}
console.log(error);
}
}
}
asyncCall();
Since MetaMask no longer injects window.web3 into web pages, you can instead do this before making the call to get the contract:
const Web3 = require('web3');
// Create Web3 instance
const web3 = new Web3(window.ethereum); // Where window.ethereum is your provider.
You can then get your contract by calling:
const contract = new web3.eth.Contract(ABI, ADDRESS);
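A quick usage sketch with the instance above, using the method names as they appear in the question's contract (your ABI may differ):

// Read call (no gas) and a state-changing send, mirroring the question's code.
const totalsupply = await contract.methods.totalsupply().call();
await contract.methods.mint(account).send({ from: account, value: "100" });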
Try this:
let provider = window.ethereum;
if (provider) {
await provider.request({ method: "eth_requestAccounts" });
this.web3 = new Web3(provider);
this.contract = new this.web3.eth.Contract(abi_v2, address);
}
I have built a simple Telegram bot using Telegraf and used this code to log the specific information that I need:
bot.on('text', (ctx, next) => {
console.log(`[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`);
return next();
});
and as a result, the log is something like this:
[text] 563789142 xMA3x Mohammed Abbas /start
Now I want to save that information in a Google spreadsheet. I followed this tutorial and was able to push hard-coded (quoted) values into the spreadsheet, but I don't know how to push the console.log result into the spreadsheet.
Anyway, here is my code:
const { Telegraf } = require('telegraf');
const bot = new Telegraf("xyz")
const { google } = require("googleapis");
const keys = require("./Keys.json")
bot.on('text', (ctx, next) => {
console.log(`[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`);
return next();
});
bot.start((ctx) => ctx.reply('Welcome'))
bot.help((ctx) => ctx.reply('Send me a sticker'))
bot.on('sticker', (ctx) => ctx.reply('👍'))
bot.hears('hi', (ctx) => ctx.reply('Hey there'))
const client = new google.auth.JWT(
keys.client_email,
null,
keys.private_key,
["https://www.googleapis.com/auth/spreadsheets"]
);
client.authorize(function(err){
if(err){
console.log(err);
return;
} else {
console.log("connected");
gsrun(client);
}
});
async function gsrun(cl){
const gsapi = google.sheets({version:"v4", auth: cl});
const updateOptions = {
spreadsheetId: "xyz",
range: "Sheet1",
valueInputOption: "RAW",
insertDataOption: "INSERT_ROWS",
resource: {
values:[
["this is working"]
]}
};
let res = await gsapi.spreadsheets.values.append(updateOptions);
console.log(res);
}
bot.launch()
so as you see the "this is working" is pushed successfully in the spreadsheet, but when I try to add another value like ctx.message.chat.id it give me ReferenceError: ctx is not defined
so how I can make the google sheet API recognize the telegraf commands? or to be more general, how I can save the *ctx.message.chat.id,ctx.from.username..etc * info (that come form the telegram) into the spreedhsset ?
ctx lives within your bot hooks, so to save the information to the sheet, you have to call your Google Sheets function inside the relevant hook.
Possible updates:
const { Telegraf } = require('telegraf');
const bot = new Telegraf("xyz")
const { google } = require("googleapis");
const keys = require("./Keys.json")
const client = new google.auth.JWT(
keys.client_email,
null,
keys.private_key,
["https://www.googleapis.com/auth/spreadsheets"]
);
async function gsrun(cl, data){
const gsapi = google.sheets({version:"v4", auth: cl});
const updateOptions = {
spreadsheetId: "xyz",
range: "Sheet1",
valueInputOption: "RAW",
insertDataOption: "INSERT_ROWS",
resource: {
values:[
[data]
]}
};
let res = await gsapi.spreadsheets.values.append(updateOptions);
console.log(res);
}
const saveMetadataToSheets = (data) => {
client.authorize(function(err){
if(err){
console.log(err);
return;
} else {
console.log("connected");
gsrun(client, data);
}
});
}
bot.on('text', (ctx, next) => {
const data = `[text] ${ ctx.message.chat.id } ${ ctx.from.username } ${ ctx.message.chat.first_name+ " " + ctx.message.chat.last_name } ${ ctx.message.text }`
console.log(data);
// pass any data that you need to save to the sheets
saveMetadataToSheets(data)
return next();
});
bot.start((ctx) => ctx.reply('Welcome'))
bot.help((ctx) => ctx.reply('Send me a sticker'))
bot.on('sticker', (ctx) => ctx.reply('👍'))
bot.hears('hi', (ctx) => ctx.reply('Hey there'))
bot.launch()
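As a side note, client.authorize() also returns a promise when called without a callback, so saveMetadataToSheets can be flattened if you prefer (a sketch of the same flow, not required for the fix above):

const saveMetadataToSheets = async (data) => {
  try {
    await client.authorize(); // promise form of the same authorization step
    await gsrun(client, data);
  } catch (err) {
    console.log(err);
  }
};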
So I am writing a Lambda that takes in some form data via a straight POST through API Gateway (testing with Postman for now) and then sends that image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, but I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg. The code below does not reflect my attempts with Base64 encoding/decoding, seeing as they all failed. Here is what I have so far -
Sample request in Postman:
{
image: (uploaded .jpg/.png),
metadata: {tag: 'iPhone'}
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')
const s3 = new AWS.S3();
exports.handler = async (event) => {
const form = multipart.parse(event, false)
const s3_response = await upload_s3(form)
return {
statusCode: '200',
body: JSON.stringify({ data: s3_response })
}
};
const upload_s3 = async (form) => {
const uniqueId = Math.random().toString(36).substr(2, 9);
const key = `${uniqueId}_${form.image.filename}`
const request = {
Bucket: 'bucket-name',
Key: key,
Body: form.image.content,
ContentType: form.image.contentType,
}
try {
const data = await s3.putObject(request).promise()
return data
} catch (e) {
console.log('Error uploading to S3: ', e)
return e
}
}
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload it to S3. Here is some code for that.
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')
const s3 = new AWS.S3()
module.exports = {
upload: (file) => {
return new Promise((resolve, reject) => {
const key = `${Date.now()}.${file.extension}`
const bodyStream = fs.createReadStream(file.path)
const params = {
Bucket: process.env.S3_BucketName,
Key: key,
Body: bodyStream,
ContentType: file.type
}
s3.upload(params, (err, data) => {
if (err) {
return reject(err)
}
return resolve(data)
}
)
})
}
}
form parser function
const busboy = require('busboy')
module.exports = {
parse: (req, temp) => {
const ctype = req.headers['Content-Type'] || req.headers['content-type']
let parsed_file = {}
return new Promise((resolve) => {
try {
const bb = new busboy({
headers: { 'content-type': ctype },
limits: {
fileSize: 31457280,
files: 1,
}
})
bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
const stream = temp.createWriteStream()
const ext = filename.split('.')[1]
console.log('parser -- ext ', ext)
parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
file.pipe(stream)
}).on('finish', () => {
resolve(parsed_file)
}).on('error', err => {
console.error(err)
resolve({ err: 'Form data is invalid: parsing error' })
})
if (req.end) {
req.pipe(bb)
} else {
bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
}
return bb.end()
} catch (e) {
console.error(e)
return resolve({ err: 'Form data is invalid: parsing error' })
}
})
}
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')
exports.handler = async (event, context) => {
temp.track()
const parsed_file = await form_parser(event, temp)
console.log('index -- parsed form', parsed_file)
const result = await s3_upload(parsed_file)
console.log('index -- s3 result', result)
temp.cleanup()
return {
statusCode: '200',
body: JSON.stringify(result)
}
}
The above edited code is a combination of other code and a GitHub repo I found that is trying to achieve the same result. Even with this solution the file is still corrupted.
Figured out this issue. The code works perfectly fine - it was an issue with API Gateway. You need to go into the API Gateway settings, set the Binary Media Type to multipart/form-data, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to figure out how to send images via form data to a Lambda.
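One related detail that may save someone a step: once multipart/form-data is registered as a binary media type, API Gateway base64-encodes the request body and sets isBase64Encoded on the event, so a parser has to decode accordingly (a sketch; the busboy parser above already branches on req.isBase64Encoded):

// Decode the raw body before handing it to a multipart parser.
const bodyBuffer = event.isBase64Encoded
  ? Buffer.from(event.body, 'base64')
  : Buffer.from(event.body, 'binary');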
Can this somehow, in the future, damage the flow it belongs to?
I have a Lambda that works behind an API Gateway WebSocket endpoint.
It simply takes a clientId and a message payload, queries all connections in DynamoDB for that clientId (multi-device realtime dashboard frontend), and updates all interested users.
It works fine when tested through wscat on the command line, but it is buggy in a real-world browser using the JS WebSocket API or the C# WebSocket API.
Does this exception have anything to do with it?
const AWS = require("aws-sdk");
let dynamo = new AWS.DynamoDB.DocumentClient();
require("aws-sdk/clients/apigatewaymanagementapi");
const ORDERS_TABLE = "ordersTable";
const successfullResponse = {
statusCode: 200,
body: "everything is alright"
};
module.exports.sendMessageHandler = (event, context, callback) => {
console.log(event);
sendMessageToAllConnectedClientDevices(event)
.then(data => {
console.log("sucesso", data);
callback(null, successfullResponse);
})
.catch(err => {
console.log("erro: ", err);
callback(null, JSON.stringify(err));
});
};
const sendMessageToAllConnectedClientDevices = async event => {
try {
const body = JSON.parse(event.body);
const { clientId } = body;
console.log(
"handler.sendMessageToAllConnectedClientDevices.clientId: ",
clientId
);
const connectionIds = await getConnectionIds(clientId);
return await Promise.all(
connectionIds.Items.map(connectionId => {
send(event, connectionId.connectionId);
})
);
} catch (error) {
console.log("erro sendMessageToAllConnectedClientDevices");
return error;
}
};
const getConnectionIds = async clientId => {
console.log("handler.getConnectionIds.clientId: ", clientId);
const params = {
TableName: ORDERS_TABLE,
// IndexName: "client_gsi",
FilterExpression: "clientId = :cliend_id",
// KeyConditionExpression: "clientId = :cliend_id",
ProjectionExpression: "connectionId",
ExpressionAttributeValues: {
":cliend_id": clientId
}
};
console.log("handler.getConnectionIds.params: ", JSON.stringify(params));
const data = await dynamo.scan(params).promise();
return data;
};
const send = async (event, connectionId) => {
const body = JSON.parse(event.body);
const postData = body.data;
const endpoint =
event.requestContext.domainName + "/" + event.requestContext.stage;
const apigwManagementApi = new AWS.ApiGatewayManagementApi({
apiVersion: "2018-11-29",
endpoint: endpoint
});
const params = {
ConnectionId: connectionId,
Data: postData
};
return await apigwManagementApi.postToConnection(params).promise();
};
ERROR Unhandled Promise Rejection
I think the problem is with API Gateway. Check how you are handling the information passed through to the Lambda function (because the browser sends some extra information compared to a command-line call).
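One thing worth checking in the code above (an observation, not a definitive diagnosis): the .map callback in sendMessageToAllConnectedClientDevices never returns the send(...) promise, so Promise.all resolves immediately and any postToConnection failure, such as a 410 GoneException from a browser tab that already disconnected, surfaces as an unhandled promise rejection. Returning the promise and catching per-connection errors keeps the rejection contained:

return await Promise.all(
  connectionIds.Items.map((item) =>
    send(event, item.connectionId).catch((err) => {
      // A 410 GoneException means the client already disconnected; log and continue.
      console.log("post failed for", item.connectionId, err.statusCode);
    })
  )
);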