I have created a Javascript file which uses twit(npm) to get a stream of tweets based on certain rules to retrieve tweets in real time
const Twit = require('twit')
const notifier = require('node-notifier');
const open = require('open');
const franc = require('franc')
const apikey = 'XXX'
const apiSecretKey = 'XXX'
const accessToken = 'XXX'
const accessTokenSecret = 'XXX'
var T = new Twit({
consumer_key: apikey,
consumer_secret: apiSecretKey,
access_token: accessToken,
access_token_secret: accessTokenSecret,
});
( function getTweets() {
// //1. GET RECENT TWEETS
//T.get('search/tweets', { q: '#tesla since:2020-04-15', count: 100 }, function(err, data, response) {
// const tweets = data.statuses
// // .map(tweet => `LANG: ${franc(tweet.text)} : ${tweet.text}`) //CHECK LANGUAGE
// .map(tweet => tweet.text)
// .filter(tweet => tweet.toLowerCase().includes('elon'));
// console.log(tweets);
// })
// //2. REAL TIME MONITORING USING STREAM (HASHTAG)
var stream = T.stream('statuses/filter', { track: '#travel' })
stream.on('tweet', function (tweet) {
console.log(tweet.text);
console.log('Language: ' + franc(tweet.text));
console.log('------');
})
// 3. REAL TIME MONITORING USING STREAM (LOCATION)
//var sanFrancisco = [ '-122.75', '36.8', '-121.75', '37.8' ]
//var stream = T.stream('statuses/filter', { locations: sanFrancisco })
//SHOW NOTIFICATION FOR EACH RECEIVED TWEET
stream.on('tweet', function (tweet) {
console.log(tweet.text);
let url = `https://twitter.com/${tweet.user.screen_name}/status/${tweet.id_str}`
notifier.notify({
title: tweet.user.name,
message: tweet.text
});
notifier.on('click', async function(notifierObject, options, event) {
console.log('clicked');
await open(url);
});
})
})();
My issue is that I want to take the returned objects, and display it on a standalone page in my react app. I have tried wrapping this code in a component, but I am having no luck
I don't know how Twit works and I have only tested a little React, but I would try the following:
Define
function getTweets() {
return new Promise((resolve, reject) => {
T.stream('statuses/filter', { track: '#travel' }).on('tweet', resolve);
});
}
and then use
let tweets = await getTweets();
inside the React component code.
Related
I am somewhat new to coding and recently created this script in order to pull data from Zoom and push it to Google Drive via API. I am trying to push this to a Google Cloud Function, but when running it in a Cloud Function and console logging each step in the process, it seems like the uploadFile function, specifically the drive.files.create method, is being skipped. Every other step is being console logged, but neither the err or res is being logged after the drive.files.create method. Google Cloud Functions does not show errors, instead it shows OK and that the function took 1500ms to execute. It works fine on my local machine, I am only having issues in Cloud Functions. Any suggestions on how to get this to act right would be super helpful. Thank you!
const axios = require("axios");
require("dotenv").config();
const stream = require("stream");
const request = require("request");
const { google } = require("googleapis");
const KEYFILEPATH = "./credentials.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];
const auth = new google.auth.GoogleAuth({
keyFile: KEYFILEPATH,
scopes: SCOPES,
});
let today = new Date().toISOString();
let zoomAccessToken;
let zoomDownloadUrl;
///////////////////////////////////////////////////////////////// Searching for latest Town Hall recording in Google.
const searchFile = async (auth) => {
const service = google.drive({ version: "v3", auth });
const files = [];
try {
const res = await service.files.list({
corpora: "drive",
includeItemsFromAllDrives: true,
supportsAllDrives: true,
driveId: "XXXXXXXXXXXXXXXX",
q: '"XXXXXXXXXXXXXXX" in parents',
fields: "nextPageToken, files(id, name)",
spaces: "drive",
});
Array.prototype.push.apply(files, res.files);
const filesArray = res.data.files;
const filesName = filesArray.map((x) => x.name).sort().reverse()[0];
console.log(filesName);
return filesName;
} catch (err) {
throw err;
}
};
///////////////////////////////////////////////////////////////// Get Zoom OAuth access token.
const getAccessToken = async () => {
return axios({
method: "post",
url: `https://zoom.us/oauth/token?grant_type=account_credentials&account_id=${process.env.ZOOM_ACCOUNT_ID}`,
headers: {
Authorization: "Basic" +new Buffer.from(process.env.ZOOM_CLIENT_ID + ":" + process.env.ZOOM_CLIENT_SECRET).toString("base64"),
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's data.
const getRecordingData = async () => {
const token = await getAccessToken();
zoomAccessToken = await token.data.access_token;
return axios({
method: "get",
url: "https://api.zoom.us/v2/meetings/XXXXXXXXX/recordings",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${zoomAccessToken}`,
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's date.
const getRecordingDate = async () => {
const recording = await getRecordingData();
const lastRecordingDate = await recording.data.start_time;
const recordingDateFormatted = `${lastRecordingDate.substring(0,4)}.${lastRecordingDate.substring(5, 7)}.${lastRecordingDate.substring(8,10)} - Town Hall.mp4`;
return recordingDateFormatted;
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's download link.
const zoomDownloadLink = async () => {
const recording = await getRecordingData();
zoomDownloadUrl = `${recording.data.recording_files[0].download_url}?access_token=${zoomAccessToken}`;
return zoomDownloadUrl;
};
///////////////////////////////////////////////////////////////// Upload data from latest Town Hall recording's download link to Google Drive.
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
///////////////////////////////////////////////////////////////// Compares Town Hall files in Google Drive and Zoom. If different, run uploadFile function.
exports.townHall = async () => {
const townHallFile = await searchFile(auth);
const lastRecordingDate = await getRecordingDate();
if (townHallFile != lastRecordingDate) {
await uploadFile(auth);
} else {
console.log("No Recording Today");
}
};
As you are calling an API inside a cloud function which is an async function but does not have a return statement, it will only execute the function but doesn't wait for the response, because the drive.files.create call is running.
So to fix that just need to await the result of the API. Just add
return await statement on the API call
like:
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
return await drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
Also, something important when you are calling APIs inside cloud functions is the time out. Check on your CF time out is enough to wait for the API call response.
Also, you can use the Promise function to force wait for the response:
const result = uploadFile(aut);
const _response = await Promise.all(result);
I have a cloud function receiving a json string in a pubsub topic.
The goal is to extracts some data into a new json string.
Next parse it as JSONL.
And finally stream it to Google Cloud Storage.
I notice that sometimes the files seem to contain data and sometimes they do not.
The pubsub is working fine and data is coming into this cloud function just fine.
I tried adding some async awaits where I seem it might fit but I am afraid it has do to with the bufferstream. Both topics on where I have trouble getting my head around.
What could be the issue?
const stream = require('stream');
const { Storage } = require('#google-cloud/storage');
// Initiate the source
const bufferStream = new stream.PassThrough();
// Creates a client
const storage = new Storage();
// save stream to bucket
const toBucket = (message, filename) => {
// Write your buffer
bufferStream.end(Buffer.from(message));
const myBucket = storage.bucket(process.env.BUCKET);
const file = myBucket.file(filename);
// Pipe the 'bufferStream' into a 'file.createWriteStream' method.
bufferStream.pipe(file.createWriteStream({
validation: 'md5',
}))
.on('error', (err) => { console.error(err); })
.on('finish', () => {
// The file upload is complete.
console.log(`${filename} is uploaded`);
});
};
// extract correct fields
const extract = (entry) => ({
id: entry.id,
status: entry.status,
date_created: entry.date_created,
discount_total: entry.discount_total,
discount_tax: entry.discount_tax,
shipping_total: entry.shipping_total,
shipping_tax: entry.shipping_tax,
total: entry.total,
total_tax: entry.total_tax,
customer_id: entry.customer_id,
payment_method: entry.payment_method,
payment_method_title: entry.payment_method_title,
transaction_id: entry.transaction_id,
date_completed: entry.date_completed,
billing_city: entry.billing.city,
billing_state: entry.billing.state,
billing_postcode: entry.billing.postcode,
coupon_lines_id: entry.coupon_lines.id,
coupon_lines_code: entry.coupon_lines.code,
coupon_lines_discount: entry.coupon_lines.discount,
coupon_lines_discount_tax: entry.coupon_lines.discount_tax,
});
// format json to jsonl
const format = async (message) => {
let jsonl;
try {
// extract only the necessary
const jsonMessage = await JSON.parse(message);
const rows = await jsonMessage.map((row) => {
const extractedRow = extract(row);
return `${JSON.stringify(extractedRow)}\n`;
});
// join all lines as one string with no join symbol
jsonl = rows.join('');
console.log(jsonl);
} catch (e) {
console.error('jsonl conversion failed');
}
return jsonl;
};
exports.jsonToBq = async (event, context) => {
const message = Buffer.from(event.data, 'base64').toString();
const { filename } = event.attributes;
console.log(filename);
const jsonl = await format(message, filename);
toBucket(jsonl, filename);
};
it's fixed by moving the bufferstream const into the tobucket function.
I am trying to use Twitter's API with node.js using async/await (which I admit I am new to) but I am struggling to get to the next cursor value.
Why does my getFollowers function bellow always returns before the await block?
require('dotenv').config();
const Twitter = require('twitter');
const client = new Twitter({
consumer_key: process.env.API_KEY,
consumer_secret: process.env.API_KEY_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
});
const getFollowers = async (screen_name, count, cursor) => {
console.log("Cursor: " + cursor);
const params = {
screen_name: screen_name,
count: count,
cursor: cursor
};
const promise = await client.get('followers/list', params)
.then(data => {
console.log("This promise is never executed...");
return data.next_cursor;
})
.catch(err => console.error(err));
return promise;
}
const main = async () => {
let cursor = -1;
while (cursor != 0) {
getFollowers(process.env.SCREEN_NAME, 200, cursor)
.then(next_cursor => {
cursor = next_cursor;
console.log("This promise is never executed either... " + cursor);
});
}
}
main();
With your .then statement in main(), you weren't awaiting for client.get() to resolve, but for data.next_cursor(). Therefore, promise of client.get() remained pending.
Instead, return the promise of client.get() as a in getFollowers(). This will make sure that when you call getFollowers().then() in main(), you are referring to client.get.
Edit:
Following the line of thought in the answer in this question, I have modified getFollowers(). It now includes a promise that is resolved when cursor hits the value of 0. Every other value, a request will be made.
I have a concern though with the rate limit of requests, which is set to 15 per 15 minutes. Since a new request is made for every non 0 next_cursor value, you'll reach this limit quite soon for accounts with many followers.
Also note that the data retrieved will be stored in an array. I am not sure what your use case exactly is.
const Twitter = require('twitter');
const client = new Twitter({
consumer_key: '',
consumer_secret: '',
bearer_token: ''
});
let output = [];
const getFollowers = (screen_name, count) => {
let cursor = -1;
const params = {
screen_name: screen_name,
count: count,
cursor: cursor
};
return new Promise((resolve, reject) => {
client.get('followers/list', params, function getData(err, data, response) {
if (err) reject(response.body);
output.push(data);
cursor = data.next_cursor;
if (cursor > 0) {
client.get('followers/list', params, getData);
}
if (cursor = 0) {
resolve('done');
}
});
});
};
const main = async () => {
await getFollowers('MozDevNet', 200);
console.log(output);
};
I gave up on the implementation using the Twitter package and switched to using axios instead.
require('dotenv').config();
const axios = require('axios');
const credentials = {
consumer_key: process.env.API_KEY,
consumer_secret: process.env.API_KEY_SECRET,
access_token_key: process.env.ACCESS_TOKEN,
access_token_secret: process.env.ACCESS_TOKEN_SECRET
};
const FOLLOWERS_LIST_ENDPOINT = "https://api.twitter.com/1.1/followers/list.json";
//documentation: https://developer.twitter.com/en/docs/authentication/oauth-2-0/application-only
const generateToken = async () => {
return process.env.BEARER_TOKEN;
}
//documentation: https://developer.twitter.com/en/docs/twitter-api/v1/accounts-and-users/follow-search-get-users/api-reference/get-followers-list
const getFollowers = async (screen_name, count, cursor) => {
let token = await generateToken();
let requestConfig = {
params: {
screen_name: screen_name,
count: count,
cursor: cursor,
include_user_entities: false
},
headers: {
Authorization: `Bearer ${token}`
}
};
let response = await axios.get(FOLLOWERS_LIST_ENDPOINT, requestConfig);
let users = response.data.users;
processUsers(users);
return response.data.next_cursor;
};
const processUsers = (users) => {
users.map(user => {
console.log(user.screen_name);
});
}
const main = async () => {
let cursor = -1;
while (cursor != 0) {
cursor = await getFollowers(process.env.SCREEN_NAME, 200, cursor);
}
}
main();
Can this anyhow in the feature damage the flow they belong to?
I have a lambda that works behind a API Gateway websocket endpoint.
This simply asks for a clientId and a message payload, query all connections on dynamo for that clientId (multi device realtime dashboard frontend) and updates all interested users.
It's working fine if you test trought "wscat" on command line but it is buggy on real world browser using js websocket api or c# websocket api.
Doest this exceptin has anything to do with it?
const AWS = require("aws-sdk");
let dynamo = new AWS.DynamoDB.DocumentClient();
require("aws-sdk/clients/apigatewaymanagementapi");
const ORDERS_TABLE = "ordersTable";
const successfullResponse = {
statusCode: 200,
body: "everything is alright"
};
module.exports.sendMessageHandler = (event, context, callback) => {
console.log(event);
sendMessageToAllConnectedClientDevices(event)
.then(data => {
console.log("sucesso", data);
callback(null, successfullResponse);
})
.catch(err => {
console.log("erro: ", err);
callback(null, JSON.stringify(err));
});
};
const sendMessageToAllConnectedClientDevices = async event => {
try {
const body = JSON.parse(event.body);
const { clientId } = body;
console.log(
"handler.sendMessageToAllConnectedClientDevices.clientId: ",
clientId
);
const connectionIds = await getConnectionIds(clientId);
return await Promise.all(
connectionIds.Items.map(connectionId => {
send(event, connectionId.connectionId);
})
);
} catch (error) {
console.log("erro sendMessageToAllConnectedClientDevices");
return error;
}
};
const getConnectionIds = async clientId => {
console.log("handler.getConnectionIds.clientId: ", clientId);
const params = {
TableName: ORDERS_TABLE,
// IndexName: "client_gsi",
FilterExpression: "clientId = :cliend_id",
// KeyConditionExpression: "clientId = :cliend_id",
ProjectionExpression: "connectionId",
ExpressionAttributeValues: {
":cliend_id": clientId
}
};
console.log("handler.getConnectionIds.params: ", JSON.stringify(params));
const data = await dynamo.scan(params).promise();
return data;
};
const send = async (event, connectionId) => {
const body = JSON.parse(event.body);
const postData = body.data;
const endpoint =
event.requestContext.domainName + "/" + event.requestContext.stage;
const apigwManagementApi = new AWS.ApiGatewayManagementApi({
apiVersion: "2018-11-29",
endpoint: endpoint
});
const params = {
ConnectionId: connectionId,
Data: postData
};
return await apigwManagementApi.postToConnection(params).promise();
};
ERROR Unhandled Promise Rejection
I think problem is with API Gateway, check how you are handling information passing through to Lambda function (because browser sends some extra information as compared to command line call)
I am using the ffmpeg Lambda layer to get the duration and channels from an audio file. I am then outputting these details to variables to use later in my code?
Can anyone spot/tidy this code so it only outputs the actual value and not one prepended with a comma
const { spawnSync } = require("child_process");
var fs = require('fs');
const https = require('https');
exports.handler = async (event) => {
const source_url = 'https://upload.wikimedia.org/wikipedia/commons/b/b2/Bell-ring.flac';
const target_path = '/tmp/test.flac';
async function downloadFile() {
return new Promise((resolve, reject) => {
const file = fs.createWriteStream(target_path);
const request = https.get(source_url, function(response) {
response.pipe(file);
console.log('file_downloaded!');
resolve();
});
});
}
await downloadFile();
const duration = spawnSync(
"/opt/bin/ffprobe",
[
target_path,
"-show_entries",
"stream=duration",
"-select_streams",
"a",
"-of",
"compact=p=0:nk=1",
"-v",
"0"
]
);
const channel = spawnSync(
"/opt/bin/ffprobe",
[
target_path,
"-show_entries",
"stream=channels",
"-select_streams",
"a",
"-of",
"compact=p=0:nk=1",
"-v",
"0"
]
);
var durations = duration.output.toString('utf8');
console.log(durations);
var channels = channel.output.toString('utf8');
console.log(channels);
/*const response = {
statusCode: 200,
//body: JSON.stringify([channel.output.toString('utf8')])
body: 'Complete'
};
return response;*/
};
Just not sure where these comma values are coming from and I need these as number values for comparison functions later in the code.
It uses this easy Lambda layer with no external modules required
https://github.com/serverlesspub/ffmpeg-aws-lambda-layer