I am using the ffmpeg Lambda layer to get the duration and channel count from an audio file, then storing these details in variables to use later in my code.
Can anyone spot/tidy this code so it only outputs the actual value and not one prepended with a comma?
const { spawnSync } = require("child_process");
const fs = require('fs');
const https = require('https');

exports.handler = async (event) => {
  const source_url = 'https://upload.wikimedia.org/wikipedia/commons/b/b2/Bell-ring.flac';
  const target_path = '/tmp/test.flac';

  async function downloadFile() {
    return new Promise((resolve, reject) => {
      const file = fs.createWriteStream(target_path);
      https.get(source_url, function (response) {
        response.pipe(file);
        // Resolve only once the file has finished writing,
        // otherwise ffprobe may read a partial download.
        file.on('finish', () => {
          console.log('file_downloaded!');
          resolve();
        });
      }).on('error', reject);
    });
  }

  await downloadFile();

  const duration = spawnSync("/opt/bin/ffprobe", [
    target_path,
    "-show_entries", "stream=duration",
    "-select_streams", "a",
    "-of", "compact=p=0:nk=1",
    "-v", "0"
  ]);

  const channel = spawnSync("/opt/bin/ffprobe", [
    target_path,
    "-show_entries", "stream=channels",
    "-select_streams", "a",
    "-of", "compact=p=0:nk=1",
    "-v", "0"
  ]);

  var durations = duration.output.toString('utf8');
  console.log(durations);
  var channels = channel.output.toString('utf8');
  console.log(channels);

  /*const response = {
    statusCode: 200,
    //body: JSON.stringify([channel.output.toString('utf8')])
    body: 'Complete'
  };
  return response;*/
};
I'm just not sure where these comma values are coming from, and I need them as number values for comparison functions later in the code.
It uses this easy Lambda layer, with no external modules required:
https://github.com/serverlesspub/ffmpeg-aws-lambda-layer
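A note on where the comma comes from: spawnSync exposes output as the array [stdin, stdout, stderr], and calling toString() on an array joins its elements with commas; the first element is null, hence the leading comma. Reading stdout directly and parsing it gives plain numbers. A minimal sketch of that change:

// spawnSync's `output` is [stdin, stdout, stderr]; Array#toString()
// joins those with commas, which is where the leading comma comes from.
// Read `stdout` directly and parse it to get usable numbers.
const durationSeconds = parseFloat(duration.stdout.toString('utf8').trim());
const channelCount = parseInt(channel.stdout.toString('utf8').trim(), 10);
console.log(durationSeconds, channelCount); // plain numbers, ready for comparisons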
I'm creating a generator that uses your Google catch-all domain to generate a list of email accounts. The way I'm going about it is that a function generates a random first and last name from an array and merges them with the catch-all domain, so the result would be fname + lname + domain = johndoe@domain.com. But for some reason I'm getting an error. The terminal says "fetch is not defined", but when I define it with the node-fetch package (const fetch = require('node-fetch');), it then says "fetch is not a function". I was attempting to use the built-in Fetch API because the script I'm basing this on instructed to do so; after the terminal said it wasn't defined, I tried using the node-fetch package to define fetch in hopes of fixing it, but no luck either. Does anyone have a solution for why I'm getting both "fetch is not a function" and "fetch is not defined"?
const prompt = require("prompt-sync")({ sigint: true });
const fs = require("fs").promises;
const request = require('request');
// const fetch = require('node-fetch');
const random_useragent = require('random-useragent');
const { Webhook, MessageBuilder } = require('discord-webhook-node');
const puppeteer = require('puppeteer-extra'); // was missing; needed for puppeteer.use()
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
puppeteer.use(StealthPlugin());

(async () => {
  const browser = await puppeteer.launch({
    headless: false,
    executablePath: `/Applications/Google Chrome.app/Contents/MacOS/Google Chrome`,
    userDataDir: `/Users/bran_d0_n/Library/Application Support/Google/Chrome/Default`,
    ignoreHTTPSErrors: true,
    ignoreDefaultArgs: ['--enable-automation'],
    args: [
      `--disable-blink-features=AutomationControlled`,
      `--enable-blink-features=IdleDetection`,
      `--window-size=1920,1080`,
      `--disable-features=IsolateOrigins,site-per-process`,
      `--blink-settings=imagesEnabled=true`
    ]
  });

  //------------------ Random Password Generator Function ------------------//
  function generatePassword() {
    let pass = '';
    let str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
      'abcdefghijklmnopqrstuvwxyz0123456789##$';
    for (let i = 1; i <= 8; i++) {
      // Math.random() * str.length already yields an in-range index;
      // the original "+ 1" could step past the end of the string.
      var char = Math.floor(Math.random() * str.length);
      pass += str.charAt(char);
    }
    return pass;
  }

  //------------------ First & Last Name Generator Function ------------------//
  // fetchData was defined twice; only the version with error handling is
  // kept, since the second definition overwrote the first anyway.
  async function fetchData(url) {
    try {
      const response = await fetch(url);
      if (!response.ok) {
        throw new Error('Network Response Invalid');
      }
      return response.json();
    } catch (error) {
      console.error('Unable To Fetch Data:', error);
    }
  }

  function fetchNames(nameType) {
    return fetchData(`https://www.randomlists.com/data/names-${nameType}.json`);
  }

  function pickRandom(list) {
    return list[Math.floor(Math.random() * list.length)];
  }

  async function generateName(gender) {
    try {
      const response = await Promise.all([
        fetchNames(gender || pickRandom(['male', 'female'])),
        fetchNames('surnames')
      ]);
      const [firstNames, lastNames] = response;
      const firstName = pickRandom(firstNames.data);
      const lastName = pickRandom(lastNames.data);
      return `${firstName} ${lastName}`;
    } catch (error) {
      console.error('Unable To Generate Name:', error);
    }
  }

  console.log('Loading Browser...');

  // Account Values
  var bDayval = '01/05/22' + (Math.floor((Math.random() * (99 - 55)) + 55)).toString();
  var passwordVal = generatePassword();
  // generateName() is async, so the results must be awaited
  var fnameVal = await generateName();
  var lnameVal = await generateName();
  var info;
  var themessage;
  var phoneNum;
  var userpass;
})();
Loading and configuring the module
node-fetch from v3 is an ESM-only module - you are not able to import it with require().
If you cannot switch to ESM, please use v2 which remains compatible with CommonJS. Critical bug fixes will continue to be published for v2.
You should either use
import fetch from 'node-fetch';
(Remember to add "type": "module" to the package.json)
Or install the older version
npm install node-fetch@2
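If you need to stay on CommonJS without pinning to v2, the node-fetch README also suggests loading v3 through a dynamic import(), which works from require()-based code. A minimal sketch:

// Dynamic import() works from CommonJS, so node-fetch v3 can be
// loaded lazily on first use without converting the project to ESM.
const fetch = (...args) =>
  import('node-fetch').then(({ default: fetch }) => fetch(...args));

// Usage is unchanged afterwards:
fetch('https://www.randomlists.com/data/names-male.json')
  .then((res) => res.json())
  .then((json) => console.log(json.data.length));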
I am currently trying to get the page count of a Word document in OpenXML format. I have been able to get the XML structure of the document into a readable format, but I can't seem to find where the page count property is. Any guidance would be appreciated.
UPDATE:
You can access the page count and other metadata through the docProps/app.xml file. All you have to do is separate and extract the data you want. I got the page count by doing this:
const XMLData = fs.readFileSync(data, { encoding: "utf-8" });
let pageCount = XMLData.split("<Pages>")
  .join(",")
  .split("</Pages>")
  .join(",")
  .split(",")[1];
const fs = require("fs");
const path = require("path");
const axios = require("axios");
const unzipper = require("unzipper"); // was missing; used below

let noRepeatDocs = ['somewebsite.com/somedocument.docx'];

const writeTheFile = async (data) => {
  fs.writeFileSync("read_word_doc", data);
};

const unzipTheFile = async (data) => {
  fs.createReadStream(data)
    .pipe(unzipper.Parse())
    .on("entry", function (entry) {
      const fileName = entry.path;
      const type = entry.type;
      const size = entry.vars.uncompressedSize;
      if (fileName === "word/document.xml") {
        entry.pipe(fs.createWriteStream("./output"));
      } else {
        entry.autodrain();
      }
    });
};

const getWordBuffer = async (arr) => {
  for (const wordDocLink of arr) {
    const response = await axios({
      url: wordDocLink,
      method: "GET",
      responseType: "arraybuffer",
    });
    const data = response.data;
    await writeTheFile(data);
    await unzipTheFile("./read_word_doc");
  }
};

getWordBuffer(noRepeatDocs);
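Tying the update back to the unzip step: to get the page count you would extract docProps/app.xml instead of word/document.xml and read its <Pages> element. A minimal sketch, assuming the same downloaded file name as above:

const fs = require("fs");
const unzipper = require("unzipper");

// Extract docProps/app.xml (which holds <Pages>) instead of word/document.xml.
fs.createReadStream("./read_word_doc")
  .pipe(unzipper.Parse())
  .on("entry", (entry) => {
    if (entry.path === "docProps/app.xml") {
      entry.buffer().then((buf) => {
        const xml = buf.toString("utf-8");
        // A regex is a simpler way to grab the value than chained splits.
        const match = xml.match(/<Pages>(\d+)<\/Pages>/);
        console.log("Page count:", match ? Number(match[1]) : "not found");
      });
    } else {
      entry.autodrain();
    }
  });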
I have a cloud function receiving a JSON string on a Pub/Sub topic.
The goal is to extract some data into a new JSON string, then parse it as JSONL, and finally stream it to Google Cloud Storage.
I notice that sometimes the files contain data and sometimes they do not.
The Pub/Sub side is working fine and data is coming into this cloud function just fine.
I tried adding some async/awaits where it seemed they might fit, but I'm afraid it has to do with the buffer stream; both are topics I have trouble getting my head around.
What could be the issue?
const stream = require('stream');
const { Storage } = require('@google-cloud/storage');

// Initiate the source
const bufferStream = new stream.PassThrough();

// Creates a client
const storage = new Storage();

// save stream to bucket
const toBucket = (message, filename) => {
  // Write your buffer
  bufferStream.end(Buffer.from(message));
  const myBucket = storage.bucket(process.env.BUCKET);
  const file = myBucket.file(filename);
  // Pipe the 'bufferStream' into a 'file.createWriteStream' method.
  bufferStream.pipe(file.createWriteStream({
    validation: 'md5',
  }))
    .on('error', (err) => { console.error(err); })
    .on('finish', () => {
      // The file upload is complete.
      console.log(`${filename} is uploaded`);
    });
};

// extract correct fields
const extract = (entry) => ({
  id: entry.id,
  status: entry.status,
  date_created: entry.date_created,
  discount_total: entry.discount_total,
  discount_tax: entry.discount_tax,
  shipping_total: entry.shipping_total,
  shipping_tax: entry.shipping_tax,
  total: entry.total,
  total_tax: entry.total_tax,
  customer_id: entry.customer_id,
  payment_method: entry.payment_method,
  payment_method_title: entry.payment_method_title,
  transaction_id: entry.transaction_id,
  date_completed: entry.date_completed,
  billing_city: entry.billing.city,
  billing_state: entry.billing.state,
  billing_postcode: entry.billing.postcode,
  coupon_lines_id: entry.coupon_lines.id,
  coupon_lines_code: entry.coupon_lines.code,
  coupon_lines_discount: entry.coupon_lines.discount,
  coupon_lines_discount_tax: entry.coupon_lines.discount_tax,
});

// format json to jsonl
const format = async (message) => {
  let jsonl;
  try {
    // extract only the necessary fields
    const jsonMessage = await JSON.parse(message);
    const rows = await jsonMessage.map((row) => {
      const extractedRow = extract(row);
      return `${JSON.stringify(extractedRow)}\n`;
    });
    // join all lines as one string with no join symbol
    jsonl = rows.join('');
    console.log(jsonl);
  } catch (e) {
    console.error('jsonl conversion failed');
  }
  return jsonl;
};

exports.jsonToBq = async (event, context) => {
  const message = Buffer.from(event.data, 'base64').toString();
  const { filename } = event.attributes;
  console.log(filename);
  const jsonl = await format(message, filename);
  toBucket(jsonl, filename);
};
It's fixed by moving the bufferStream const into the toBucket function.
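For completeness, a sketch of that fix: creating the PassThrough inside toBucket gives every invocation a fresh stream (a module-level stream is already ended after the first call, which would explain the intermittently empty files). Returning a promise also lets the handler await the upload before the function instance is paused:

const stream = require('stream');
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();

// Create a fresh PassThrough per call; a shared one is already
// ended after the first invocation, producing empty files.
const toBucket = (message, filename) => new Promise((resolve, reject) => {
  const bufferStream = new stream.PassThrough();
  const file = storage.bucket(process.env.BUCKET).file(filename);
  bufferStream
    .pipe(file.createWriteStream({ validation: 'md5' }))
    .on('error', reject)
    .on('finish', () => {
      console.log(`${filename} is uploaded`);
      resolve();
    });
  bufferStream.end(Buffer.from(message));
});

// In the handler, await the upload so the function doesn't
// return before the write completes:
// await toBucket(jsonl, filename);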
I need to make GET requests to an external server in a loop, but making all the calls at once can hit rate limits, so I want to space the calls inside the loop every 5 or 10 minutes or so. Is there a way to achieve this?
Here is my code:
const util = require('util');
const StravaClientService = require("../../strava/client.service");
const StravaActivityService = require("../../strava/activity.service");
const _ = require("underscore");
const fs = require("fs");
const AWS = require("aws-sdk");

AWS.config.update({
  region: "us-east-1",
});

var docClient = new AWS.DynamoDB.DocumentClient();

module.exports = (router) => {
  router.get("/streams/:id", async (req, res, done) => {
    const userc = req.user;
    const access_token = userc.access_token;
    const ids = [
      4401422821,
      4401416494,
      4401413107,
    ];
    const stravaClient = StravaClientService.getClient(access_token);
    const activityService = StravaActivityService(stravaClient);

    var params = {
      TableName: "run-id",
      Key: {
        "id": userc.stravaId,
      }
    };
    docClient.get(params, function (err, data) {
      if (err) {
        console.log("Error", err);
      } else {
        console.log("Success", data.Item.json);
        ids.map((id) => setTimeout(activityService.streamActivity, 5000, id)); //data.Item.json
      }
    });
  });
};
Basically, setInterval is enough in your scenario. However, you could try node-schedule, which helps with managing cron jobs.
Use the index to offset each call, so they fire one after another instead of all at once:
ids.map((id, index) => setTimeout(() => activityService.streamActivity(id), (5 + index) * 60000)); //data.Item.json
Use setInterval:
setInterval(() => {
  ids.map((id) => setTimeout(activityService.streamActivity, 5000, id));
}, 5 * 60 * 1000); // every 5 minutes
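Another pattern worth considering is a plain sequential loop with an awaited delay, which guarantees the spacing regardless of how long each call takes. A sketch, assuming activityService.streamActivity returns a promise (wrap it first if it uses callbacks):

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function streamAllActivities(ids, activityService) {
  for (const id of ids) {
    await activityService.streamActivity(id);
    // Wait 5 minutes before the next call to stay under the rate limit.
    await delay(5 * 60 * 1000);
  }
}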
I have created a JavaScript file which uses twit (npm) to get a stream of tweets, based on certain rules, to retrieve tweets in real time:
const Twit = require('twit')
const notifier = require('node-notifier');
const open = require('open');
const franc = require('franc')

const apikey = 'XXX'
const apiSecretKey = 'XXX'
const accessToken = 'XXX'
const accessTokenSecret = 'XXX'

var T = new Twit({
  consumer_key: apikey,
  consumer_secret: apiSecretKey,
  access_token: accessToken,
  access_token_secret: accessTokenSecret,
});

(function getTweets() {
  // //1. GET RECENT TWEETS
  //T.get('search/tweets', { q: '#tesla since:2020-04-15', count: 100 }, function(err, data, response) {
  //  const tweets = data.statuses
  //  // .map(tweet => `LANG: ${franc(tweet.text)} : ${tweet.text}`) //CHECK LANGUAGE
  //  .map(tweet => tweet.text)
  //  .filter(tweet => tweet.toLowerCase().includes('elon'));
  //  console.log(tweets);
  //})

  // //2. REAL TIME MONITORING USING STREAM (HASHTAG)
  var stream = T.stream('statuses/filter', { track: '#travel' })
  stream.on('tweet', function (tweet) {
    console.log(tweet.text);
    console.log('Language: ' + franc(tweet.text));
    console.log('------');
  })

  // 3. REAL TIME MONITORING USING STREAM (LOCATION)
  //var sanFrancisco = [ '-122.75', '36.8', '-121.75', '37.8' ]
  //var stream = T.stream('statuses/filter', { locations: sanFrancisco })

  //SHOW NOTIFICATION FOR EACH RECEIVED TWEET
  stream.on('tweet', function (tweet) {
    console.log(tweet.text);
    let url = `https://twitter.com/${tweet.user.screen_name}/status/${tweet.id_str}`
    notifier.notify({
      title: tweet.user.name,
      message: tweet.text
    });
    notifier.on('click', async function (notifierObject, options, event) {
      console.log('clicked');
      await open(url);
    });
  })
})();
My issue is that I want to take the returned objects and display them on a standalone page in my React app. I have tried wrapping this code in a component, but I am having no luck.
I don't know how Twit works and I have only tested a little React, but I would try the following:
Define
function getTweets() {
  return new Promise((resolve, reject) => {
    T.stream('statuses/filter', { track: '#travel' }).on('tweet', resolve);
  });
}
and then use
let tweets = await getTweets();
inside the React component code.
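Building on that, here is a hypothetical component sketch. It assumes the tweets reach the browser through some EventEmitter-like object (twit runs in Node, so in practice you would relay tweets from a small server, e.g. over a socket.io connection, rather than ship API keys to the client). The shape is: subscribe in useEffect, append each tweet to state, render the list:

import React, { useEffect, useState } from 'react';

// Hypothetical sketch: `tweetSource` is any object with on()/off() that
// emits 'tweet' events, e.g. a socket.io client relaying the Twit stream.
function TweetFeed({ tweetSource }) {
  const [tweets, setTweets] = useState([]);

  useEffect(() => {
    // Subscribe once on mount; append each incoming tweet to state.
    const onTweet = (tweet) => setTweets((prev) => [...prev, tweet]);
    tweetSource.on('tweet', onTweet);
    // Unsubscribe when the component unmounts.
    return () => tweetSource.off('tweet', onTweet);
  }, [tweetSource]);

  return (
    <ul>
      {tweets.map((tweet) => (
        <li key={tweet.id_str}>{tweet.text}</li>
      ))}
    </ul>
  );
}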