Outputting file details using ffprobe in ffmpeg AWS Lambda layer - javascript

I am trying to output the details of an audio file with ffmpeg using the ffprobe option, but it is just returning null at the moment. I have added the ffmpeg layer in Lambda. Can anyone spot why this is not working?
const { spawnSync } = require("child_process");
const { readFileSync, writeFileSync, unlinkSync } = require("fs");
const util = require('util');
var fs = require('fs');
let path = require("path");

exports.handler = (event, context, callback) => {
  spawnSync(
    "/opt/bin/ffprobe",
    [
      `var/task/myaudio.flac`
    ],
    { stdio: "inherit" }
  );
};
This is the official AWS Lambda layer I am using; it is a great project but a little lacking in documentation.
https://github.com/serverlesspub/ffmpeg-aws-lambda-layer

First of all, I would recommend using Node.js 8.10 over Node.js 6.10 (the latter will soon be EOL, although AWS is unclear on how long it will be supported).
Also, I would not use the old-style handler with a callback.
A working example is below. Since it downloads a file from the internet (I couldn't be bothered to create a deployment package with the file included), give it a bit more time to run.
const { spawnSync } = require('child_process');
const util = require('util');
var fs = require('fs');
let path = require('path');
const https = require('https');

exports.handler = async (event) => {
  const source_url = 'https://upload.wikimedia.org/wikipedia/commons/b/b2/Bell-ring.flac'
  const target_path = '/tmp/test.flac'

  async function downloadFile() {
    return new Promise((resolve, reject) => {
      const file = fs.createWriteStream(target_path);
      const request = https.get(source_url, function(response) {
        const stream = response.pipe(file)
        stream.on('finish', () => { resolve() })
      });
    });
  }

  await downloadFile()

  const test = spawnSync('/opt/bin/ffprobe', [
    target_path
  ]);
  console.log(test.output.toString('utf8'))

  const response = {
    statusCode: 200,
    body: JSON.stringify([test.output.toString('utf8')]),
  };
  return response;
}
NB! In production, be sure to generate a unique temporary file name, as the instances the Lambda function runs on are often shared from invocation to invocation; you don't want multiple invocations stepping on each other's files. When done, delete the temporary file, otherwise you might run out of free space on the instance executing your functions. The /tmp folder can hold 512 MB, so it can fill up fast if you work with many large FLAC files.
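For illustration, here is a rough sketch of unique temp-file handling in the handler above; crypto.randomBytes is just one way to get a unique name, and the uniqueTmpPath helper is mine, not part of the layer:
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

// Build a per-invocation file name so concurrent or reused containers
// never collide on the same /tmp path.
function uniqueTmpPath(extension) {
  return path.join('/tmp', `${crypto.randomBytes(8).toString('hex')}.${extension}`);
}

exports.handler = async (event) => {
  const target_path = uniqueTmpPath('flac');
  try {
    // download the file to target_path and run ffprobe on it, as above
  } finally {
    // Always clean up so repeated invocations don't fill the 512 MB /tmp.
    try { fs.unlinkSync(target_path); } catch (e) { /* already gone */ }
  }
};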

I'm not fully familiar with this layer; however, from looking at the git repo of the thumbnail-builder, it looks like child_process returns a promise, so you should be waiting for its result using .then(), otherwise it returns null because it doesn't wait for the result.
So try something like:
return spawnSync(
  "/opt/bin/ffprobe",
  [
    `var/task/myaudio.flac`
  ],
  { stdio: "inherit" }
).then(result => {
  return result;
})
.catch(error => {
  // handle error
});

How to connect any server using ssh in Cypress.io to run a command?

I know that this probably is not the best way to do this. I read the question with the same title here, but it did not solve my problem.
The question is: I have a server that will only produce the result I want if I run a command on it. So I want to write a test that checks the state of a page before and after I run that command. How do I do that?
I tried to use the simple-ssh package, but I keep getting this error while trying to read the ssh key file:
fs.readFileSync is not a function
My code currently looks like this:
import * as fs from 'fs';

let sshConfig = Cypress.config('ssh')
sshConfig.key = fs.readFileSync('path/to/key/file')

let SSH = require('simple-ssh');

Cypress.Commands.add('teste', () => {
  let ssh = new SSH(sshConfig)
  ssh.exec('echo', {
    args: ['$PATH'],
    out: function(stdout) {
      console.log(stdout);
    }
  }).start();
})
Other possibilities are welcome.
As Fody mentioned, there are Node.js functions present inside simple-ssh, so a task is needed.
This is the basic configuration.
It's a direct translation of what you have, but you would want to return something from the task. As it is, the console.log() goes to the terminal console, not the browser console.
cypress.config.js
const { defineConfig } = require('cypress')
const fs = require('fs')
const SSH = require('simple-ssh');

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      on('task', {
        ssh() {
          const sshConfig = config.ssh
          sshConfig.key = fs.readFileSync('path/to/key/file')
          const ssh = new SSH(sshConfig)
          ssh.exec('echo', {
            args: ['$PATH'],
            out: function(stdout) {
              console.log(stdout);
            }
          }).start();
          return null
        },
      })
    }
  }
})
test
Cypress.Commands.add('ssh', () => {
  cy.task('ssh')
})

cy.ssh()
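Following on from the note above that the console.log() only reaches the terminal: here is a minimal sketch of a task that resolves with the command output instead, assuming simple-ssh's out and exit callbacks behave as documented. The test then receives the output via cy.task('ssh').then(...).
// cypress.config.js - only the task portion is shown; the surrounding
// defineConfig/setupNodeEvents boilerplate stays the same as above.
on('task', {
  ssh() {
    const sshConfig = config.ssh
    sshConfig.key = fs.readFileSync('path/to/key/file')
    const ssh = new SSH(sshConfig)
    // Return a Promise so Cypress waits for the command to finish and the
    // test gets the collected stdout instead of it only being logged.
    return new Promise((resolve) => {
      let output = ''
      ssh.exec('echo', {
        args: ['$PATH'],
        out: (stdout) => { output += stdout },
        exit: () => resolve(output)
      }).start()
    })
  },
})

// In the test:
cy.task('ssh').then((output) => {
  expect(output).to.include('/usr/')  // hypothetical assertion on the output
})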
Try it with cy.readFile().
const SSH = require('simple-ssh');

Cypress.Commands.add('testSSH', () => {
  cy.readFile('path/to/key/file').then(key => {
    const sshConfig = Cypress.config('ssh')
    sshConfig.key = key
    const ssh = new SSH(sshConfig)
    ssh.exec('echo', {
      args: ['$PATH'],
      out: function(stdout) {
        console.log(stdout);
      }
    }).start()
  })
})
The problem is that fs is a Node.js library, and it cannot be used in the browser.
But you may find the same thing applies to simple-ssh. If so, you will have to shift the code into a task, where you can use any Node.js functions.

Sveltekit & Fleek (IPFS) import syntax problem?

I have managed to use Fleek to update IPFS via plain JavaScript. I am now trying to add this functionality to a clean install of a SvelteKit app. I think I am having trouble with the syntax around imports, but am not sure what I am doing wrong. When I click the button in index.svelte I get the following error:
Uncaught ReferenceError: require is not defined
uploadIPFS upload.js:3
listen index.mjs:412..........(I truncated the error here)
A few thoughts:
I am wondering if it works in plain JavaScript because it is being called in Node (running on the server), whereas in Svelte it is running on the client?
More Details
The index.svelte file looks like this
<script>
import {uploadIPFS} from '../IPFS/upload'
</script>
<button on:click={uploadIPFS}>
upload to ipfs
</button>
The upload.js file looks like this:
export const uploadIPFS = () => {
  const fleek = require('#fleekhq/fleek-storage-js');

  const apiKey = 'cZsQh9XV5+6Nd1+Bou4OuA==';
  const apiSecret = '';
  const data = 'pauls test load';

  const testFunctionUpload = async (data) => {
    const date = new Date();
    const timestamp = date.getTime();
    const input = {
      apiKey,
      apiSecret,
      key: `file-${timestamp}`,
      data
    };
    try {
      const result = await fleek.upload(input);
      console.log(result);
    } catch (e) {
      console.log('error', e);
    }
  };

  testFunctionUpload(data);
};
I have also tried using the other import syntax, and when I do I get the following error:
500
global is not defined....
The import with the other syntax is:
import fleekStorage from '#fleekhq/fleek-storage-js';

function uploadIPFS() {
  console.log('fleekStorage', fleekStorage)
};

export default uploadIPFS;
*I erased the API secret in the code above. In the future I will store these in a .env file.
Even more details (if you need them)
The file below will update IPFS and runs via the command
npm run upload
That file is below. For the version I used in Svelte, I simplified the file by removing all the file management and just loading a variable instead of a file (as in the example below).
const fs = require('fs');
const path = require('path');
const fleek = require('#fleekhq/fleek-storage-js');
require('dotenv').config()

const apiKey = process.env.FLEEK_API_KEY;
const apiSecret = process.env.FLEEK_API_SECRET;

const testFunctionUpload = async (data) => {
  const date = new Date();
  const timestamp = date.getTime();
  const input = {
    apiKey,
    apiSecret,
    key: `file-${timestamp}`,
    data,
  };
  try {
    const result = await fleek.upload(input);
    console.log(result);
  } catch(e) {
    console.log('error', e);
  }
}

// File management not used in my svelte version, to keep it simple
const filePath = path.join(__dirname, 'README.md');
fs.readFile(filePath, (err, data) => {
  if(!err) {
    testFunctionUpload(data);
  }
})

How to mock a function using Frisby and Jest to return custom response?

I'm trying to mock a function using Frisby and Jest.
Here are some details about my code:
dependencies
axios: "^0.26.0",
dotenv: "^16.0.0",
express: "^4.17.2"
devDependencies
frisby: "^2.1.3",
jest: "^27.5.1"
When I mock using Jest, the real response from the API is returned, but I don't want that. I want to return a fake result like this: { a: 'b' }.
How do I solve this?
I have the following code:
// (API Fetch file) backend/api/fetchBtcCurrency.js
const axios = require('axios');

const URL = 'https://api.coindesk.com/v1/bpi/currentprice/BTC.json';

const getCurrency = async () => {
  const response = await axios.get(URL);
  return response.data;
};

module.exports = {
  getCurrency,
};

// (Model using fetch file) backend/model/cryptoModel.js
const fetchBtcCurrency = require('../api/fetchBtcCurrency');

const getBtcCurrency = async () => {
  const responseFromApi = await fetchBtcCurrency.getCurrency();
  return responseFromApi;
};

module.exports = {
  getBtcCurrency,
};
// (My test file) /backend/__tests__/cryptoBtc.test.js
require("dotenv").config();
const frisby = require("frisby");

const URL = "http://localhost:4000/";

describe("Testing GET /api/crypto/btc", () => {
  beforeEach(() => {
    jest.mock('../api/fetchBtcCurrency');
  });

  it('Verify if returns correct response with status code 200', async () => {
    const fetchBtcCurrency = require('../api/fetchBtcCurrency').getCurrency;
    fetchBtcCurrency.mockImplementation(() => (JSON.stringify({ a: 'b' })));

    const defaultExport = await fetchBtcCurrency();
    expect(defaultExport).toBe(JSON.stringify({ a: 'b' })); // This assert works

    await frisby
      .get(`${URL}api/crypto/btc`)
      .expect('status', 200)
      .expect('json', { a: 'b' }); // Integration test with Frisby does not work correctly.
  });
});
Response [
  {
    // I hid the lines to save screen space.
  }
  ->>>>>>> does not contain provided JSON [ {"a":"b"} ]
];
This is a classic lost reference problem.
Since you're using Frisby, by looking at your test, it seems you're starting the server in parallel, correct? You first start your server with, say npm start, then you run your test with npm test.
The problem with that is: by the time your test starts, your server is already running. Since you started your server with the real fetchBtcCurrency.getCurrency, jest can't do anything from this point on. Your server will continue to point towards the real module, not the mocked one.
Check this illustration: https://gist.githubusercontent.com/heyset/a554f9fe4f34101430e1ec0d53f52fa3/raw/9556a9dbd767def0ac9dc2b54662b455cc4bd01d/illustration.svg
The reason the assertion on the import inside the test works is because that import is made after the mock replaces the real file.
You didn't share your app or server file, but if you are creating the server and listening in the same module, and those calls are "hanging on global" (i.e. made from the body of the script, not from inside a function), you'll have to split them. You'll need one file that creates the server (appending any routes/middleware/etc. to it), and a separate file that just imports the first one and starts listening.
For example:
app.js
const express = require('express');
const { getCurrency } = require('./fetchBtcCurrency');

const app = express()

app.get('/api/crypto/btc', async (req, res) => {
  const currency = await getCurrency();
  res.status(200).json(currency);
});

module.exports = { app }
server.js
const { app } = require('./app');

app.listen(4000, () => {
  console.log('server is up on port 4000');
});
Then, on your start script, you run the server file. But, on your test, you import the app file. You don't start the server in parallel. You'll start and stop it as part of the test setup/teardown.
This will give Jest the chance to replace the real module with the mocked one before the server starts listening (at which point Jest loses control over it).
With that, your test could be:
cryptoBtc.test.js
require("dotenv").config();
const frisby = require("frisby");
const URL = "http://localhost:4000/";
const fetchBtcCurrency = require('./fetchBtcCurrency');
const { app } = require('./app');
jest.mock('./fetchBtcCurrency')
describe("Testing GET /api/crypto/btc", () => {
let server;
beforeAll((done) => {
server = app.listen(4000, () => {
done();
});
});
afterAll(() => {
server.close();
});
it('Verify if returns correct response with status code 200', async () => {
fetchBtcCurrency.getCurrency.mockImplementation(() => ({ a: 'b' }));
await frisby
.get(`${URL}api/crypto/btc`)
.expect('status', 200)
.expect('json', { a: 'b'});
});
});
Note that the order of the imports doesn't matter. You can put the jest.mock call below the real import; Jest hoists mock calls so they run first.

How to upload a file into Firebase Storage from a callable https cloud function

I have been trying to upload a file to Firebase Storage using a callable Firebase Cloud Function.
All I am doing is fetching an image from a URL using axios and trying to upload it to Storage.
The problem I am facing is that I don't know how to save the response from axios and upload it to Storage.
First, how do I save the received file in the temp directory that os.tmpdir() creates?
Then, how do I upload it into Storage?
Here I am receiving the data as an arraybuffer, converting it to a Blob, and trying to upload that.
Here is my code. I think I am missing a major part.
If there is a better way, please recommend it. I've been looking through a lot of documentation and ended up with no clear solution. Please guide me. Thanks in advance.
const bucket = admin.storage().bucket();
const path = require('path');
const os = require('os');
const fs = require('fs');

module.exports = functions.https.onCall((data, context) => {
  try {
    return new Promise((resolve, reject) => {
      const {
        imageFiles,
        companyPIN,
        projectId
      } = data;

      const filename = imageFiles[0].replace(/^.*[\\\/]/, '');
      const filePath = `ProjectPlans/${companyPIN}/${projectId}/images/${filename}`; // Path I am trying to upload to in Firebase Storage
      const tempFilePath = path.join(os.tmpdir(), filename);
      const metadata = {
        contentType: 'application/image'
      };

      axios
        .get(imageFiles[0], { // URL for the image
          responseType: 'arraybuffer',
          headers: {
            accept: 'application/image'
          }
        })
        .then(response => {
          console.log(response);
          const blobObj = new Blob([response.data], {
            type: 'application/image'
          });
          return blobObj;
        })
        .then(async blobObj => {
          return bucket.upload(blobObj, {
            destination: tempFilePath // Here I am wrong: how to set the path of the downloaded blob file?
          });
        }).then(buffer => {
          resolve({ result: 'success' });
        })
        .catch(ex => {
          console.error(ex);
        });
    });
  } catch (error) {
    // unknown: 500 Internal Server Error
    throw new functions.https.HttpsError('unknown', 'Unknown error occurred. Contact the administrator.');
  }
});
I'd take a slightly different approach and avoid using the local filesystem at all, since it's just tmpfs and will cost you memory that your function is already using to hold the buffer/blob. It's simpler to skip it and write directly from that buffer to GCS using the save method on the GCS File object.
Here's an example. I've simplified out a lot of your setup, and I am using an HTTP function instead of a callable. Likewise, I'm using a public Stack Overflow image rather than your original URLs. In any case, you should be able to use this template and modify it back to what you need (e.g. change the prototype, remove the HTTP response, and replace it with the return value you need):
const functions = require('firebase-functions');
const axios = require('axios');
const admin = require('firebase-admin');
admin.initializeApp();

exports.doIt = functions.https.onRequest((request, response) => {
  const bucket = admin.storage().bucket();
  const IMAGE_URL = 'https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-logo.svg';
  const MIME_TYPE = 'image/svg+xml';

  return axios.get(IMAGE_URL, { // URL for the image
    responseType: 'arraybuffer',
    headers: {
      accept: MIME_TYPE
    }
  }).then(response => {
    console.log(response); // only to show we got the data, for debugging
    const destinationFile = bucket.file('my-stackoverflow-logo.svg');
    return destinationFile.save(response.data).then(() => { // note: defaults to a resumable upload
      return destinationFile.setMetadata({ contentType: MIME_TYPE });
    });
  }).then(() => { response.send('ok'); })
    .catch((err) => { console.log(err); })
});
As a commenter noted, in the above example the axios request itself makes an external network access, and you will need to be on the Blaze or Flame plan for that. However, that alone doesn't appear to be your current problem.
Likewise, this also defaults to a resumable upload, which the documentation does not recommend when you are doing large numbers of small (<10 MB) files, as there is some overhead.
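If you do end up uploading many small files, you can turn that off by passing an options object to save() (a small sketch; resumable is a documented option of the Cloud Storage Node.js client):
// Skip the resumable-upload session for small files to avoid its extra round trips.
return destinationFile.save(response.data, { resumable: false }).then(() => {
  return destinationFile.setMetadata({ contentType: MIME_TYPE });
});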
You asked how this might be used to download multiple files. Here is one approach. First, let's assume you have a function that returns a promise that downloads a single file given its URL (I've abridged this from the above, but it's basically identical except that the hard-coded IMAGE_URL becomes the filename parameter; note that it does not return a final result such as response.send(), and there's an implicit assumption that all the files share the same MIME_TYPE):
function downloadOneFile(filename) {
  const bucket = admin.storage().bucket();
  const MIME_TYPE = 'image/svg+xml';
  return axios.get(filename, ...)
    .then(response => {
      const destinationFile = ...
    });
}
Then, you just need to iteratively build a promise chain from the list of files. Let's say they are in imageUrls. Once built, return the entire chain:
let finalPromise = Promise.resolve();
imageUrls.forEach((item) => {
  finalPromise = finalPromise.then(() => downloadOneFile(item));
});

// if needed, add a final .then() section for the actual function result
return finalPromise.catch((err) => { console.log(err) });
Note that you could also build an array of the promises and pass them to Promise.all(); that would likely be faster since you would get some parallelism, but I wouldn't recommend it unless you are very sure all of the data will fit in your function's memory at once. Even with this approach, you need to make sure the downloads can all complete within your function's timeout.
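For reference, the Promise.all() variant of the loop above would look roughly like this (the same caveats about memory and the function timeout apply):
// Start every download at once; the chain rejects as soon as any one download fails.
return Promise.all(imageUrls.map((item) => downloadOneFile(item)))
  .then(() => {
    // if needed, produce the actual function result here
  })
  .catch((err) => { console.log(err); });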

Unable to pass a dynamically generated array of cacheable links to Service Worker

How do I pass a programmatically populated array of links to a service worker script for caching?
I am generating the array in cachelist.js like this:
const fs = require('fs');
const path = require('path');
require('dotenv').config();

var cachedItems = ['/'];

function walkSync(currentDirPath, callback) {
  fs.readdirSync(currentDirPath).forEach(function (name) {
    var filePath = path.join(currentDirPath, name);
    var stat = fs.statSync(filePath);
    if (stat.isFile()) {
      callback(filePath, stat);
    } else if (stat.isDirectory()) {
      walkSync(filePath, callback);
    }
  });
}

walkSync('./pages/', function(filePath, stat) {
  cachedItem = filePath.substr(5);
  if(cachedItem.indexOf('_') == -1) {
    cachedItems.push(cachedItem);
  }
});

module.exports = { cachedItems };
And then I'm trying to use this cachedItems array in /offline/serviceWorker.js as follows:
const URLSTOCACHE = require("../cachelist.js");

const CACHE_NAME = "version-0.0.46";

// Call install event
self.addEventListener("install", e => {
  e.waitUntil(
    caches
      .open(CACHE_NAME)
      .then(cache => cache.addAll(URLSTOCACHE))
      .then(() => self.skipWaiting())
  );
});

// Call fetch event
self.addEventListener("fetch", e => {
  e.respondWith(
    fetch(e.request).catch(() => caches.match(e.request))
  )
});
However, this fails with an "Uncaught ReferenceError: require is not defined." Any workaround?
require isn't a built-in browser-side utility. There are various libraries (like RequireJS) and bundlers (which rewrite the require call), but unless you're using one of those, you can't use require browser-side.
If your goal is to read that file in the service worker and add the URLs in it to the cache, use fetch to load it, and then use them in addAll.
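As a sketch of that idea (the /cachelist.json URL is an assumption; you would need your build step or server to expose the generated cachedItems array as JSON at some route):
// offline/serviceWorker.js
const CACHE_NAME = "version-0.0.46";

self.addEventListener("install", e => {
  e.waitUntil(
    // Fetch the generated list of URLs at install time, then cache them all.
    fetch("/cachelist.json")
      .then(response => response.json())
      .then(urlsToCache => caches.open(CACHE_NAME).then(cache => cache.addAll(urlsToCache)))
      .then(() => self.skipWaiting())
  );
});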
