How can I download a video MP4 file using Node.js?

I want to let users download a video from my AWS S3 bucket. The video format is MP4:
app.get("/download_video", function (req, res) {
  const filename = "s3.xxx.amazon.com/bucketname/folder/video_example.mp4";
  // I'm stuck on what I can do here
});
There are a lot of examples online of how to download images and text files using Node.js, but I can't find anything on videos.

'use strict'

const Fs = require('fs')
const Path = require('path')
const Listr = require('listr')
const Axios = require('axios')

function one (tasks) {
  tasks.run()
    .then(process.exit)
    .catch(process.exit)
}

if (process.argv) {
  const tasks = [{
    title: 'Downloading',
    task: async (ctx, task) => {
      const url = 'https://s3.xxx.amazon.com/bucketname/folder/video_example.mp4'
      const path = Path.resolve(__dirname, 'media', 'video.mp4')
      const response = await Axios({
        method: 'GET',
        url: url,
        responseType: 'stream'
      })
      response.data.pipe(Fs.createWriteStream(path))
      return new Promise((resolve, reject) => {
        response.data.on('end', () => {
          resolve()
        })
        response.data.on('error', err => {
          reject(err)
        })
      })
    }
  }]
  one(new Listr(tasks))
}
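A side note on the pattern above (a hedged sketch, not part of the original answer): resolving on the response stream's end event doesn't guarantee the write stream has flushed everything to disk. pipeline from 'stream/promises' (Node 15+) pipes the streams together and also waits for the write side; download is a hypothetical helper name:
const Fs = require('fs')
const Path = require('path')
const Axios = require('axios')
const { pipeline } = require('stream/promises')

async function download (url, dest) {
  const response = await Axios({ method: 'GET', url, responseType: 'stream' })
  // pipeline resolves only once the write stream has finished,
  // and rejects if either side errors
  await pipeline(response.data, Fs.createWriteStream(dest))
}

// hypothetical usage with the question's placeholder URL:
// download('https://s3.xxx.amazon.com/bucketname/folder/video_example.mp4',
//   Path.resolve(__dirname, 'media', 'video.mp4'))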

Try this:
const fetch = require('node-fetch');
const fs = require('fs');

(async () => {
  const response = await fetch(yourUrl);
  const buffer = await response.buffer(); // node-fetch v2 API
  fs.writeFile('./videos/name.mp4', buffer, (err) => {
    if (err) throw err;
    console.log('finished downloading video!');
  });
})();
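One caveat: response.buffer() loads the whole video into memory before writing, which can be a problem for large files. A streaming sketch (assuming node-fetch v2, where response.body is a Node.js readable stream; downloadVideo is a hypothetical name):
const fetch = require('node-fetch');
const fs = require('fs');
const { pipeline } = require('stream/promises');

async function downloadVideo(yourUrl) {
  const response = await fetch(yourUrl);
  // stream straight to disk instead of buffering the whole file
  await pipeline(response.body, fs.createWriteStream('./videos/name.mp4'));
  console.log('finished downloading video!');
}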

Third-party modules are no longer needed as of Node.js v18.
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';

const videoFileUrl = 'https://sveltejs.github.io/assets/caminandes-llamigos.mp4';
const videoFileName = 'video.mp4';

if (typeof fetch === 'undefined') throw new Error('Fetch API is not supported.');

const response = await fetch(videoFileUrl);
if (!response.ok) throw new Error('Response is not ok.');

const writeStream = createWriteStream(videoFileName);

// Reference https://stackoverflow.com/a/66629140/12817553
const readable = Readable.fromWeb(response.body);
readable.pipe(writeStream);

await new Promise((resolve, reject) => {
  readable.on('end', resolve);
  readable.on('error', reject);
});
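Since the question was about an Express route, here is a hedged sketch of how the same Node 18+ approach could stream the file to the user instead of saving it locally (assumes app is the Express app from the question; the S3 URL is the question's placeholder):
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

app.get('/download_video', async (req, res) => {
  const fileUrl = 'https://s3.xxx.amazon.com/bucketname/folder/video_example.mp4';
  const response = await fetch(fileUrl);
  if (!response.ok) return res.status(502).send('Upstream fetch failed.');
  // prompt a download in the browser rather than inline playback
  res.setHeader('Content-Type', 'video/mp4');
  res.setHeader('Content-Disposition', 'attachment; filename="video_example.mp4"');
  // stream the S3 response straight through to the client
  await pipeline(Readable.fromWeb(response.body), res);
});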

Related

Node: How to async await folder file reads

How do you read files asynchronously in Node.js? Here is a simple function.
There are a lot of convoluted answers on the internet; does anyone agree that this is the simplest?
import fs from 'fs';

export default async function handler(req, res) {
  let data = await readFiles('data/companies/');
  res.status(200).json(data);
}

// async file reader
function readFiles(dirname) {
  return new Promise(function (resolve, reject) {
    let data = {}
    fs.readdir(dirname, async function (err, filenames) {
      filenames.forEach(function (filename) {
        fs.readFile(dirname + filename, 'utf-8', function (err, content) {
          if (err) {
            reject(err)
          }
          data[filename] = content;
          if (filenames.length === Object.keys(data).length) {
            resolve(data)
          }
        });
      });
    });
  })
}
A bit cleaner and easier using the built-in promise support in fs.promises:
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');

// async file reader
async function readFiles(dirname) {
  const data = {};
  const files = await fsp.readdir(dirname);
  for (const filename of files) {
    const full = path.join(dirname, filename);
    const content = await fsp.readFile(full, { encoding: 'utf8' });
    data[filename] = content;
  }
  return data;
}
Or, if you want to run your file operations in parallel (at least to the limit of the thread pool), you might get slightly faster end-to-end performance like this:
// async file reader
async function readFiles(dirname) {
  const data = {};
  const files = await fsp.readdir(dirname);
  await Promise.all(files.map(async filename => {
    const full = path.join(dirname, filename);
    const content = await fsp.readFile(full, { encoding: 'utf8' });
    data[filename] = content;
  }));
  return data;
}
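One caveat with the parallel version (a sketch, not from the answer: readFilesSettled is a hypothetical name, and it reuses the fsp and path requires from above): Promise.all rejects as soon as any single read fails, discarding the reads that already succeeded. Promise.allSettled keeps the successful ones:
async function readFilesSettled(dirname) {
  const data = {};
  const files = await fsp.readdir(dirname);
  const results = await Promise.allSettled(files.map(async filename => {
    const content = await fsp.readFile(path.join(dirname, filename), { encoding: 'utf8' });
    return { filename, content };
  }));
  for (const r of results) {
    if (r.status === 'fulfilled') {
      data[r.value.filename] = r.value.content;
    }
    // rejected entries carry the error in r.reason if you want to log them
  }
  return data;
}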
Also, this:
res.status(200).json(data);
can be replaced with:
res.json(data);
200 is already the default status so there is no reason to specify it.

Async Testing axios in node, and getting a timeout error

I have my Axios code, which should download a file (cribbed and changed from here).
'use strict'

const Fs = require('fs')
const Path = require('path')
const axios = require('axios')

const URL = 'https://unsplash.com/photos/AaEQmoufHLk/download?force=true'

async function downloadMtgJson() {
  const path = Path.resolve(__dirname, 'resources', 'code.jpg')
  const writer = Fs.createWriteStream(path)
  const response = await axios({
    URL,
    method: 'GET',
    responseType: 'stream'
  })
  console.log(response.data.pipe)
  if (response && response.data) response.data.pipe(writer);
  console.log('hello')
  return handleWriter(writer)
}

const handleWriter = (writer) => new Promise((resolve, reject) => {
  writer.on('finish', resolve)
  writer.on('error', reject)
})

module.exports = {
  downloadMtgJson,
  handleWriter,
  URL
}
I looked online for examples of axios tests, and at some SO questions, and found some code I've tweaked for my purposes:
const axios = require('axios');
const { downloadMtgJson, URL } = require('./resources');

jest.mock('axios');

describe.only('fetchData', () => {
  it('fetches successfully data from an URL', async () => {
    expect.assertions(1);
    const data = { status: 200, data: { pipe: () => 'data' } };
    axios.mockImplementationOnce(() => Promise.resolve(data));
    console.log('waiting...')
    await expect(downloadMtgJson()).resolves.toEqual('data');
    console.log('waited')
    expect(axios).toHaveBeenCalledWith(
      `${URL}/search?query=react`,
    );
  });
});
The issue I am having is that the promise part of the handleWriter seems to be timing out. At one point I mocked it in the same way as I mocked axios, but it didn't make a difference.
Timeout - Async callback was not invoked within the 5000 ms timeout specified by jest.setTimeout. Error:
6 |
7 | describe.only('fetchData', () => {
> 8 | it('fetches successfully data from an URL', async () => {
I can't figure out what is different in what I've done, vs what I've seen in testing examples.
What can I do to get my tests working?
This line is also asynchronous:
const writer = Fs.createWriteStream(path)
And needs mocking out to prevent the error you see.
The full test code is now:
const fs = require('fs');
const axios = require('axios');
const { downloadMtgJson } = require('./resources');
const { handleWriter } = require('./resource.util');

jest.mock('fs');
jest.mock('axios');
jest.mock('./resource.util')

// ... other code ...

it('fetches successfully data from an URL', async () => {
  const data = { status: 200, data: { pipe: () => 'data' } };
  axios.mockImplementationOnce(() => data);
  fs.createWriteStream.mockImplementationOnce(() => 'fs');
  handleWriter.mockImplementationOnce(() => 'data'); // handleWriter was extracted to a util file
  const expectedJson = await downloadMtgJson();
  expect(expectedJson).toEqual('data');
  expect(axios).toHaveBeenCalledWith(
    expect.objectContaining({ URL: 'https://unsplash.com/photos/AaEQmoufHLk/download?force=true' }),
  );
  expect(axios).toHaveBeenCalledWith(
    expect.objectContaining({ responseType: 'stream' }),
  );
});
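Note the subtle difference from the earlier attempt: axios.mockImplementationOnce(() => data) returns a plain object rather than a promise, which still works because await wraps non-promise values in Promise.resolve. The fix that matters is mocking fs.createWriteStream and handleWriter, so the test never waits on a real stream's finish event.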

function is being executed twice when called only once

I am making a program that will read the content of files inside nested folders. For now I am just trying to log the content of a file to the console, but I am getting two logs instead of only one. Here is what I have done till now:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const getStats = promisify(fs.stat);
const readdir = promisify(fs.readdir);
const http = require('http');
handle_files = async (req, res) => {
  let files = await scanDir("logs_of_109");
  let result = await read_content(files)
  check_file_content(result)
  res.writeHead(200, { 'Content-Type': 'text/html' });
  // console.log(result)
  res.write("Hello");
  res.end();
};

check_file_content = (file_data) => {
  console.log(file_data[1])
}

async function read_content(files) {
  let file_data = []
  files.map(file => {
    let start_index = file.toString().lastIndexOf('.') + 1
    let ext = file.substring(start_index, file.length)
    if (ext == 'data') {
      file_data.push(fs.readFileSync(file, { encoding: 'utf-8' }))
    }
  })
  return file_data
}

http.createServer(handle_files).listen(8080)

async function scanDir(dir, fileList = []) {
  // fetch list of files from the given directory
  let files = await readdir(dir);
  // loop through all the files
  for (let file of files) {
    // join new folder name after the parent folder
    // logs_of_109/24
    let filePath = path.join(dir, file);
    try {
      let stats = await getStats(filePath);
      if (!stats.isDirectory()) {
        // add the filepath to the array
        fileList.push(filePath);
      }
      if (stats.isDirectory()) {
        await scanDir(filePath, fileList);
      }
    } catch (err) {
      // Drop on the floor..
    }
  }
  return fileList;
}
I expect the file content to be logged only once but it is logging twice on my console. Why is this happening and how do I stop this?
Your browser is making two requests to your server, most likely one for the URL you put in the address bar and another for favicon.ico. (You can quickly tell by opening the dev tools on your browser and going to the Network tab.)
handle_files should look at req (specifically its url property) and act according to what's being requested. (This is something the code should be doing anyway.)
Side note 1: You're passing an async function into something (createServer) that won't do anything with the promise it returns. If you do that, it's important to catch any errors in the function locally within the function, since (again) nothing else is going to handle them. E.g.:
handle_files = async (req, res) => {
  try {
    let files = await scanDir("logs_of_109");
    let result = await read_content(files)
    check_file_content(result)
    res.writeHead(200, { 'Content-Type': 'text/html' });
    // console.log(result)
    res.write("Hello");
    res.end();
  } catch (e) {
    // ...handle error here...
  }
};
Side note 2: That code is falling prey to The Horror of Implicit Globals¹. Declare your variables in the appropriate scope. Not declaring them, in loose mode, makes them globals. (Also recommend using strict mode, so you get an error for this.)
¹ (that's a post on my anemic little blog)
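For instance, a minimal sketch of the fix that side note describes:
'use strict';

// Declared with const, so it no longer leaks onto the global object:
const handle_files = async (req, res) => {
  res.end('Hello');
};

// In strict mode the original undeclared assignment would throw instead:
// ReferenceError: handle_files is not defined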
The answer above is correct.
My approach is to solve it via 'routing' of any kind.
Here is a small, basic example of how it can be done:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const getStats = promisify(fs.stat);
const readdir = promisify(fs.readdir);
const http = require('http');
handle_routes = async (req, res) => {
  switch (req.url) {
    case '/files':
      handle_files(req, res);
      break; // without this, execution falls through to the default case
    default:
      console.log('for default page');
  }
}

handle_files = async (req, res) => {
  let files = await scanDir("logs_of_109");
  let result = await read_content(files)
  check_file_content(result)
  res.writeHead(200, { 'Content-Type': 'text/html' });
  res.write("Hello");
  res.end();
};

check_file_content = (file_data) => {
  console.log(file_data[1])
}

async function read_content(files) {
  let file_data = []
  files.map(file => {
    let start_index = file.toString().lastIndexOf('.') + 1
    let ext = file.substring(start_index, file.length)
    if (ext == 'data') {
      file_data.push(fs.readFileSync(file, { encoding: 'utf-8' }))
    }
  })
  return file_data
}

http.createServer(handle_routes).listen(8080)

async function scanDir(dir, fileList = []) {
  // fetch list of files from the given directory
  let files = await readdir(dir);
  // loop through all the files
  for (let file of files) {
    // join new folder name after the parent folder
    // logs_of_109/24
    let filePath = path.join(dir, file);
    try {
      let stats = await getStats(filePath);
      if (!stats.isDirectory()) {
        // add the filepath to the array
        fileList.push(filePath);
      }
      if (stats.isDirectory()) {
        await scanDir(filePath, fileList);
      }
    } catch (err) {
      // Drop on the floor..
    }
  }
  return fileList;
}
This gives you the possibility to call the handle_files function by going to the localhost:8080/files URL.

How to write an async function that resolves when `data` event emitter fires

I am using node-serialport to communicate with a piece of hardware. It just writes a command and receives a response.
https://serialport.io/docs/en/api-parsers-overview
The following code works:
const port = new SerialPort(path);
const parser = port.pipe(new Readline({ delimiter: '\r', encoding: 'ascii' }));
const requestArray = [];

parser.on('data', (data) => {
  // get first item in array
  const request = requestArray[0];
  // remove first item
  requestArray.shift();
  // resolve promise
  request.promise.resolve(data);
});

export const getFirmwareVersion = async () => {
  let resolvePromise;
  let rejectPromise;
  const promise = new Promise((resolve, reject) => {
    resolvePromise = resolve;
    rejectPromise = reject;
  });
  const title = 'getFirmwareVersion';
  const cmd = 'V\r';
  requestArray.push({
    title,
    cmd,
    promise: {
      resolve: resolvePromise,
      reject: rejectPromise
    }
  });
  await v2Port.write(cmd);
  return promise;
};
Then from my app (which is written in electron/react) I can call the function:
<Button onClick={async () => {
  let data = await _api.getFirmwareVersion();
  console.log('done waiting...');
  console.log(data);
}}>
  Click Me
</Button>
Is there any way I can refactor this code to make it more succinct?
Is there a way to get the Promise from the async function, rather than having to make a new Promise?
Is there a way to tap into the Transform Stream that already exists and pipe the Promise in there somehow?
I'm also new to async/await, and wanted to avoid using callbacks, especially in the React/Redux side of things.
I aim to have a lot of these endpoints in the API (e.g. getFirmwareVersion, getTemperature, etc.), so I want to make the code as concise as possible. I don't want the UI to have any underlying knowledge of how the API is getting the data. It just needs to request it like any other API and wait for a response.
Oh, I think I get it. The parser is receiving data constantly, so when a request comes, you wait for the next data and send it when it arrives. I suggest you write an intermediate class.
Like this:
const SerialPort = require('serialport')
const Readline = require('@serialport/parser-readline')
const { EventEmitter } = require('events');

class SerialPortListener extends EventEmitter {
  constructor(path) {
    super();
    this.serialPortPath = path;
  }

  init() {
    this.serialPort = new SerialPort(this.serialPortPath);
    const parser = this.serialPort.pipe(new Readline({ delimiter: '\r', encoding: 'ascii' }));
    parser.on('data', data => this.emit('data', data));
  }
}
Then you could modify the getFirmwareVersion like this:
const serialPortListener = new SerialPortListener(path);
serialPortListener.init();

export const getFirmwareVersion = () => {
  return new Promise((resolve, reject) => {
    serialPortListener.once('data', async (data) => {
      try {
        const cmd = 'V\r';
        await v2Port.write(cmd);
        resolve(data);
      } catch (ex) {
        reject(ex);
      }
    });
  });
};
Based on help from Mehmet, here is what I ended up with:
const _port = new SerialPort(path);
const _parser = _port.pipe(new Readline({ delimiter: '\r', encoding: 'ascii' }));

const waitForData = async () => {
  return new Promise((resolve, reject) => {
    const timeoutId = setTimeout(() => reject('Write Timeout'), 500);
    _parser.once('data', (data) => {
      clearTimeout(timeoutId);
      resolve(data);
    });
  });
};

const createAPIFunction = (cmdTemplate, validationString) => {
  return async (config) => {
    try {
      // replace {key} in template with config[key] props
      const cmd = cmdTemplate.replace(/{(\w+)}/g, (_, key) => {
        return config[key];
      });
      _port.write(cmd + '\r');
      const data = await waitForData();
      // validate data
      if (data.startsWith(validationString)) {
        // is valid
        return data;
      } else {
        // invalid data
        throw new Error('Invalid Data Returned');
      }
    } catch (err) {
      throw err;
    }
  };
};
export const getFirmwareVersion = createAPIFunction('V', 'V1');
export const enableSampling = createAPIFunction('G1{scope}', 'G11');
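Hypothetical usage of the generated functions (run is just a wrapper name so await is legal; the config keys fill the {key} placeholders in the command template):
const run = async () => {
  // 'V' has no placeholders, so no config object is needed;
  // the response is expected to start with 'V1'
  const version = await getFirmwareVersion();
  console.log(version);

  // 'G1{scope}' becomes 'G11' when config.scope is 1,
  // which matches the 'G11' validation prefix
  await enableSampling({ scope: 1 });
};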

How to pipe a readable stream into a URL.createObjectURL without waiting for the whole file?

I know it's doable with MediaSource, but MediaSource doesn't support all video formats (like fragmented MP4, for example). That's a problem because my application doesn't have a server that can fix the file; it's a client-side application only.
const blob = await ipfs.getBlobFromStream(hash)
const url = URL.createObjectURL(blob)
this.setState({ ...this.state, videoSrc: url })

const getBlobFromStream = async (hash) => {
  return new Promise(async resolve => {
    let entireBuffer
    const s = await stream(hash)
    s.on('data', buffer => {
      console.log(buffer)
      if (!entireBuffer) {
        entireBuffer = buffer
      } else {
        entireBuffer = concatTypedArrays(entireBuffer, buffer)
      }
    })
    s.on('end', () => {
      const arrayBuffer = typedArrayToArrayBuffer(entireBuffer)
      const blob = new Blob([arrayBuffer]) // Blob takes an array of parts
      resolve(blob)
    })
  })
}
This is the code I'm using right now, which basically waits for the entire file, puts it into a single array, then into a blob, and then into URL.createObjectURL.
You can do it by restructuring your code:
await ipfs.startBlobStreaming(hash);
this.setState({ ...this.state, videoComplete: true });

const startBlobStreaming = async (hash) => {
  return new Promise(async (resolve) => {
    let entireBuffer;
    const s = await stream(hash);
    s.on('data', buffer => {
      if (!entireBuffer) {
        entireBuffer = buffer;
      } else {
        entireBuffer = concatTypedArrays(entireBuffer, buffer);
      }
      const arrayBuffer = typedArrayToArrayBuffer(entireBuffer);
      const blob = new Blob([arrayBuffer]); // Blob takes an array of parts
      const url = URL.createObjectURL(blob);
      this.setState({ ...this.state, videoSrc: url });
    });
    s.on('end', _ => resolve())
  });
}
I don't know how intensively the buffers come into s.on, but you could also collect buffers for a certain amount of time (e.g. 1000 ms) and then create the blob URL.
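A sketch of that idea, assuming the same stream(hash), concatTypedArrays, and typedArrayToArrayBuffer helpers as above (startThrottledBlobStreaming is a hypothetical name); it rebuilds the object URL at most once per second and revokes the previous one so the snapshots don't accumulate:
const startThrottledBlobStreaming = async (hash) => {
  let entireBuffer;
  let lastUrl;
  let lastUpdate = 0;
  const s = await stream(hash);
  const updateUrl = () => {
    const blob = new Blob([typedArrayToArrayBuffer(entireBuffer)]);
    const nextUrl = URL.createObjectURL(blob);
    this.setState({ ...this.state, videoSrc: nextUrl });
    if (lastUrl) URL.revokeObjectURL(lastUrl); // free the previous snapshot
    lastUrl = nextUrl;
  };
  s.on('data', buffer => {
    entireBuffer = entireBuffer ? concatTypedArrays(entireBuffer, buffer) : buffer;
    const now = Date.now();
    if (now - lastUpdate >= 1000) { // at most one rebuild per second
      lastUpdate = now;
      updateUrl();
    }
  });
  s.on('end', () => updateUrl()); // final, complete blob
};
Keep in mind that swapping src on a <video> element restarts playback, which is the underlying reason MediaSource exists; this sketch only shortens the wait for the first playable snapshot.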
