Node.js: using Amazon Elastic Transcoder to format video / audio files

My goal is to make sure that all videos uploaded to my application are in the right format and that they are kept to a minimum size.
I did this before using ffmpeg, but I have recently moved my application to an Amazon server.
This gives me the option to use Amazon Elastic Transcoder.
However, by the looks of it, the interface does not let me set up automatic jobs that look for video or audio files and convert them.
For this I have been looking at their SDK / API references, but I am not quite sure how to use them in my application.
My question is: has anyone successfully started transcoding jobs in Node.js, and do you know how to convert videos from one format to another and / or lower the bitrate? I would really appreciate it if someone could point me in the right direction with some examples of how this might work.

However, by the looks of it, the interface does not let me set up automatic jobs that look for video or audio files and convert them.
The Node.js SDK doesn't support watch-style jobs, but you can do the following: if you store the videos in S3 (and if not, move them there, because Elastic Transcoder reads from and writes to S3), you can have AWS trigger a Lambda function on every S3 putObject and create a transcoder job from inside it.
http://docs.aws.amazon.com/lambda/latest/dg/with-s3.html
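A minimal sketch of such a Lambda handler, assuming you wire the bucket to the function as in the guide above; the PIPELINE_ID environment variable and the preset ID are placeholders, not values from the question:
const aws = require('aws-sdk');
const transcoder = new aws.ElasticTranscoder();

exports.handler = (event, context, callback) => {
  // The S3 put event carries the key of the object that was just uploaded
  const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const params = {
    PipelineId: process.env.PIPELINE_ID, // the pipeline defines the input/output buckets
    Input: { Key: key },
    // '1351620000001-000020' is the generic 480p system preset
    Outputs: [{ Key: `transcoded/${key}`, PresetId: '1351620000001-000020' }]
  };
  transcoder.createJob(params, callback);
};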
My question is: has anyone successfully started transcoding jobs in Node.js, and do you know how to convert videos from one format to another and / or lower the bitrate? I would really appreciate it if someone could point me in the right direction with some examples of how this might work.
We used AWS for video transcoding with Node without any problem. It was time consuming to find out every parameter, but I hope these few lines can help you:
const aws = require('aws-sdk');

aws.config.update({
  accessKeyId: config.AWS.accessKeyId,
  secretAccessKey: config.AWS.secretAccessKey,
  region: config.AWS.region
});

const transcoder = new aws.ElasticTranscoder();

let transcodeVideo = function (key, callback) {
  // presets: http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/system-presets.html
  let params = {
    PipelineId: config.AWS.transcode.video.pipelineId, // specifies output/input buckets in S3
    Input: {
      Key: key
    },
    OutputKeyPrefix: config.AWS.transcode.video.outputKeyPrefix,
    Outputs: config.AWS.transcode.video.presets.map(p => {
      return {Key: `${key}${p.suffix}`, PresetId: p.presetId};
    })
  };
  // generate thumbnails from the first output only
  params.Outputs[0].ThumbnailPattern = `${key}-{count}`;
  transcoder.createJob(params, function (err, data) {
    if (err) {
      logger.err(err);
      return;
    }
    let jobId = data.Job.Id;
    logger.info('AWS transcoder job created (' + jobId + ')');
    // invokes the callback once the job has finished
    transcoder.waitFor('jobComplete', {Id: jobId}, callback);
  });
};
An example configuration file:
let config = {
  AWS: {
    accessKeyId: '',
    secretAccessKey: '',
    region: '',
    videoBucket: 'blabla-media',
    transcode: {
      video: {
        pipelineId: '1450364128039-xcv57g',
        outputKeyPrefix: 'transcoded/', // put the video into the transcoded folder
        presets: [ // comes from the AWS console
          {presetId: '1351620000001-000040', suffix: '_360'},
          {presetId: '1351620000001-000020', suffix: '_480'}
        ]
      }
    }
  }
};
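With that config in place, a call might look like this (the key is just an illustrative S3 object name):
transcodeVideo('uploads/myvideo.mp4', function (err, data) {
  if (err) return logger.err(err);
  logger.info('Transcoding finished with status ' + data.Job.Status);
});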

If you want to generate a master playlist, you can do it like this. Raw ".ts" segment files are not playable on their own in HLS players; you have to generate the ".m3u8" playlist as well:
async function transcodeVideo(mp4Location, outputLocation) {
  let params = {
    PipelineId: elasticTranscoderPipelineId,
    Input: {
      Key: mp4Location,
      AspectRatio: 'auto',
      FrameRate: 'auto',
      Resolution: 'auto',
      Container: 'auto',
      Interlaced: 'auto'
    },
    OutputKeyPrefix: outputLocation + "/",
    Outputs: [
      {
        Key: "hls2000",
        PresetId: "1351620000001-200010", // HLS 2M system preset
        SegmentDuration: "10"
      },
      {
        Key: "hls1500",
        PresetId: "1351620000001-200020", // HLS 1.5M system preset
        SegmentDuration: "10"
      }
    ],
    Playlists: [
      {
        Format: 'HLSv3',
        Name: 'hls', // produces the master playlist hls.m3u8
        OutputKeys: [
          "hls2000",
          "hls1500"
        ]
      }
    ]
  };
  let jobData = await createJob(params);
  return jobData.Job.Id;
}

async function createJob(params) {
  return new Promise((resolve, reject) => {
    transcoder.createJob(params, function (err, data) {
      if (err) return reject("err: " + err);
      if (data) {
        return resolve(data);
      }
    });
  });
}
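Calling it could then look like this (the keys are illustrative):
// produces hls2000/hls1500 renditions plus the master playlist hls.m3u8
const jobId = await transcodeVideo('videos/input.mp4', 'videos/output');
console.log('Created transcoder job', jobId);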

Related

Write or stream audio (live-voice) file to variable as a binary in Node.js

I am working with audio streams in Node.js. For now my code doesn't use util.promisify, and it has three stages. After the 2nd .pipe I write the file to disk in WAV format with the required params.
Code example below:
// OpusEncoder comes from @discordjs/opus, EndBehaviorType from @discordjs/voice,
// and receiver/userId from the surrounding Discord.js voice code.
import { FileWriter } from 'wav';

const filename = `./${Date.now()}-${userId}.wav`;
const encoder = new OpusEncoder(16000, 1);

receiver
  .subscribe(userId, {
    end: {
      behavior: EndBehaviorType.AfterSilence,
      duration: 100,
    },
  })
  // OpusDecodingStream is a custom class which converts the audio,
  // like a gzip stage for a file.
  .pipe(new OpusDecodingStream({}, encoder))
  .pipe(
    // Writes the wav file to disk; can also be replaced with FileRead,
    // part of the wav module
    new FileWriter(filename, {
      channels: 1,
      sampleRate: 16000,
    }),
  );
The problem is: I need to transfer (not stream!) the resulting audio file in binary format via an axios POST. So I guess it's a bit wrong to write the file to disk; I'd rather keep it in a variable and, after the stream ends, send it straight to the required URL. Something (by logic) like what I'd like to see:
// other code
// other code
const fileStringBinary = await receiver
  .subscribe(userId, {
    end: {
      behavior: EndBehaviorType.AfterSilence,
      duration: 100,
    },
  })
  .pipe(new OpusDecodingStream({}, encoder))
  .pipe(
    new FileWriter(filename, {
      channels: 1,
      sampleRate: 16000,
    }),
  );

await axios.post('https://url.com', {
  data: fileStringBinary
});
Unfortunately I am not so good with streams, and especially audio ones, so any useful advice is welcome.
I understand that I could write the file to a directory, find it there, read it once again with node:stream createReadStream, and then POST it to the required URL. That is not what I need; I'd like to skip these useless stages of writing and then reading. I believe there is a way to collect a stream into binary form and write it to a JS variable.
That was a bit tricky after all, but I guess I figured it out:
// opus is assumed to come from prism-media (import { opus } from 'prism-media'),
// as in the discord.js voice recording examples
const stream = receiver
  .subscribe(userId, {
    end: {
      behavior: EndBehaviorType.AfterSilence,
      duration: 100,
    },
  })
  .pipe(
    new opus.OggLogicalBitstream({
      opusHead: new opus.OpusHead({
        channelCount: 2,
        sampleRate: 48000,
      }),
      pageSizeControl: {
        maxPackets: 10,
      },
      crc: false,
    }),
  );

const data = [];

stream.on('data', (chunk) => {
  data.push(chunk);
});

stream.on('end', async () => {
  try {
    const response = await axios.post(
      `https://url.com${postParams}`,
      Buffer.concat(data),
      {
        headers: {
          Authorization: `Api-Key ${token}`,
          'Content-Type': 'application/x-www-form-urlencoded',
        },
      },
    );
    console.log(response);
  } catch (e) {
    console.log(e);
  }
});
Unfortunately, I haven't found a better solution than using the old-school event model with 'data' and 'end'. My use case is Discord.js voice recording without a file, using the stream for voice recognition.
I will be glad if someone provides a better-syntax solution, and in that case I'll accept that answer as the solution.
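One possibly tidier variant, assuming Node 16.7+ where node:stream/consumers ships a buffer() helper, collects the stream without manual event wiring:
import { buffer } from 'node:stream/consumers';

// 'stream' is the piped Ogg/Opus stream from the snippet above
const body = await buffer(stream);
await axios.post(`https://url.com${postParams}`, body, {
  headers: {
    Authorization: `Api-Key ${token}`,
    'Content-Type': 'application/x-www-form-urlencoded',
  },
});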

Using the node.js google cloud speech to text, how can I get the status of a current job?

I managed to trigger a job with:
const config = {
  languageCode: 'en-US',
  enableSpeakerDiarization: true,
  audioChannelCount: 2,
  enableSeparateRecognitionPerChannel: true,
  useEnhanced: true,
  profanityFilter: false,
  enableAutomaticPunctuation: true,
};

const audio = {
  uri: `gs://${filePath}`
};

const requestObj = {
  config: config,
  audio: audio
};

return speechClient.longRunningRecognize(requestObj);
I get back an object with a name. I want to use that with https://cloud.google.com/speech-to-text/docs/reference/rest/v1/LongRunningRecognizeMetadata (via the node.js package) to get the current status.
How do I do it?
return speechClient.longrunning.Operation()
does not seem to exist.
Looks like you can do it with:
return speechClient.operationsClient.getOperation({ name: googleName })
This is not super well documented.
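A minimal polling sketch under that approach ('googleName' is the name returned by longRunningRecognize; the destructuring assumes the usual gax array-style return):
async function checkJob(googleName) {
  const [operation] = await speechClient.operationsClient.getOperation({ name: googleName });
  // operation.done is a boolean; operation.metadata carries the
  // LongRunningRecognizeMetadata (e.g. progressPercent) as an encoded Any
  return operation.done;
}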

Ionic Cordova can not share video on social sites

I am trying to use the Cordova social sharing plugin to share video on social sites. So far I have successfully captured video using the following code:
var options = {
  limit: 1,
  duration: 15
};

$cordovaCapture.captureVideo(options).then(function (videoData) {
  $scope.videoUrl = videoData[0].fullPath;
}, function (err) {
  // An error occurred. Show a message to the user
  //alert("video error : " + err);
});
I can successfully find the captured video file's URL, but unfortunately I cannot share the videos to social media sites. I have tried both of the following methods:
$cordovaSocialSharing
  .share(message, subject, file, link)
and
$cordovaSocialSharing
  .shareViaTwitter(message, image, link)
Now my question is:
Is there any way to share video through this approach?
If not, please let me know if there is any possible way to do this.
N.B.: I have already bothered Google a lot.
Thanks in advance.
My problem was passing a bad filePath, so I found a solution like the one below:
import {CaptureError, MediaFile, MediaCapture, CaptureImageOptions, Transfer} from "ionic-native";
declare let cordova: any;

private static options = {
  message: '', // not supported on some apps (Facebook, Instagram)
  subject: '', // for email
  files: [''], // an array of filenames either locally or remotely
  url: ''
};

videoOptions: CaptureImageOptions = {limit: 1};
videoData: any;

captureVideo() {
  MediaCapture.captureVideo(this.videoOptions)
    .then(
      (data: MediaFile[]) => {
        this.videoData = data[0];
        const fileTransfer = new Transfer();
        // copy the captured video into the app's storage directory first
        fileTransfer.download(this.videoData.fullPath, cordova.file.applicationStorageDirectory + 'fileDir/filename.mp4').then((entry) => {
          this.options.message = "Your message";
          this.options.subject = "Your Subject";
          this.options.files = [entry.toURL()];
          this.options.url = "https://www.google.com.tr/";
          SocialSharing.shareWithOptions(this.options);
        }, (error) => {
        });
      },
      (err: CaptureError) => {
      }
    );
}
As you can see above, I just copy my video file to applicationStorageDirectory before sharing it.

JS AWS SDK - copyObject SignatureDoesNotMatch error

I have an application using Node and the AWS-SDK package. I am copying objects from one bucket to another using the copyObject method. I'm getting an error that says SignatureDoesNotMatch: The request signature we calculated does not match the signature you provided. Check your key and signing method.
I've been able to successfully run the code on my local machine and it copies the files from one bucket to another. The error occurs on our AWS server, which I deployed the application to. The full error is:
{ [SignatureDoesNotMatch: The request signature we calculated does not match the signature you provided. Check your key and signing method.]
  message: 'The request signature we calculated does not match the signature you provided. Check your key and signing method.',
  code: 'SignatureDoesNotMatch',
  region: null,
  time: Mon Jul 11 2016 12:11:36 GMT-0400 (EDT),
  requestId: <requestId>,
  extendedRequestId: <extendedRequestId>,
  cfId: undefined,
  statusCode: 403,
  retryable: false,
  retryDelay: 66.48076744750142 }
Also, I'm able to perform the listObjects command. The error is only happening on copyObject.
So far, I've tried:
- setting correctClockSkew to true
- checking the server's time (same as my local computer)
- checking the key/secret (loaded from a config file; working locally)
- checking the file names (no strange characters; alphanumeric, '.', '-' and '/')
Here is the code causing the problem:
AWS.config.update({
  accessKeyId: <accessKeyId>,
  secretAccessKey: <secretAccessKey>,
  correctClockSkew: true
});

var s3 = new AWS.S3();

var params = {
  Bucket: <bucket>,
  Prefix: <prefix>
};

s3.listObjects(params, function(err, data) {
  if (data.Contents.length) {
    async.each(data.Contents, function(file, cb) {
      var file_name = file.Key.substr(file.Key.indexOf('/') + 1);
      var copy_params = {
        Bucket: <bucket2>,
        CopySource: <bucket> + '/' + file.Key,
        Key: file_name,
        ACL: 'public-read'
      };
      s3.copyObject(copy_params, function(copyErr, copyData) {
        if (copyErr) {
          console.log('Error:', copyErr);
        } else {
          cb();
        }
      });
    }, function(err) {
      ...
    });
  } else {
    ...
  }
});
Not sure if you've found a solution to this or not, but this was an issue raised on GitHub, and the solution seems to be simply URL-encoding your CopySource parameter with encodeURI():
https://github.com/aws/aws-sdk-js/issues/1949
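Applied to the code above, that would mean wrapping the CopySource value, e.g.:
var copy_params = {
  Bucket: <bucket2>,
  CopySource: encodeURI(<bucket> + '/' + file.Key), // URL-encode the source path
  Key: file_name,
  ACL: 'public-read'
};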

How to upload files to Amazon S3 with Meteor?

I'm trying to upload files to my Amazon S3 bucket. S3 and Amazon are set up.
This is the error message from Amazon:
Conflicting query string parameters: acl, policy
The policy and signature are encoded with Node's crypto module:
var crypto = Npm.require("crypto");
I'm trying to build the POST request with Meteor's HTTP.post method. This could be wrong as well.
var BucketName = "mybucket";
var AWSAccessKeyId = "MY_ACCESS_KEY";
var AWSSecretKey = "MY_SECRET_KEY";

//create policy
var POLICY_JSON = {
  "expiration": "2009-01-01T00:00:00Z",
  "conditions": [
    {"bucket": BucketName},
    ["starts-with", "$key", "uploads/"],
    {"acl": 'public-read'},
    ["starts-with", "$Content-Type", ""],
    ["content-length-range", 0, 1048576]
  ]
};
var policyBase64 = encodePolicy(POLICY_JSON);

//create signature
var SIGNATURE = encodeSignature(policyBase64, AWSSecretKey);
console.log('signature: ', SIGNATURE);
This is the POST request I'm using with Meteor:
//Send data----------
var options = {
  "params": {
    "key": file.name,
    'AWSAccessKeyId': AWSAccessKeyId,
    'acl': 'public-read',
    'policy': policyBase64,
    'signature': SIGNATURE,
    'Content-Type': file.type,
    'file': file,
    "enctype": "multipart/form-data"
  }
};

HTTP.call('POST', 'https://' + BucketName + '.s3.amazonaws.com/', options, function (error, result) {
  if (error) {
    console.log("and HTTP ERROR:", error);
  } else {
    console.log("result:", result);
  }
});
and here I'm encoding the policy and the signature:
encodePolicy = function (jsonPolicy) {
  // stringify the policy, store it in a NodeJS Buffer object
  var buffer = new Buffer(JSON.stringify(jsonPolicy));
  // convert it to base64
  var policy = buffer.toString("base64");
  // replace "/" and "+" so that it is URL-safe.
  return policy.replace(/\//g, "_").replace(/\+/g, "-");
};

encodeSignature = function (policy, secret) {
  var hmac = crypto.createHmac("sha256", secret);
  hmac.update(policy);
  return hmac.digest("hex");
};
I can't figure out what's going on. There might already be a problem with the POST method, or with the encoding, because I don't know these methods too well. If someone could point me in the right direction on how to encode or send the POST request to Amazon S3 properly, it would help a lot.
(I don't want to use filepicker.io, because I don't want to force the client to sign up there as well.)
Thanks in advance!
For direct uploads to S3 you can use the slingshot package:
meteor add edgee:slingshot
On the server side declare your directive:
Slingshot.createDirective("myFileUploads", Slingshot.S3Storage, {
  bucket: "mybucket",
  allowedFileTypes: ["image/png", "image/jpeg", "image/gif"],
  acl: "public-read",
  authorize: function () {
    // You can add user restrictions here
    return true;
  },
  key: function (file) {
    return file.name;
  }
});
This directive will generate policy and signature automatically.
And then just upload it like this:
var uploader = new Slingshot.Upload("myFileUploads");

uploader.send(document.getElementById('input').files[0], function (error, url) {
  Meteor.users.update(Meteor.userId(), {$push: {"profile.files": url}});
});
Why don't you use the aws-sdk package? It packs all the needed methods for you. For example, here's the simple function for adding a file to bucket:
s3.putObject({
  Bucket: ...,
  ACL: ...,
  Key: ...,
  Metadata: ...,
  ContentType: ...,
  Body: ...,
}, function (err, data) {
  ...
});
Check out the S3 meteor package. The README has a very comprehensive walkthrough of how to get started.
The first thing is to add the package for S3 file upload.
For installation, add the AWS SDK smart package:
$ meteor add peerlibrary:aws-sdk
1. Create a directive upload.js and paste this code:
angular.module('techno')
  .directive("fileupload", [function () {
    return {
      scope: {
        fileupload: "="
      },
      link: function (scope, element, attributes) {
        $('.button-collapse').sideNav();
        element.bind("change", function (event) {
          scope.$apply(function () {
            scope.fileupload = event.target.files[0];
          });
        });
      }
    };
  }]);
2. Get your access keys and paste them into your fileUpload.js file (placeholders below):
AWS.config.update({
  accessKeyId: '<YOUR_ACCESS_KEY_ID>',
  secretAccessKey: '<YOUR_SECRET_ACCESS_KEY>'
});
AWS.config.region = 'us-east-1';
let bucket = new AWS.S3();
3. Now put this upload code in your directive fileUpload.js:
vm.upload = (Obj) => {
  vm.loadingButton = true;
  let name = Obj.name;
  let params = {
    Bucket: 'technodheeraj',
    Key: name,
    ContentType: 'application/pdf',
    Body: Obj,
    ServerSideEncryption: 'AES256'
  };
  bucket.putObject(params, (err, data) => {
    if (err) {
      console.log('---err------->', err);
    } else {
      vm.fileObject = {
        userId: Meteor.userId(),
        eventId: id,
        fileName: name,
        fileSize: Obj.size,
      };
      vm.call("saveFile", vm.fileObject, (error, result) => {
        if (!error) {
          console.log('File saved successfully');
        }
      });
    }
  });
};
4. Now in the "saveFile" method paste this code:
saveFile: function (file) {
  if (file) {
    return Files.insert(file);
  }
}
5. In your HTML paste this code:
<input type="file" name="file" fileupload="file">
<button type="button" class="btn btn-info " ng-click="vm.upload(file)"> Upload File</button>
