Dynamically load subtitles into video - javascript

Here is the situation: The server receives the srt file stream. The server converts the stream into vtt format. The resulting stream is then buffered into a buffer which is then sent to client through a io.socket.
Here is the server code:
// Express route: accepts a multipart upload, converts the SRT subtitle
// stream to WebVTT, and pushes the result to the client over socket.io.
// NOTE(review): this is the ORIGINAL (broken) version from the question;
// the working variant appears later in this post.
self.expressApp.post("/upload", function (req, res) {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
// Collect the converted VTT output chunk by chunk.
var subchunks = [],
sub = file.pipe(srt2vtt());
sub.on('data',function(buffer){
subchunks.push(buffer);
});
sub.on('end',function(){
// This emits a binary Buffer; the accepted fix below sends the VTT as a
// plain UTF-8 string instead.
var buffer = Buffer.concat(subchunks);
socket.emit('Subtitles',buffer);
});
});
busboy.on('finish', function() {
// All parts consumed; close out the HTTP request.
res.writeHead(200, { 'Connection': 'close' });
res.end();
});
// Feed the raw request into busboy so the 'file'/'finish' events fire.
return req.pipe(busboy);
});
Here is the client code:
// Client: wrap the subtitle payload received over socket.io in a Blob,
// expose it via an object URL, and attach it to the video as a caption track.
var subobj = new Blob([new Uint8Array(payload)],{type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
track.addEventListener("load", function() {
// NOTE(review): `this` is the HTMLTrackElement, which has no `mode`
// property -- the TextTrack lives at `this.track.mode`. The next line
// compensates by going through the media element's textTracks list.
this.mode = "showing";
videoElement.textTracks[0].mode = "showing";
});
// The <track> must be attached to the video before the browser will load it.
videoElement.append(track);
Why is this not working? Any help is appreciated.
Thanks.

I'm using Plyr, and there are some bugs with dash.js captions, so I created a Blob from the WebVTT text and appended it to the video container. Here is my code:
// Fetch a WebVTT file, wrap it in a Blob, and attach it to the video as a
// <track> element (workaround for dash.js caption issues when using Plyr).
fetch("url")
  .then(response => response.text())
  .then(result => {
    // Serve the VTT text from an object URL so the <track> can load it.
    var blob = new Blob([result], {
      type: "text/vtt"
    });
    const track = document.createElement('track');
    Object.assign(track, {
      label: 'language',
      default: true,
      src: window.URL.createObjectURL(blob)
    });
    video.appendChild(track);
  })
  .catch(error => console.log('error', error)); // fixed: removed stray trailing back-tick (markup residue)

Here is the solution.
What was I doing wrong?
I was sending the VTT stream as a binary array rather than a plain text string.
Server-side Code:
// Express route: accept a multipart subtitle upload, convert SRT -> WebVTT,
// and push the finished VTT text to the client over socket.io as a plain
// UTF-8 string (the fix for the binary-Buffer problem above).
self.expressApp.post("/upload", function (req, res) {
  var busboy = new Busboy({ headers: req.headers });
  busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    var vttText = "";
    var vttStream = file.pipe(srt2vtt());
    // Accumulate the converted output chunk by chunk as text.
    vttStream.on('data', function (chunk) {
      vttText += chunk.toString('utf8');
    });
    vttStream.on('end', function () {
      socket.emit('Subtitles', vttText);
    });
  });
  busboy.on('finish', function () {
    // Upload fully parsed; acknowledge and close the HTTP request.
    res.writeHead(200, { 'Connection': 'close' });
    res.end();
  });
  return req.pipe(busboy);
});
Client-side Code:
// Client: turn the received VTT payload into an object URL and attach it to
// the video element as an English caption track.
var subobj = new Blob([new Uint8Array(payload)],{type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
videoElement.append(track);
// Fixed: HTMLTrackElement has no `mode` property -- the associated TextTrack
// is exposed at `track.track` (see MDN HTMLTrackElement.track), and `mode`
// lives on that object.
track.track.mode = "showing";
videoElement.textTracks[0].mode = "showing";

Related

How to save pdf to Cloudant

I want to save the pdf to Cloudant. With the code below, I get an error opening the Attachment in Cloudant. "An error was encountered when processing this file"
I can put fake string data in the "._attachments[name].data" field and it will save.
The Cloudant docs say the data content needs to be in base64 and that is what I am attempting.
Cloudant says "The content must be provided by using BASE64 representation"
// Fetch the selected file, base64-encode it, and save it to Cloudant as an
// inline attachment on a new document.
// Fixes: the base64 data-URL string was confusingly named `blob` (shadowing
// the real Blob), and neither the outer chain nor the inner api() promise was
// returned, so callers could not observe completion or failures.
function saveFile() {
  var doc = {};
  var blob = null;
  // fileName is from the input field model data (a Vue ref in the original).
  var url = fileName;
  return fetch(url)
    .then((r) => r.blob())
    .then((b) => {
      blob = b;
      return getBase64(blob);
    })
    .then((dataUrl) => {
      // `dataUrl` is a "data:...;base64,...." string, not a Blob.
      console.log(dataUrl);
      let name = url._rawValue.name; // NOTE(review): relies on Vue ref internals -- confirm
      doc._id = "testing::" + new Date().getTime();
      doc.type = "testing attachment";
      doc._attachments = {};
      doc._attachments[name] = {};
      doc._attachments[name].content_type = "application/pdf";
      // Cloudant wants raw base64, so strip the "data:...;base64," prefix.
      doc._attachments[name].data = dataUrl.split(",")[1];
      console.log("doc: ", doc);
    })
    .then(() => {
      // Return the request promise so rejections propagate to the caller.
      return api({
        method: "POST",
        url: "/webdata",
        auth: {
          username: process.env.CLOUDANT_USERNAME,
          password: process.env.CLOUDANT_PASSWORD,
        },
        data: doc,
      })
        .then((response) => {
          console.log("result: ", response);
          alert("Test has been submitted!");
        })
        .catch((e) => {
          console.log("e: ", e);
          alert(e);
        })
        .finally(() => {
          console.log("finished send test");
        });
    });
}
// Read a File/Blob and resolve with its contents as a base64 data URL.
function getBase64(file) {
  return new Promise(function (resolve, reject) {
    var reader = new FileReader();
    reader.onload = function () { resolve(reader.result); };
    reader.onerror = function (error) { reject(error); };
    reader.readAsDataURL(file);
  });
}
any ideas?
Thanks
CouchDB, and by extension Cloudant, has a means of handling a "multi-part" request where the JSON document and the attachments are sent in the same request. See https://docs.couchdb.org/en/3.2.2/api/document/common.html#put--db-docid
They are modelled in CouchDB's Nano project here: https://www.npmjs.com/package/nano#multipart-functions
const fs = require('fs');
// Read the image and attach it to a new document in a single multipart insert.
// Fixed: `await` is only legal inside an async function, so the readFile
// callback must be declared async (the original was a plain arrow function,
// which is a SyntaxError).
fs.readFile('rabbit.png', async (err, data) => {
  if (!err) {
    await alice.multipart.insert({ foo: 'bar' }, [{ name: 'rabbit.png', data: data, content_type: 'image/png' }], 'mydoc');
  }
});
Alternatively, you could write the document first and add the attachment in a supplementary request. Using the current Cloudant SDKs:
write document https://cloud.ibm.com/apidocs/cloudant?code=node#putdocument
write attachment https://cloud.ibm.com/apidocs/cloudant?code=node#putattachment
// Create the document first, then attach the PDF in a supplementary request,
// using the _rev returned by putDocument.
const doc = { a: 1, b: 2 };

const putResult = await service.putDocument({
  db: 'events',
  docId: 'mydocid',
  document: doc
});

// Stream the file straight from disk into the attachment upload.
const pdfStream = fs.createReadStream('./mypdf.pdf');

await service.putAttachment({
  db: 'events',
  docId: 'mydocid',
  rev: putResult.result.rev, // the _rev of the doc we've just created
  attachmentName: 'mypdf',
  attachment: pdfStream,
  contentType: 'application/pdf'
});
I found out I was doing too much to the PDF file. No need to make to blob then convert to base64.
Only convert to base64.
// Base64-encode every selected file and save them all as inline attachments
// on one new Cloudant document.
async function sendFiles() {
  try {
    const url = fileName;
    const doc = {
      _attachments: {},
      _id: "testing::" + new Date().getTime(),
      type: "testing attachment",
    };
    // Encode files one at a time to keep memory use predictable.
    for (const item of url._value) {
      const dataUrl = await getBase64(item);
      doc._attachments[item.name] = {
        content_type: item.type,
        // Cloudant wants raw base64: strip the "data:...;base64," prefix.
        data: dataUrl.split(",")[1],
      };
    }
    await api({
      method: "POST",
      url: "/webdata",
      data: doc,
    });
  } catch (e) {
    console.log(e);
    throw e; // throw error so caller can see the error
  }
  console.log("finished send test");
  fileName.value = null;
}
// Promise adapter around FileReader: resolves with the file's base64 data URL.
function getBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.addEventListener("load", () => resolve(reader.result));
    reader.addEventListener("error", (error) => reject(error));
    reader.readAsDataURL(file);
  });
}
This works for me.

issues in downloading the pdf files using axios in reactjs (context api)

Can anyone please help me with downloading PDF files when React (using the Context API) is the frontend, with axios making the request? On the server side everything is fine. When I click the download link, the following response is shown in the console: Click here to see image
this is the client side code making req:
// Request the PDF by id. NOTE(review): the response body (res.data, a Blob)
// is only logged -- nothing hands it to the browser as a download, which is
// why clicking the link appears to do nothing; the answer below adds the
// object-URL + <a download> step.
const pdfFileDownload = async (dlIdInfo) => {
const { id } = dlIdInfo;
console.log(`pdf file id`, id);
try {
// responseType 'blob' makes axios keep the binary body intact.
const res = await axios({
method: "GET",
url: `/pub/pdfdload/${id}`,
responseType: 'blob',
});
console.log(res.data);
} catch (err) {
console.log(err.response);
}
}
this is server side code:
// Stream a stored PDF back to the client.
// Fixes: the original iterated fileArr with .map() purely for side effects,
// piping EVERY file into the same response -- headers can only be sent once,
// so any record with more than one file would error mid-response. One HTTP
// response can carry one file, so we serve the first entry. The catch block
// also only logged, leaving the client request hanging; now it answers.
router.get("/pdfdload/:id", async (req, res, next) => {
  const fileId = req.params.id;
  try {
    const fetchedFileToDl = await FILESDB.findById(fileId);
    const fileArr = fetchedFileToDl && fetchedFileToDl.pdfFiles;
    if (!fileArr || fileArr.length === 0) {
      return res.status(404).json({ message: "File not found" });
    }
    const editedFileName = fileArr[0].split("/")[1];
    const filePath = path.join(__dirname, "../", "public", editedFileName);
    const fileStream = fs.createReadStream(filePath);
    res.setHeader("Content-Type", "application/pdf");
    res.setHeader('Content-Disposition', 'inline; filename="' + editedFileName + '"');
    // Surface read errors instead of stalling the response forever.
    fileStream.on("error", (err) => {
      console.log(err);
      if (!res.headersSent) {
        res.status(500).end();
      } else {
        res.end();
      }
    });
    fileStream.pipe(res);
  } catch (err) {
    console.log(err);
    res.status(500).json({ message: "Download failed" });
  }});
Here is how I usually do:
// Trigger a browser download of binary data as a PDF file.
// Fixed: the body referenced `res.data`, which is not in scope inside this
// helper (it exists only in the caller) -- the `data` parameter was never
// used, so every call threw a ReferenceError.
const downloadFileAsPDF = (name, data) => {
  const url = window.URL.createObjectURL(new Blob([data], {type: `application/pdf`}));
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', name); // suggested filename for the save dialog
  link.click();
  window.URL.revokeObjectURL(url); // release the object URL once consumed
}
// Fetch the PDF by id as a Blob, then hand it to the download helper above.
const pdfFileDownload = async (dlIdInfo) => {
  const { id } = dlIdInfo;
  console.log(`pdf file id`, id);
  // Keep the payload binary -- text decoding would corrupt the PDF.
  const requestConfig = {
    method: "GET",
    url: `/pub/pdfdload/${id}`,
    responseType: 'blob',
  };
  try {
    const res = await axios(requestConfig);
    downloadFileAsPDF('file.pdf', res.data);
  } catch (err) {
    console.log(err.response);
  }
}
It's really simple but it get the job done.

Uploaded file at Azure is 0 bytes

I am trying to upload an image to Azure Blob Storage. The upload succeeds, but the uploaded image appears as 0 bytes in my Azure Blob Storage.
var fs = require('fs');

// Upload the temp file written by express-fileupload to Azure Blob Storage.
// Fixed (cause of the 0-byte blobs): `dataLength` was accumulated
// asynchronously in 'data'/'end' handlers, but createBlockBlobFromStream was
// called immediately -- so the SDK always saw length 0. The extra 'data'
// listener also started draining the stream before the SDK could read it.
// The real size is already reported by express-fileupload on
// req.files.file.size, so use that and leave the stream untouched.
const fileUpload = (req, res, next) => {
  console.log(req.files.file)
  var blobSvc = azure.createBlobService();
  var file = req.files.file.tempFilePath;
  var stream = fs.createReadStream(file);
  var dataLength = req.files.file.size; // byte count from the upload middleware
  blobSvc.createBlockBlobFromStream("image", req.files.file.name, stream, dataLength, function (error, result, response) {
    if (!error) {
      console.log('ok Blob uploaded')
      console.log(result)
      console.log(response)
    } else {
      console.log(error)
    }
  })
}
module.exports = fileUpload
This is the req.files.file object received from the frontend:
file: {
name: 'test.png',
data: <Buffer >,
size: 184332,
encoding: '7bit',
tempFilePath: '/tmp/tmp-1-1585187683435',
truncated: false,
mimetype: 'image/png',
md5: 'b8532c65a5c1ba95bbe89b6589b94a6c',
mv: [Function: mv]
}
You don't have to specify the length, you could try createWriteStreamToBlockBlob.
The code will look like this:
var fs = require('fs'); // fixed: fs was used below but never required in the snippet
var azure = require('azure-storage');
var blobService = azure.createBlobService();
// createWriteStreamToBlockBlob lets you pipe data without knowing the total
// length up front -- no need to pre-compute a byte count.
var stream = fs.createReadStream(fileNameTarget).pipe(blobService.createWriteStreamToBlockBlob(containerName, blobName, { blockIdPrefix: 'block' }));

Firebase function storage POST image issue

I have encountered a problem when following a maximilian schwarzmüller course, which has otherwise been great: https://www.youtube.com/watch?v=qZ1EFnFOGvE
The image logs in the Firebase console as uploaded, recognises the type of file/size etc. But continually loads and never displays the image. I use a post request in POSTMAN to upload the image.
When I upload manually to firebase on their UI, everything works fine.
My code:
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
const cors = require("cors")({ origin: true });
const Busboy = require("busboy");
const fs = require("fs");
// Service-account / project configuration for the Storage client.
const gcconfig = {
projectId: "REDACTED",
keyFilename: "REDACTED"
};
// Fixed: the package is "@google-cloud/storage" -- "#google-cloud/storage"
// (a common mangling of the "@" in pasted code) cannot be resolved by require().
const gcs = require("@google-cloud/storage")(gcconfig);
//
exports.onFileChange = functions.storage.object().onFinalize(event => {
const object = event.data;
const bucket = object.bucket;
const contentType = object.contentType;
const filePath = object.name;
console.log("File change detected, function execution started");
if (object.resourceState === "not_exists") {
console.log("We deleted a file, exit...");
return;
}
if (path.basename(filePath).startsWith("resized-")) {
console.log("We already renamed that file!");
return;
}
const destBucket = gcs.bucket(bucket);
const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
const metadata = { contentType: contentType };
return destBucket
.file(filePath)
.download({
destination: tmpFilePath
})
.then(() => {
return spawn("convert", [tmpFilePath, "-resize", "500x500", tmpFilePath]);
})
.then(() => {
return destBucket.upload(tmpFilePath, {
destination: "resized-" + path.basename(filePath),
metadata: metadata
});
});
});
exports.uploadFile = functions.https.onRequest((req, res) => {
cors(req, res, () => {
if (req.method !== "POST") {
return res.status(500).json({
message: "Not allowed"
});
}
const busboy = new Busboy({ headers: req.headers });
let uploadData = null;
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(os.tmpdir(), filename);
uploadData = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
});
busboy.on("finish", () => {
const bucket = gcs.bucket("REDACTED");
bucket
.upload(uploadData.file, {
uploadType: "media",
metadata: {
metadata: {
contentType: uploadData.type
}
}
})
.then(() => {
res.status(200).json({
message: "It worked!"
});
})
.catch(err => {
res.status(500).json({
error: err
});
});
});
busboy.end(req.rawBody);
});
});
My security rules:
rules_version = '2';
// Storage security rules.
// NOTE(review): `if true` grants unauthenticated read/write to every path --
// acceptable for debugging, but lock this down (e.g. `request.auth != null`)
// before production.
service firebase.storage {
match /b/{bucket}/o {
match /{allPaths=**} {
allow read, write:if true;
}
}
}

Downloading multiple files using AngularJS and storing in path

I am new to Angular,I have a scenario, where I need to download multiple files at the same time. My files are stored in GridFS. I am able to download the files, but for example a pdf is blank. The contentType stored in gridFS is "contentType": "binary/octet-stream", Am I missing out on anything?
My Jade code is
//- One table row per user; the button downloads every document the row's
//- user submitted via the downloadDocuments controller function.
tr(ng-repeat='row in displayedCollection')
td {{ row.Name}}
td {{ row.email}}
td
button.btn.btn-info(type='button',ng-click="downloadDocuments(row.documentsSubmitted)" download) Download Documents
My controller code is
// Download every document attached to the row, one anchor click per file.
// Fixed: without { responseType: 'arraybuffer' } AngularJS's $http decodes
// the body as text, corrupting binary payloads -- which is why the
// downloaded PDFs were blank (the accepted fix documented later in this post).
$scope.downloadDocuments = function (row) {
  angular.forEach(row, function (value, key) {
    var fileToDownload = value.id + "," + 'TrainingPartnerAddingTrainingCenter';
    $http.get('/downloadDocuments/' + fileToDownload, { responseType: 'arraybuffer' }).success(function (result, status, headers, config) {
      var _contentType = (headers('Content-Type'));
      var _fileName = headers('FileName');
      var blob = new Blob([ result ], { type : _contentType });
      var url = (window.URL || window.webkitURL).createObjectURL(blob);
      // Synthesize an anchor with a download attribute and click it.
      var anchor = angular.element('<a/>');
      anchor.attr({
        href : url,
        target : '_blank',
        download : _fileName
      })[0].click();
    });
  });
};
my node.js code is as follows
// Stream a file out of GridFS as an attachment download.
// Route param format: "<fileId>,<rootCollection>".
exports.downloadDocument = function (req, res) {
var paramData = req.params.fileToDownload.split(',');
var role = req.session.user.role;
var conn = mongoose.connection;
var gfs = Grid(conn.db, mongoose.mongo);
routesLogger.logInfo(role, "downloadDocument", "START");
// Look the file up first so we can 404 before opening a read stream.
gfs.findOne({_id: paramData[0], root: paramData[1]}, function (err, file) {
if (err) {
routesLogger.logError(role, "downloadDocument", err);
return res.status(400).send(err);
}
else if (!file) {
routesLogger.logError(role, "downloadDocument", "No File Found for download");
return res.status(404).send('Error on the database looking for the file.');
}
else {
// Forward the stored content type and filename; the Angular client reads
// these headers to build and name the Blob it downloads.
res.set('Content-Type', file.contentType);
res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
var readstream = gfs.createReadStream({
_id: paramData[0],
root: paramData[1]
});
readstream.on("error", function (err) {
routesLogger.logError(role, "downloadDocument", err);
res.end();
});
readstream.pipe(res);
// NOTE(review): "END" is logged when piping starts, not when the
// response actually finishes streaming.
routesLogger.logInfo(role, "downloadDocument", "END");
}
});
};
So the mistake I was doing was not adding the parameter { responseType: 'arraybuffer' }. I found the answer in this link
AngularJS: Display blob (.pdf) in an angular app
// The fix: request the body as an ArrayBuffer so the binary content survives
// intact, then hand the Blob to FileSaver's saveAs(). (Snippet is truncated
// here -- the closing of the success callback is not shown.)
$http.get('/downloadDocuments/' + fileToDownload,{ responseType: 'arraybuffer' }).success(function (result, status, headers, config) {
console.log(headers('Content-Type'));
var _contentType = (headers('Content-Type'));
var _fileName = headers('FileName');
var blob = new Blob([result], {type: _contentType });
saveAs(blob, _fileName);

Categories