Ionic: how can I create an mp3 file? - javascript

I'm writing a web app with the Ionic framework, and I'm trying to implement a record-and-playback mechanism for sounds. I'm using the following snippet as a service:
.factory('MediaSrv', function ($q, $ionicPlatform, $window) {
    var service = {
        loadMedia: loadMedia,
        getStatusMessage: getStatusMessage,
        getErrorMessage: getErrorMessage
    };

    function loadMedia(src, onError, onStatus, onStop) {
        var defer = $q.defer();
        $ionicPlatform.ready(function () {
            var mediaSuccess = function () {
                if (onStop) { onStop(); }
            };
            var mediaError = function (err) {
                _logError(src, err);
                if (onError) { onError(err); }
            };
            var mediaStatus = function (status) {
                if (onStatus) { onStatus(status); }
            };

            if ($ionicPlatform.is('android')) {
                src = '/android_asset/www/' + src;
            }
            defer.resolve(new $window.Media(src, mediaSuccess, mediaError, mediaStatus));
        });
        return defer.promise;
    }

    ...

    return service;
});
I'm able to play an existing .mp3 file, but I cannot record to a non-existent file. I thought the plugin would create the file by itself if it wasn't found. How can I create an empty .mp3 file for recording?

Creating a file is a server-side operation. You would need a Node server using fs to create the file.
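If all you need is an empty placeholder, a minimal Node sketch could look like this (the filename is hypothetical, and note that an empty file is not a valid, playable mp3):

var fs = require('fs');

// Create an empty placeholder file ('recording.mp3' is a hypothetical name)
fs.writeFile('recording.mp3', '', function (err) {
    if (err) throw err;
    console.log('empty file created');
});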
From Ionic's website:
Think of Ionic as the front-end UI framework that handles all of the look and feel and UI interactions your app needs in order to be compelling. Kind of like "Bootstrap for Native," but with support for a broad range of common native mobile components, slick animations, and beautiful design.

Ionic can use Cordova plugins since it's built on top of Cordova.
You can use the media-capture plugin to capture audio; however, I have found that these recordings are saved as AMR files.
From the [documentation][1]:
// capture callback
var captureSuccess = function (mediaFiles) {
    var i, path, len;
    for (i = 0, len = mediaFiles.length; i < len; i += 1) {
        path = mediaFiles[i].fullPath;
        // do something interesting with the file
    }
};

// capture error callback
var captureError = function (error) {
    navigator.notification.alert('Error code: ' + error.code, null, 'Capture Error');
};

// start audio capture
navigator.device.capture.captureAudio(captureSuccess, captureError, {limit: 2});
[1]: http://docs.phonegap.com/en/edge/cordova_media_capture_capture.md.html

Related

Access function in external js file from button in webapp

I have a file dmreboot_service.js in my /js folder. When I run this file using node /js/dmreboot_service.js it successfully invokes a direct method in Azure.
I need to be able to execute this function or file on a button click from my web app.
I tried loading the script into the head of my html using:
<script src="js/dmreboot_service.js"></script>
I put an alert in the external file. If I put this alert at the top of the file it works, but at the bottom it fails, so the contents of the file are not loading.
The content of dmreboot_service.js is :
'use strict';

var Registry = require('azure-iothub').Registry;
var Client = require('azure-iothub').Client;

var connectionString = "HostName=XxxxxxxX.azure-devices.net;SharedAccessKeyName=service;SharedAccessKey=XxxxxxxxxxxxxxxxxxxxxxxxxxxxxxX=";
var registry = Registry.fromConnectionString(connectionString);
var client = Client.fromConnectionString(connectionString);
var deviceToReboot = 'Runner';

var startRebootDevice = function (twin) {
    var methodName = "reboot";
    var methodParams = {
        methodName: methodName,
        payload: null,
        timeoutInSeconds: 30
    };

    client.invokeDeviceMethod(deviceToReboot, methodParams, function (err, result) {
        if (err) {
            console.error("Direct method error: " + err.message);
        } else {
            console.log("Successfully invoked the device to reboot.");
        }
    });
};

var queryTwinLastReboot = function () {
    registry.getTwin(deviceToReboot, function (err, twin) {
        // Check err first: if the query failed, twin may be undefined
        if (err) {
            console.error('Could not query twins: ' + err.constructor.name + ': ' + err.message);
        } else if (twin.properties.reported.iothubDM != null) {
            var lastRebootTime = twin.properties.reported.iothubDM.reboot.lastReboot;
            console.log('Last reboot time: ' + JSON.stringify(lastRebootTime, null, 2));
        } else {
            console.log('Waiting for device to report last reboot time.');
        }
    });
};

startRebootDevice();
setInterval(queryTwinLastReboot, 2000);

alert('dmreboot included!');
I have also tried creating a function in the head of my html that includes the entire contents of dmreboot_service.js, but although the function is called successfully the code does not execute.
This is the last part of a project that I need to get working. I'm fairly new to this, and this is driving me nuts!! Any advice much appreciated.
I usually handle clicks in HTML with JavaScript like so:
document.querySelector('.whateverclassnameyouchoose').addEventListener('click', onClick);

function onClick(e) {
    // whatever you want the function to do
}
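For example, assuming a button such as <button class="reboot-button"> in the page (the class name is hypothetical) and that the reboot logic is exposed as a function in the page's scope:

// Hypothetical wiring: .reboot-button and startRebootDevice must exist in the page
document.querySelector('.reboot-button').addEventListener('click', function () {
    startRebootDevice();
});

Note, though, that dmreboot_service.js uses require(), which browsers don't provide, so the file will still not run from a <script> tag unless it is bundled (e.g. with Browserify) or the logic is moved behind a server endpoint that the button click can call.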
Hope it helps :)

Worker blocking UI thread in Chrome

I'm building a web app that uses EvaporateJS to upload large files to Amazon S3 using Multipart Uploads. I noticed an issue where every time a new chunk was started the browser would freeze for ~2 seconds. I want the user to be able to continue to use my app while the upload is in progress, and this freezing makes that a bad experience.
I used Chrome's Timeline to look into what was causing this and found that it was SparkMD5's hashing. So I've moved the entire upload process into a Worker, which I thought would fix the issue.
Well the issue is now fixed in Edge and Firefox, but Chrome still has the exact same problem.
Here's a screenshot of my Timeline:
As you can see, during the freezes my main thread is doing basically nothing, with <8ms of JavaScript running during that time. All the work is occurring in my Worker thread, and even that is only running for ~600ms or so, not the 1386ms that my frame takes.
I'm really not sure what's causing the issue, are there any gotchas with Workers that I should be aware of?
Here's the code for my Worker:
var window = self; // For Worker-unaware scripts

// Shim to make Evaporate work in a Worker
var document = {
    createElement: function () {
        var href = undefined;
        var elm = {
            set href(url) {
                var obj = new URL(url);
                elm.protocol = obj.protocol;
                elm.hostname = obj.hostname;
                elm.pathname = obj.pathname;
                elm.port = obj.port;
                elm.search = obj.search;
                elm.hash = obj.hash;
                elm.host = obj.host;
                href = url;
            },
            get href() {
                return href;
            },
            protocol: undefined,
            hostname: undefined,
            pathname: undefined,
            port: undefined,
            search: undefined,
            hash: undefined,
            host: undefined
        };
        return elm;
    }
};

importScripts("/lib/sha256/sha256.min.js");
importScripts("/lib/spark-md5/spark-md5.min.js");
importScripts("/lib/url-parse/url-parse.js");
importScripts("/lib/xmldom/xmldom.js");
importScripts("/lib/evaporate/evaporate.js");

DOMParser = self.xmldom.DOMParser;

var defaultConfig = {
    computeContentMd5: true,
    cryptoMd5Method: function (data) { return btoa(SparkMD5.ArrayBuffer.hash(data, true)); },
    cryptoHexEncodedHash256: sha256,
    awsSignatureVersion: "4",
    awsRegion: undefined,
    aws_url: "https://s3-ap-southeast-2.amazonaws.com",
    aws_key: undefined,
    customAuthMethod: function (signParams, signHeaders, stringToSign, timestamp, awsRequest) {
        return new Promise(function (resolve, reject) {
            var signingRequestId = currentSigningRequestId++;
            postMessage(["signingRequest", signingRequestId, signParams.videoId, timestamp, awsRequest.signer.canonicalRequest()]);
            queuedSigningRequests[signingRequestId] = function (signature) {
                queuedSigningRequests[signingRequestId] = undefined;
                if (signature) {
                    resolve(signature);
                } else {
                    reject();
                }
            };
        });
    },
    //logging: false,
    bucket: undefined,
    allowS3ExistenceOptimization: false,
    maxConcurrentParts: 5
};

var currentSigningRequestId = 0;
var queuedSigningRequests = [];
// Holds the Evaporate instance; named distinctly so the onmessage event
// parameter does not shadow it (as a plain `e` would)
var evap = undefined;
var filekey = undefined;

onmessage = function (e) {
    var messageType = e.data[0];
    switch (messageType) {
        case "init":
            var globalConfig = {};
            for (var k in defaultConfig) {
                globalConfig[k] = defaultConfig[k];
            }
            for (var k in e.data[1]) {
                globalConfig[k] = e.data[1][k];
            }

            var uploadConfig = e.data[2];
            Evaporate.create(globalConfig).then(function (evaporate) {
                evap = evaporate;
                filekey = globalConfig.bucket + "/" + uploadConfig.name;
                uploadConfig.progress = function (p, stats) {
                    postMessage(["progress", p, stats]);
                };
                uploadConfig.complete = function (xhr, awsObjectKey, stats) {
                    postMessage(["complete", xhr, awsObjectKey, stats]);
                };
                uploadConfig.info = function (msg) {
                    postMessage(["info", msg]);
                };
                uploadConfig.warn = function (msg) {
                    postMessage(["warn", msg]);
                };
                uploadConfig.error = function (msg) {
                    postMessage(["error", msg]);
                };
                evap.add(uploadConfig);
            });
            break;
        case "pause":
            evap.pause(filekey);
            break;
        case "resume":
            evap.resume(filekey);
            break;
        case "cancel":
            evap.cancel(filekey);
            break;
        case "signature":
            var signingRequestId = e.data[1];
            var signature = e.data[2];
            queuedSigningRequests[signingRequestId](signature);
            break;
    }
};
Note that it relies on the calling thread to provide it with the AWS public key, the AWS bucket name and region, the AWS object key, and the input File object, all of which are provided in the 'init' message. When it needs something signed, it sends a 'signingRequest' message to the parent thread, which is expected to provide the signature in a 'signature' message once it has been fetched from my API's signing endpoint.
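For reference, a minimal sketch of the main-thread side of that protocol (the worker path, endpoint URL, and message indices are assumptions based on the worker code above):

var worker = new Worker('/js/upload-worker.js'); // hypothetical path

worker.onmessage = function (e) {
    if (e.data[0] === 'signingRequest') {
        var requestId = e.data[1];
        var stringToSign = e.data[4]; // the canonical request posted by the worker
        // Fetch the signature from the signing endpoint (URL is an assumption)
        fetch('/api/sign?to_sign=' + encodeURIComponent(stringToSign))
            .then(function (res) { return res.text(); })
            .then(function (signature) {
                worker.postMessage(['signature', requestId, signature]);
            });
    }
};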
I can't give a very good example or analyze what you are doing with only the Worker code, but I strongly suspect that the issue has to do with either the reading of the chunk on the main thread or some unexpected processing that you are doing on the chunk on the main thread. Maybe post the main-thread code that calls postMessage to the Worker?
If I were debugging it right now, I'd try moving your FileReader operations into the Worker. If you don't mind the Worker blocking while it loads a chunk, you could also use FileReaderSync.
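As a sketch, hashing a chunk entirely inside the Worker could look like this (names are illustrative; FileReaderSync is only available in Workers):

// Inside the Worker: FileReaderSync blocks only this worker, never the UI thread
function md5OfChunk(chunkBlob) {
    var buffer = new FileReaderSync().readAsArrayBuffer(chunkBlob); // synchronous read
    return btoa(SparkMD5.ArrayBuffer.hash(buffer, true));
}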
Post-comments update
Does generating the presigned URL require hashing the file content + metadata + a key? Hashing file content is going to take O(n) in the size of the chunk and it's possible, if the hash is the first operation that reads from the Blob, that the loading of the file content could be deferred until the hashing starts. Unless you are compelled to keep the signing in the main thread (you don't trust the worker with key material?) that would be another good thing to bring into the worker.
If moving the signing into the Worker is too much, you could have the worker do something to force the Blob to be read and/or pass the ArrayBuffer (or Uint8Array, or what have you) of file content back to the main thread for signing; this would ensure that reading the chunk does not occur on the main thread.
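A sketch of that hand-off, passing the buffer as a transferable so it is moved rather than copied (the message shape and names are assumptions):

// Inside the Worker: force the read here, then hand the bytes to the main thread
var bytes = new FileReaderSync().readAsArrayBuffer(chunkBlob);
// The second argument transfers ownership; the worker loses access to `bytes`
postMessage(['chunkBytes', chunkId, bytes], [bytes]);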

local PDF file scraping in node.js

I have uploaded a pdf via a MEAN stack web application using fs. I want to extract certain fields from the pdf and display them on the web app. I have looked at a couple of npm packages, like pdf.js and pdf2json, but I can't figure out the documentation and the JavaScript callbacks used in the available examples. Please help!
I hope I can help answer your question. pdf2json can be used to parse a pdf and extract the text. There are a couple of steps that need to be taken to get it working. I have adapted the example from https://github.com/modesty/pdf2json.
The setup is to install pdf2json in the node app, and also underscore. The example page didn't explain the need to define your own callback functions. It also used self instead of this to register them. So, with the appropriate changes, the code to extract all the text from the pdf will be something like this:
// Get the dependencies that have already been installed
// to ./node_modules with `npm install <dep>` in the root directory
// of your app
var _ = require('underscore'),
    PDFParser = require('pdf2json');

var pdfParser = new PDFParser();

// Create a function to handle the pdf once it has been parsed.
// In this case we cycle through all the pages, extract
// all the text blocks, and print them to console.
// If you do `console.log(JSON.stringify(pdf))` you will
// see how the parsed pdf is composed. Drill down into it
// to find the data you are looking for.
var _onPDFBinDataReady = function (pdf) {
    console.log('Loaded pdf:\n');
    for (var i in pdf.data.Pages) {
        var page = pdf.data.Pages[i];
        for (var j in page.Texts) {
            var text = page.Texts[j];
            console.log(text.R[0].T);
        }
    }
};

// Create an error handling function
var _onPDFBinDataError = function (error) {
    console.log(error);
};

// Use underscore to bind the data ready function to the pdfParser
// so that when the data ready event is emitted your function will
// be called. As opposed to the example, I have used `this` instead
// of `self` since self had no meaning in this context
pdfParser.on('pdfParser_dataReady', _.bind(_onPDFBinDataReady, this));

// Register error handling function
pdfParser.on('pdfParser_dataError', _.bind(_onPDFBinDataError, this));

// Construct the file path of the pdf
var pdfFilePath = 'test3.pdf';

// Load the pdf. When it is loaded your data ready function will be called.
pdfParser.loadPDF(pdfFilePath);
I am running the code out of my server side controller.
// Dependencies used by this controller
var fs = require('fs'),
    _ = require('underscore'),
    PDFParser = require('pdf2json');

module.exports = (function () {
    return {
        add: function (req, res) {
            var tmp_path = req.files.pdf.path;
            var target_path = './uploads/' + req.files.pdf.name;
            fs.rename(tmp_path, target_path, function (err) {
                if (err) throw err;
                // delete the temporary file, so that the explicitly set temporary
                // upload dir does not get filled with unwanted files
                fs.unlink(tmp_path, function (err) {
                    if (err) throw err;
                    // edit here: pdf parser
                    res.redirect('#/');
                });
            });
        },
        show: function (req, res) {
            var pdfParser = new PDFParser();
            var _onPDFBinDataReady = function (pdf) {
                console.log('Loaded pdf:\n');
                for (var i in pdf.data.Pages) {
                    var page = pdf.data.Pages[i];
                    // console.log(page.Texts);
                    for (var j in page.Texts) {
                        var text = page.Texts[j];
                        // console.log(text.R[0].T);
                    }
                }
                console.log(JSON.stringify(pdf));
            };
            // Create an error handling function
            var _onPDFBinDataError = function (error) {
                console.log(error);
            };
            pdfParser.on('pdfParser_dataReady', _.bind(_onPDFBinDataReady, this));
            // Register error handling function
            pdfParser.on('pdfParser_dataError', _.bind(_onPDFBinDataError, this));
            // Construct the file path of the pdf
            var pdfFilePath = './uploads/Invoice_template.pdf';
            // Load the pdf. When it is loaded your data ready function will be called.
            pdfParser.loadPDF(pdfFilePath);
        }
        // end controller
    };
})();

CollectionsFS files won't upload to the Meteor server

I'm fairly new to collectionFS, so I'm probably wrong somewhere. I'm trying to use collectionFS on a mobile device, recording audio and uploading it to a server. I followed the docs and it seems to work, except that the files won't show up in the defined upload directory on the server. They are visible in MongoDB, though.
Images = new FS.Collection('images', {
    stores: [new FS.Store.FileSystem('images', {path: '~/uploads/'})]
});
Template.main.events({
    'click #record': function () {
        Meteor.startup(function () {
            // capture callback
            var captureSuccess = function (mediaFiles) {
                var i, path, len;
                for (i = 0, len = mediaFiles.length; i < len; i += 1) {
                    path = mediaFiles[i].fullPath;
                    // do something interesting with the file
                    upload(mediaFiles[i]);
                }
            };
            // capture error callback
            var captureError = function (error) {
                navigator.notification.alert('Error code: ' + error.code, null, 'Capture Error');
            };
            // start audio capture
            navigator.device.capture.captureAudio(captureSuccess, captureError);
        });

        function upload(file) {
            Images.insert(file, function (err, fileObj) {
                if (err) { console.error(err); }
                else { console.log('done'); }
            });
        }
    }
});

Create a gulp plugin with a stream

I created a plugin that writes JSON data to a JSON file.
But I don't understand why I should send my JSON object down the pipe instead of writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function () {
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('/build/js/'))
        .pipe(gulp.dest('./build/js/hash.json'));
});
And not like this:
gulp.task('js-hash', function () {
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');

var PluginError = gutil.PluginError;

// Consts
const PLUGIN_NAME = 'get-hash-file';

var json = {};

function getHashFile(filename, basename) {
    if (!filename) {
        throw new PluginError(PLUGIN_NAME, "Missing filename!");
    }

    // Creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, callback) {
        if (file.isNull()) {
            this.push(file); // Do nothing if no contents
            return callback();
        }
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
            return callback();
        }
        if (file.isStream()) {
            this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
            return callback();
        }
    }).on('finish', function () {
        fs.writeFile(filename, JSON.stringify(json), function (err) {
            if (err) {
                throw err;
            }
        });
    });

    // returning the file stream
    return stream;
}

// Exporting the plugin main function
module.exports = getHashFile;
Do you have any ideas?
Nothing prevents you from doing this... besides not respecting the plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe them to other plugins.
If I get your code right, you're trying to generate a file that contains all sha hashes of inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people could do.
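For instance, rather than writing the JSON with fs in a 'finish' handler, the plugin could push the generated hash file downstream in through2's flush callback and let users pipe it wherever they like. A sketch of that approach (note that gulp.dest expects a directory, so users would write .pipe(gulp.dest('./build/js/'))):

var through = require('through2');
var gutil = require('gulp-util');
var path = require('path');

function getHashFile(basename) {
    var json = {};
    return through.obj(function (file, enc, callback) {
        // ... accumulate hashes into `json` exactly as before ...
        callback();
    }, function (callback) {
        // Flush callback: runs once every input file has been consumed.
        // Emit the aggregated JSON as a new file in the stream.
        this.push(new gutil.File({
            cwd: process.cwd(),
            base: process.cwd(),
            path: path.join(process.cwd(), 'hash.json'),
            contents: new Buffer(JSON.stringify(json))
        }));
        callback();
    });
}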
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, gulp-uglify authors are wondering how they can add source-maps without mixing js and source map downstream.
