How To Upload Multiple Files To Server Using JavaScript

I am using PhoneGap and uploading a file (using an HTTP POST) like this:
function uploadSingleFile()
{
    var ft = new FileTransfer();
    // set up parameters etc.
    ft.upload(imageName, "http://serviceaddress/UploadFile.ashx", win, fail, options);
}

function win(r)
{
    // success callback
}
I want to upload multiple files, so in the success callback I want to call uploadSingleFile to move on to the next file.
How can I keep track of which file I am up to? I am using localStorage to store the file names, so I would want to do this:
upload file localStorage.file0
upload file localStorage.file1
upload file localStorage.file2
So all I would need to store is the number on the end (0, 1, etc.) of where we are up to. Do I need to use a global variable? That seems messy.
If only I could pass a number through to the success callback as an additional parameter.

Hmmm. Is this really a problem? Just store an array of file names and use JSON.stringify / JSON.parse to convert between the array and a string.

function uploadSingleFile(fileName) {
    var ft = new FileTransfer();
    ft.upload("fileUrl",
        "server",
        function (result) {
            // fileName is available here through the closure
            console.log(fileName + ' has been uploaded successfully to server');
        },
        function (error) {
            console.log(error);
        },
        {fileName: fileName, fileKey: "file"});
}

function uploadFiles() {
    var files = JSON.parse(localStorage.files);
    for (var i = 0; i < files.length; i++) {
        uploadSingleFile(files[i]);
    }
}
You can also pass the index of the file as a parameter to uploadSingleFile() and then use it in console.log().
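If you specifically want the uploads to run one after another (starting the next upload from the success callback, as the question describes), a closure avoids any global counter. This is only a sketch, reusing the URL and the options object from the question and assuming the file names are kept as a JSON array under localStorage.files:

// Store the file URIs once as a JSON array (e.g. the values previously
// kept in localStorage.file0, localStorage.file1, ...)
localStorage.files = JSON.stringify([localStorage.file0, localStorage.file1, localStorage.file2]);

function uploadAllFiles() {
    var files = JSON.parse(localStorage.files);

    function uploadNext(index) {
        if (index >= files.length) {
            console.log('All files uploaded');
            return;
        }
        var ft = new FileTransfer();
        ft.upload(files[index], "http://serviceaddress/UploadFile.ashx",
            function (result) {
                // 'index' is captured by the closure, so no global variable is needed
                console.log('Uploaded file #' + index + ': ' + files[index]);
                uploadNext(index + 1);
            },
            function (error) {
                console.log('Upload failed for ' + files[index] + ': ' + error.code);
            },
            options);
    }

    uploadNext(0);
}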

First, add all your images to an array:
var TemplstImg = [];

function UploadImages()
{
    var lstImages = [localStorage.file0, localStorage.file1, localStorage.file2];
    TemplstImg = lstImages;
    if (TemplstImg.length > 0) {
        var img = TemplstImg.pop();
        uploadPhoto(img);
    }
}
function uploadPhoto(imageURI) {
    imageURI = imageURI.ImageFile; // assumes each stored item is an object with an ImageFile property
    var options = new FileUploadOptions();
    options.fileKey = "file";
    options.fileName = imageURI.substr(imageURI.lastIndexOf('/') + 1);
    options.mimeType = "image/jpeg";
    var params = {};
    params.value1 = "test";
    params.value2 = "param";
    options.params = params;
    var ft = new FileTransfer();
    ft.upload(imageURI, yourServerPath, winImg, failImg, options);
}
function winImg(r) {
    if (TemplstImg.length == 0) {
        alert('Done, all files were uploaded');
    } else {
        // upload the next image from the queue
        var img = TemplstImg.pop();
        uploadPhoto(img);
    }
}

function failImg(error) {
    alert("failImg An error has occurred: Code = " + error.code);
    console.log("upload error source " + error.source);
    console.log("upload error target " + error.target);
}

Related

How do I play an HLS stream when the playlist.m3u8 file is constantly being updated?

I am using MediaRecorder to record chunks of my live video in WebM format from a MediaStream, converting these chunks to .ts files on the server using ffmpeg, and then updating my playlist.m3u8 file with this code:
function generateM3u8Playlist(fileDataArr, playlistFp, isLive, cb) {
var durations = fileDataArr.map(function(fd) {
return fd.duration;
});
var maxT = maxOfArr(durations);
var meta = [
'#EXTM3U',
'#EXT-X-VERSION:3',
'#EXT-X-MEDIA-SEQUENCE:0',
'#EXT-X-ALLOW-CACHE:YES',
'#EXT-X-TARGETDURATION:' + Math.ceil(maxT),
];
fileDataArr.forEach(function(fd) {
meta.push('#EXTINF:' + fd.duration.toFixed(2) + ',');
meta.push(fd.fileName2);
});
if (!isLive) {
meta.push('#EXT-X-ENDLIST');
}
meta.push('');
meta = meta.join('\n');
fs.writeFile(playlistFp, meta, cb);
}
Here fileDataArr holds information for all the chunks that have been created.
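For reference, each entry in fileDataArr looks roughly like this (a sketch based on the server code further down; the fileName2 field is assumed to be filled in by webm2Mpegts, and 'stream1' is a made-up stream prefix):

var fd = {
    fileName: '00001.webm',                 // original chunk received from the client
    filePath: 'videos/stream1/00001.webm',
    duration: 20,                           // seconds, hard-coded in the receiving code
    fileName2: '00001.ts'                   // transcoded segment referenced in the playlist
};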
After that I use this code to create an HLS server:
var runStreamServer = (function(streamFolder) {
var executed = false;
return function(streamFolder) {
if (!executed) {
executed = true;
var HLSServer = require('hls-server')
var http = require('http')
var server = http.createServer()
var hls = new HLSServer(server, {
path: '/stream', // Base URI to output HLS streams
dir: 'C:\\Users\\Work\\Desktop\\live-stream\\webcam2hls\\videos\\' + streamFolder // Directory that input files are stored
})
console.log("We are going to stream from folder:" + streamFolder);
server.listen(8000);
console.log('Server Listening on Port 8000');
}
};
})();
The problem is that if I stop creating new chunks and then open the HLS server link http://localhost:8000/stream/playlist.m3u8, the video plays in VLC; but if I try to play it during recording, it keeps loading the file and does not play. I want it to play while it is still creating new chunks and updating playlist.m3u8. The quirk in the generateM3u8Playlist function is that it only adds '#EXT-X-ENDLIST' to the playlist file after I have stopped recording.
The software is still a work in progress, so the code is a bit messy. Thank you for any answers.
The client side that generates blobs is as follows:
var mediaConstraints = {
video: true,
audio:true
};
navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
function onMediaSuccess(stream) {
console.log('will start capturing and sending ' + (DT / 1000) + 's videos when you press start');
var mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'video/webm';
mediaRecorder.ondataavailable = function(blob) {
var count2 = zeroPad(count, 5);
// here count2 just creates a blob number
console.log('sending chunk ' + name + ' #' + count2 + '...');
send('/chunk/' + name + '/' + count2 + (stopped ? '/finish' : ''), blob);
++count;
};
}
// Here we have the send function which sends our blob to server:
function send(url, blob) {
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text/plain';
xhr.setRequestHeader('Content-Type', 'video/webm');
//xhr.setRequestHeader("Content-Length", blob.length);
xhr.onload = function(e) {
if (this.status === 200) {
console.log(this.response);
}
};
xhr.send(blob);
}
The code that receives the XHR request is as follows:
var parts = u.split('/');
var prefix = parts[2];
var num = parts[3];
var isFirst = false;
var isLast = !!parts[4];
if ((/^0+$/).test(num)) {
var path = require('path');
shell.mkdir(path.join(__dirname, 'videos', prefix));
isFirst = true;
}
var fp = 'videos/' + prefix + '/' + num + '.webm';
var msg = 'got ' + fp;
console.log(msg);
console.log('isFirst:%s, isLast:%s', isFirst, isLast);
var stream = fs.createWriteStream(fp, { encoding: 'binary' });
/*stream.on('end', function() {
respond(res, ['text/plain', msg]);
});*/
//req.setEncoding('binary');
req.pipe(stream);
req.on('end', function() {
respond(res, ['text/plain', msg]);
if (!LIVE) { return; }
var duration = 20;
var fd = {
fileName: num + '.webm',
filePath: fp,
duration: duration
};
var fileDataArr;
if (isFirst) {
fileDataArr = [];
fileDataArrs[prefix] = fileDataArr;
} else {
var fileDataArr = fileDataArrs[prefix];
}
try {
fileDataArr.push(fd);
} catch (err) {
fileDataArr = [];
console.log(err.message);
}
videoUtils.computeStartTimes(fileDataArr);
videoUtils.webm2Mpegts(fd, function(err, mpegtsFp) {
if (err) { return console.error(err); }
console.log('created %s', mpegtsFp);
var playlistFp = 'videos/' + prefix + '/playlist.m3u8';
var fileDataArr2 = (isLast ? fileDataArr : lastN(fileDataArr, PREV_ITEMS_IN_LIVE));
var action = (isFirst ? 'created' : (isLast ? 'finished' : 'updated'));
videoUtils.generateM3u8Playlist(fileDataArr2, playlistFp, !isLast, function(err) {
console.log('playlist %s %s', playlistFp, (err ? err.toString() : action));
});
});
runStreamServer(prefix);
}
You don't show us how you use MediaRecorder to generate your "chunks" of data. Do you use its ondataavailable event for this purpose?
If so, please keep this in mind: You must concatenate all the chunks handed to you by ondataavailable to get a valid .webm (or .matroska) data stream.
You can't just store an arbitrary chunk of data in a media file and expect it to play. Even ffmpeg needs all your chunks streamed to it to generate valid output. That's because the first couple of chunks contain the mandatory .webm initialization segment, and the other chunks do not.
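To illustrate the point, here is a minimal sketch using the native MediaRecorder API (the question uses the MediaStreamRecorder library, but the principle is the same): keep every chunk and only ever work with the concatenation of all chunks received so far, never with a single later chunk on its own.

var recordedChunks = [];

mediaRecorder.ondataavailable = function (event) {
    if (event.data && event.data.size > 0) {
        recordedChunks.push(event.data);
    }
    // Only the concatenation of all chunks so far is a valid WebM stream,
    // because the initialization segment lives in the first chunk(s).
    var playableSoFar = new Blob(recordedChunks, { type: 'video/webm' });
    // e.g. send(url, playableSoFar) rather than send(url, event.data)
};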

How to add an attachment to an email (not stored on disk)

I want to open Outlook with a zip attachment using JavaScript.
It works fine if the file is in some directory on the client computer.
this.sendEmail = function (zip) {
    try {
        var theApp = new ActiveXObject("Outlook.Application");
        var objNS = theApp.GetNameSpace('MAPI');
        var theMailItem = theApp.CreateItem(0); // value 0 = MailItem
        theMailItem.to = ('test#gmail.com');
        theMailItem.Subject = ('test');
        theMailItem.Body = ('test');
        theMailItem.Attachments.Add(zip);
        theMailItem.display();
    }
    catch (err) {
        alert(err.message);
    }
};
But what I want to do is attach the file that I already have in memory:
this.SenddZip = function (docDescription, fileName) {
    var zip = new JSZip();
    for (var i = 0; i < docDescription.length; i++) {
        var doc = docDescription[i];
        zip.file(doc.core_documenttitle + doc.core_fileextension, doc.fileBase64, { base64: true });
    }
    Utils.sendEmail(zip);
    // Generate the zip file asynchronously
    zip.generateAsync({ type: "blob" })
        .then(function (content) {
            // Force download of the zip file
            var name = "documents";
            if (fileName) {
                name = fileName;
            }
            // location.href = "data:application/zip;base64," + content;
            saveAs(content, name + ".zip");
            Utils.sendEmail(xxxxxxxx);
        });
};
If there is no solution for this:
Is there a way to download the document without the confirmation dialog?
saveAs always asks for confirmation before storing the file.
Thank you

Phonegap: FileTransferError.FILE_NOT_FOUND_ERR

I'm using the PhoneGap file transfer plugin to upload a picture to the server. However, I am getting error code 1 (FileTransferError.FILE_NOT_FOUND_ERR). I've tested my server code with Postman and I can upload an image successfully, but I get that error with the plugin. This is my code. The file is declared from "camera_image.src" and I can see the image when I append this to the src of an image on the fly. Any contributions? What is wrong with this code?
var fileURL = camera_image.src;
alert(fileURL);
var win = function (r) {
temp.push(r.response);
statusDom.innerHTML = "Upload Succesful!";
}
var fail = function (error) {
alert("An error has occurred: Code = " + error.code + " | Source:" + error.source + " | Target:" + error.target );
statusDom.innerHTML = "Upload failed!";
}
var options = new FileUploadOptions();
options.fileKey = "properties_photo";
options.fileName=fileURL.substr(fileURL.lastIndexOf('/') + 1);
options.headers = {
Connection: "close"
};
var params = {};
params.value1 = "test";
params.value2 = "param";
options.params = params;
var ft = new FileTransfer();
statusDom = document.querySelector('#status');
ft.onprogress = function(progressEvent) {
if (progressEvent.lengthComputable) {
var perc = Math.floor(progressEvent.loaded / progressEvent.total * 100);
statusDom.innerHTML = perc + "% uploaded...";
console.log(perc);
} else {
if(statusDom.innerHTML == "") {
statusDom.innerHTML = "Loading";
} else {
statusDom.innerHTML += ".";
}
}
};
ft.upload(fileURL, encodeURI("http://cloud10.me/clients/itsonshow/app/image_upload_process.php"), win, fail, options);
I had this problem because of spaces in the path or filename of the file to be uploaded.
You need to ensure the plugin isn't being passed a fileURL with %20 in the URL.
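A minimal sketch of one way to handle that, assuming the %20 sequences come from the camera returning an encoded file URI: decode the URL before handing it to FileTransfer so the plugin receives the real path.

var fileURL = decodeURIComponent(camera_image.src);
// ... build options as in the question ...
ft.upload(fileURL,
    encodeURI("http://cloud10.me/clients/itsonshow/app/image_upload_process.php"),
    win, fail, options);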

Get Byte Position during Upload Loop

I am working on a function that will write data to a remote server in chunks using a 3rd-party API. Through some help on Stack Overflow I was able to accomplish this, and it is now working as expected. The problem is that I can only get a single 16KB chunk to write, because I need to advance the pos at which the next bytes are written.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this, though, I am unsure whether the next pos should just be 16 or something else. If it helps, the API call writeFileChunk() takes 3 parameters: filepath (str), pos (int64), and data (a base64-encoded string).
reader.onload = function(evt)
{
// Get SERVER_ID from URL
var server_id = getUrlParameter('id');
$("#upload_status").text('Uploading File...');
$("#upload_progress").progressbar('value', 0);
var chunkSize = 16<<10;
var buffer = evt.target.result;
var fileSize = buffer.byteLength;
var segments = Math.ceil(fileSize / chunkSize); // How many segments do we need to divide into for upload
var count = 0;
// start the file upload
(function upload()
{
var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
if (segSize > 0)
{
$("#upload_progress").progressbar('value', (count / segments));
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize); // get a chunk
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
// Send Chunk data to server
$.ajax({
type: "POST",
url: "filemanagerHandler.php",
data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': 0, 'chunk': chunkEncoded },
dataType: 'json',
success: function(data)
{
console.log(data);
setTimeout(upload, 100);
},
error: function(XMLHttpRequest, textStatus, errorThrown)
{
alert("Status: " + textStatus); alert("Error: " + errorThrown); alert("Message: " + XMLHttpRequest.responseText);
}
});
}
else
{
$("#upload_status").text('Finished!');
$("#upload_progress").progressbar('value', 100);
getDirectoryListing(curDirectory);
}
})()
};
The current position in the file on the client side is represented by this line, or more specifically by the second argument at the pre-increment step:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
However, count advances (count++) before you can reuse it, so if you need the actual position (pos below) you can extract it by simply rewriting the line as:
var pos = count++ * chunkSize; // here chunkSize = 16KB
var chunk = new Uint8Array(buffer, pos, segSize);
Each position update then advances by 16KB, since that is the chunk size. Progress can be calculated as pos / fileSize * 100. This of course assumes the unencoded buffer size.
The only special case is the last chunk, but when there are no more chunks left to read the position should be equal to the file length (fileSize), so it should be pretty straightforward.
When the ajax call returns, the server should have the same position unless something went wrong (connection, write-access change, disk full, etc.).
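For clarity, here is roughly how that position would slot into the original upload loop (a sketch only; the progress-bar update and error handler are omitted):

var pos = count++ * chunkSize;
var chunk = new Uint8Array(buffer, pos, segSize);
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));

$.ajax({
    type: "POST",
    url: "filemanagerHandler.php",
    data: { 'action': 'writeFileChunk', 'server_id': server_id,
            'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },
    dataType: 'json',
    success: function (data) {
        setTimeout(upload, 100); // the next iteration starts at the next pos
    }
});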
You can use the FileReader API to read the chunks and send them to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
<button>Read entire file in chuncks</button>
</span>
Javascript
// Post data to your server.
function postChunk(obj) {
var url = "https://your.remote.server";
return new Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.open('post', url, true);
xhr.responseType = 'json';
xhr.onload = function() {
var status = xhr.status;
if (status == 200) {
resolve(xhr.response);
} else {
reject(status);
}
};
var params = "";
// check that obj has the proper keys and create the url parameters
if (obj.hasOwnProperty('action') && obj.hasOwnProperty('server_id') && obj.hasOwnProperty('filepath') && obj.hasOwnProperty('pos') && obj.hasOwnProperty('chunk')) {
params += "action="+obj.action+"&server_id="+obj.server_id+"&filepath="+obj.filepath+"&pos="+obj.pos+"&chunk="+encodeURIComponent(obj.chunk); // encode the chunk so binary data doesn't break the parameter string
}
if(params.length>0) {
xhr.send(params);
} else {
alert('Error');
}
});
}
// add chunk to "obj" object and post it to server
function addChunk(reader,obj,divID) {
reader.onloadend = function(evt) {
if (evt.target.readyState == FileReader.DONE) { // DONE == 2
obj.chunk = evt.target.result;
console.log(obj);
document.getElementById(divID).textContent +=
['Sending bytes: ', obj.pos*16000, ' - ', (obj.pos*16000 + obj.chunk.length),
'\n'].join('');
// post data to server
postChunk(obj).then(function(data) {
if(data!=="" && data!==null && typeof data!=="undefined") {
// chunk was sent successfully
document.getElementById(divID).textContent +=
['Sent bytes: ', obj.pos*16000, ' - ', (obj.pos*16000 + obj.chunk.length), '\n'].join('');
} else {
alert('Error! Empty response');
}
}, function(status) {
alert('Resolve Error');
});
}
};
}
// read and send Chunk
function readChunk() {
var files = document.getElementById('files').files;
if (!files.length) {
alert('Please select a file!');
return;
}
var file = files[0];
var size = parseInt(file.size);
var chunkSize = 16000;
var chunks = Math.ceil(size/chunkSize);
var start,stop = 0;
var blob = [];
for(var i=0;i<chunks;i++) {
start = i*chunkSize;
stop = (i+1)*chunkSize; // slice's end index is exclusive, so no -1 here
if(i==(chunks-1)) {
stop = size;
}
var reader = new FileReader();
blob = file.slice(start, stop);
reader.readAsBinaryString(blob);
var obj = {action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: ""};
var div = document.createElement('div');
div.id = "bytes"+i;
document.body.appendChild(div);
addChunk(reader,obj,div.id);
}
}
// Check for the various File API support.
if (window.File && window.FileReader && window.FileList && window.Blob) {
console.log(' Great success! All the File APIs are supported.');
} else {
alert('The File APIs are not fully supported in this browser.');
}
document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
if (evt.target.tagName.toLowerCase() == 'button') {
readChunk();
}
}, false);
You can check this example in this Fiddle
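One detail to double-check against the writeFileChunk() API described in the question: there pos is a byte offset (int64), while the sketch above sends the chunk index. If the server expects a byte position, the conversion when building obj would simply be:

var obj = {
    action: 'writeFileChunk',
    server_id: 'sid',
    filepath: 'path',
    pos: i * chunkSize, // byte offset of this chunk rather than its index
    chunk: ""
};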

JSZip Memory Issue

I am experiencing high memory consumption in my Node.js app: when loading ~100MB zip files one after the other, it keeps them in memory as a "NodeBufferReader". The library I am using is called JSZip and is found here: https://stuk.github.io/jszip/
If I access the same zip file twice then it doesn't increase memory usage but for every 'extra' .zip file I access the memory increases by approx the size of the .zip file. The files I am accessing are all around 100MB or larger so as you can imagine this has the potential to get rather large, rather quickly.
The Node.js application is a websocket server that reads files from within .zip files and returns them back to the requestor as base64 data. The function in question is here:
function handleFileRequest(args, connection_id) {
var zipIndex = 0,
pathLen = 0,
zip_file = "",
zip_subdir = "";
try {
if (args.custom.file.indexOf(".zip") > -1) {
// We have a .zip directory!
zipIndex = args.custom.file.indexOf(".zip") + 4;
pathLen = args.custom.file.length;
zip_file = args.custom.file.substring(0, zipIndex);
zip_subdir = args.custom.file.substring(zipIndex + 1, pathLen);
fs.readFile(zip_file, function (err, data) {
if (!err) {
zipObj.load(data);
if (zipObj.file(zip_subdir)) {
var binary = zipObj.file(zip_subdir).asBinary();
var base64data = btoa(binary);
var extension = args.custom.file.split('.').pop();
var b64Header = "data:" + MIME[extension] + ";base64,";
var tag2 = args.custom.tag2 || "unset";
var tag3 = args.custom.tag3 || "unset";
var rargs = {
action: "getFile",
tag: args.tag,
dialogName: connections[connection_id].dialogName,
custom: {
file: b64Header + base64data,
tag2: tag2,
tag3: tag3
}
};
connections[connection_id].sendUTF(JSON.stringify(rargs));
rargs = null;
binary = null;
base64data = null;
} else {
serverLog(connection_id, "Requested file doesn't exist");
}
} else {
serverLog(connection_id, "There was an error retrieving the zip file data");
}
});
} else {
// File isn't a .zip
}
} catch (e) {
serverLog(connection_id, e);
}
}
Any help would be much appreciated in getting rid of this problem - Thanks!
Working Code Example
function handleFileRequest(args, connection_id) {
var zipIndex = 0,
pathLen = 0,
f = "",
d = "";
try {
if (args.custom.file.indexOf(".zip") > -1) {
// We have a .zip directory!
zipIndex = args.custom.file.indexOf(".zip") + 4;
pathLen = args.custom.file.length;
f = args.custom.file.substring(0, zipIndex);
d = args.custom.file.substring(zipIndex + 1, pathLen);
fs.readFile(f, function (err, data) {
var rargs = null,
binary = null,
base64data = null,
zipObj = null;
if (!err) {
zipObj = new JSZip();
zipObj.load(data);
if (zipObj.file(d)) {
binary = zipObj.file(d).asBinary();
base64data = btoa(binary);
var extension = args.custom.file.split('.').pop();
var b64Header = "data:" + MIME[extension] + ";base64,";
var tag2 = args.custom.tag2 || "unset";
var tag3 = args.custom.tag3 || "unset";
rargs = {
action: "getFile",
tag: args.tag,
dialogName: connections[connection_id].dialogName,
custom: {
file: b64Header + base64data,
tag2: tag2,
tag3: tag3
}
};
connections[connection_id].sendUTF(JSON.stringify(rargs));
} else {
serverLog(connection_id, "Requested file doesn't exist");
}
} else {
serverLog(connection_id, "There was an error retrieving the zip file data");
}
rargs = null;
binary = null;
base64data = null;
zipObj = null;
});
} else {
// Non-Zip file
}
} catch (e) {
serverLog(connection_id, e);
}
}
If you use the same JSZip instance to load each and every file, you will keep everything in memory: the load method doesn't replace the existing content.
Try using a new JSZip instance each time:
var zipObj = new JSZip();
zipObj.load(data);
// or var zipObj = new JSZip(data);
