JavaScript - Set object property in callback

I'm pretty confident that there is something with this that I'm doing wrong. This question has been asked before, but even after reviewing the other questions and answers, I still can't get it to work.
Basically the issue is that I can't set file.fileType to be the value I need it to be from within the callback function within magic.detectFileType.
var Magic = mmm.Magic,
    magic = new Magic(mmm.MAGIC_MIME_TYPE);

for (var i in files) {
    var file = new File(files[i]);
    file.detectFileType();
    commandSelf.log("File Type: " + file.fileType);
    commandSelf.log("File Name: " + file.filename);
    commandSelf.log("Full Path: " + file.fullPath);
}
var File = function(filename) {
    this.filename = filename;
    this.fullPath = null;
    this.fileType = null;
};

File.prototype.detectFileType = function() {
    this.fullPath = path + "/" + this.filename;
    var self = this;
    // Make sure this is an appropriate image file type
    magic.detectFile(this.fullPath, function(err, result) {
        self.fileType = "test";
    });
};

Because magic.detectFile is asynchronous, file.fileType is still null when you log it right after calling detectFileType. A more appropriate solution is to have detectFileType accept a callback or return a Promise, so that you know when the asynchronous work has completed and can safely read the File instance's properties. For example:
var Magic = mmm.Magic;
var magic = new Magic(mmm.MAGIC_MIME_TYPE);

files.forEach(function(file) {
    file = new File(file);
    file.detectFileType(function(err) {
        if (err) throw err;
        commandSelf.log("File Type: " + file.fileType);
        commandSelf.log("File Name: " + file.filename);
        commandSelf.log("Full Path: " + file.fullPath);
    });
});

var File = function(filename) {
    this.filename = filename;
    this.fullPath = null;
    this.fileType = null;
};

File.prototype.detectFileType = function(cb) {
    this.fullPath = path + "/" + this.filename;
    var self = this;
    // Make sure this is an appropriate image file type
    magic.detectFile(this.fullPath, function(err, result) {
        self.fileType = "test";
        cb(err);
    });
};
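The answer above also mentions returning a Promise. A minimal sketch of that variant, assuming a native Promise is available and the same mmm/magic/path setup as before:

File.prototype.detectFileType = function() {
    this.fullPath = path + "/" + this.filename;
    var self = this;
    return new Promise(function(resolve, reject) {
        magic.detectFile(self.fullPath, function(err, result) {
            if (err) return reject(err);
            self.fileType = result;   // store whatever detectFile reports
            resolve(self);
        });
    });
};

// Usage: wait for detection before reading fileType
new File(files[0]).detectFileType().then(function(file) {
    commandSelf.log("File Type: " + file.fileType);
});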

Related

cannot access element within function in javascript custom object

When I run this code it generates the appropriate file upload UI and adds the event listener to the upload button. However, the first line in the upload function throws an error - Cannot read property 'style' of undefined - for this.missingFile. What am I doing wrong here?
function FileUploader(props) {
    var div = document.querySelector(props.element);
    var uid = generateGuid();
    var templateHtml = "<p><div id=\"dvMissingFile-" + uid + "\" class=\"missing-file\"> Please choose a file.</div><input type=\"file\" id=\"flUploadedFile-" + uid + "\" name=\"flUploadedFile-" + uid + "\"/></p><div class=\"dvProgressBar\" id=\"progress-" + uid + "\"><div></div></div>";
    div.innerHTML = templateHtml;
    this.uploadButton = document.querySelector(props.uploadButton);
    this.fileInput = document.querySelector("#flUploadedFile-" + uid);
    this.missingFile = document.querySelector("#dvMissingFile-" + uid);
    this.progress = document.querySelector("#progress-" + uid);
    this.url = props.postUrl;
    this.upload = function() {
        this.missingFile.style.display = "none";
        if (this.fileInput.files.length === 0) {
            this.missingFile.style.display = "";
        }
        else {
            var file = this.fileInput.files[0];
            var xhr = new XMLHttpRequest();
            var pbar = document.querySelector("#progress-" + uid + ">div");
            if (xhr.upload) {
                // do upload
            }
        }
    }
    this.uploadButton.addEventListener("click", this.upload);
}
Usage example
<div id="dvUploader"></div>
<button type="button" id="btnUpload" class="btn btn-primary">Upload</button>
<script>
var uploader = new FileUploader({
element: "#dvUploader",
uploadButton: "#btnUpload",
postUrl: "myposturl"
});
</script>
One small update to your code can help. When the click handler runs, this refers to the button element the listener is attached to, not to your FileUploader instance, so this.missingFile is undefined. Binding the function to the instance fixes it:
this.upload = function() {
    // ...
}.bind(this);
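An equivalent fix is to bind at the point where the listener is registered, or to capture the instance in a local variable; either way the handler sees the FileUploader instance rather than the button:

// Bind when attaching the listener instead of when defining the method
this.uploadButton.addEventListener("click", this.upload.bind(this));

// Or capture the instance explicitly
var self = this;
this.uploadButton.addEventListener("click", function() { self.upload(); });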

JSZip Memory Issue

I am experiencing high memory consumption in my Node.js app: when loading ~100MB zip files one after the other, it keeps them in memory as a "NodeBufferReader". The library I am using is JSZip, found here: https://stuk.github.io/jszip/
If I access the same zip file twice, memory usage doesn't increase, but for every 'extra' .zip file I access, memory grows by roughly the size of that .zip file. The files I am accessing are all around 100MB or larger, so as you can imagine this has the potential to get rather large, rather quickly.
The Node.js application is a WebSocket server that reads files from within .zip files and returns them to the requestor as base64 data. The function in question is here:
function handleFileRequest(args, connection_id) {
    var zipIndex = 0,
        pathLen = 0,
        zip_file = "",
        zip_subdir = "";
    try {
        if (args.custom.file.indexOf(".zip") > -1) {
            // We have a .zip directory!
            zipIndex = args.custom.file.indexOf(".zip") + 4;
            pathLen = args.custom.file.length;
            zip_file = args.custom.file.substring(0, zipIndex);
            zip_subdir = args.custom.file.substring(zipIndex + 1, pathLen);
            fs.readFile(zip_file, function (err, data) {
                if (!err) {
                    zipObj.load(data);
                    if (zipObj.file(zip_subdir)) {
                        var binary = zipObj.file(zip_subdir).asBinary();
                        var base64data = btoa(binary);
                        var extension = args.custom.file.split('.').pop();
                        var b64Header = "data:" + MIME[extension] + ";base64,";
                        var tag2 = args.custom.tag2 || "unset";
                        var tag3 = args.custom.tag3 || "unset";
                        var rargs = {
                            action: "getFile",
                            tag: args.tag,
                            dialogName: connections[connection_id].dialogName,
                            custom: {
                                file: b64Header + base64data,
                                tag2: tag2,
                                tag3: tag3
                            }
                        };
                        connections[connection_id].sendUTF(JSON.stringify(rargs));
                        rargs = null;
                        binary = null;
                        base64data = null;
                    } else {
                        serverLog(connection_id, "Requested file doesn't exist");
                    }
                } else {
                    serverLog(connection_id, "There was an error retrieving the zip file data");
                }
            });
        } else {
            // File isn't a .zip
        }
    } catch (e) {
        serverLog(connection_id, e);
    }
}
Any help would be much appreciated in getting rid of this problem - Thanks!
Working Code Example
function handleFileRequest(args, connection_id) {
    var zipIndex = 0,
        pathLen = 0,
        f = "",
        d = "";
    try {
        if (args.custom.file.indexOf(".zip") > -1) {
            // We have a .zip directory!
            zipIndex = args.custom.file.indexOf(".zip") + 4;
            pathLen = args.custom.file.length;
            f = args.custom.file.substring(0, zipIndex);
            d = args.custom.file.substring(zipIndex + 1, pathLen);
            fs.readFile(f, function (err, data) {
                var rargs = null,
                    binary = null,
                    base64data = null,
                    zipObj = null;
                if (!err) {
                    zipObj = new JSZip();
                    zipObj.load(data);
                    if (zipObj.file(d)) {
                        binary = zipObj.file(d).asBinary();
                        base64data = btoa(binary);
                        var extension = args.custom.file.split('.').pop();
                        var b64Header = "data:" + MIME[extension] + ";base64,";
                        var tag2 = args.custom.tag2 || "unset";
                        var tag3 = args.custom.tag3 || "unset";
                        rargs = {
                            action: "getFile",
                            tag: args.tag,
                            dialogName: connections[connection_id].dialogName,
                            custom: {
                                file: b64Header + base64data,
                                tag2: tag2,
                                tag3: tag3
                            }
                        };
                        connections[connection_id].sendUTF(JSON.stringify(rargs));
                    } else {
                        serverLog(connection_id, "Requested file doesn't exist");
                    }
                } else {
                    serverLog(connection_id, "There was an error retrieving the zip file data");
                }
                rargs = null;
                binary = null;
                base64data = null;
                zipObj = null;
            });
        } else {
            // Non-Zip file
        }
    } catch (e) {
        serverLog(connection_id, e);
    }
}
If you use the same JSZip instance to load each and every file, you will keep everything in memory: the load method doesn't replace the existing content.
Try using a new JSZip instance each time:
var zipObj = new JSZip();
zipObj.load(data);
// or var zipObj = new JSZip(data);
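For reference, this question targets the JSZip 2.x API (load / asBinary). If you later move to JSZip 3.x, those calls were replaced by promise-based ones; a rough sketch of the equivalent read, reusing the data buffer and names from the code above:

// JSZip 3.x style (sketch)
JSZip.loadAsync(data).then(function (zip) {
    var entry = zip.file(zip_subdir);
    if (!entry) {
        return serverLog(connection_id, "Requested file doesn't exist");
    }
    return entry.async("base64").then(function (base64data) {
        // build rargs and sendUTF as before
    });
});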

NodeJS synchronous functions

I have a text file which simply lists some URLs. I'm trying to take each line from the text file and add it to an array of URLs for further operation.
var fs = require('fs'),
    Urls = [];

var returnURLS = function(file) {
    var read = function(callback) {
        fs.readFile(file, function(err, logData) {
            if (err) throw err;
            var text = logData.toString();
            var lines = text.split('\n');
            lines.forEach(function(line, callback) {
                var url = "http://www." + line;
                Urls.push(url);
            });
            callback();
        });
    };
    var giveBackAnswer = function() {
        console.log("1: ", Urls);
        return Urls;
    };
    read(giveBackAnswer);
};

console.log("2: ", returnURLS('textFileWithListOfURLs.txt'));
My console output clearly shows that the file system operations have not completed by the time the program is supposed to log the results, but that the results eventually do show up.
2: the urls are undefined
1: [ 'http://www.cshellsmassage.com',
'http://www.darsanamartialarts.com',
'http://www.davidgoldbergdc.com',
'http://www.dayspaofbroward.com',.... (etc)
What is the best way to get these functions to operate synchronously?
1) Compile the Urls array through file system operations
2) Print the array to the console once it has been filled
Well, your function returns undefined. This is because a JavaScript function that doesn't explicitly return anything returns undefined, and returnURLS has no return statement.
If you would like to hook on your function using callbacks, it has to take a callback itself and then you'd place your continuation in that callback:
var returnURLS = function(file, whenDone) {
    var read = function(callback) {
        fs.readFile(file, function(err, logData) {
            if (err) return whenDone(err);
            var text = logData.toString();
            var lines = text.split('\n');
            lines.forEach(function(line) {
                var url = "http://www." + line;
                Urls.push(url);
            });
            callback();
        });
    };
    var giveBackAnswer = function() {
        console.log("1: ", Urls);
        whenDone(null, Urls);
    };
    read(giveBackAnswer);
};
Which would let you do:
returnURLS("textFileWithList.txt", function(err, list){
console.log("2: ", list);
});
The alternative solution using promises (bluebird) would look something like:
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));

var returnURLS = function(file) {
    return fs.readFileAsync(file).then(function(logData) {
        var text = logData.toString();
        var lines = text.split('\n');
        return lines.map(function(line) {
            return "http://www." + line;
        });
    });
};
Which would let you do:
returnURLS("url.txt").then(function(data){
console.log("Got data!", data);
});
You could use fs.readFileSync in that simple case:
var returnURLS = function(file) {
    var text = fs.readFileSync(file).toString();
    var lines = text.split('\n');
    lines.forEach(function(line) {
        var url = "http://www." + line;
        Urls.push(url);
    });
    return Urls;
};
That's perfectly OK when you don't need parallelism, like in this small utility program.
But the solution you'll reapply everywhere else is to be wholly asynchronous: don't return the result, pass it as an argument to a callback:
var fetchURLS = function(file, callback) {
    fs.readFile(file, function(err, logData) {
        if (err) throw err;
        var text = logData.toString();
        var lines = text.split('\n');
        lines.forEach(function(line) {
            var url = "http://www." + line;
            Urls.push(url);
        });
        callback(Urls);
    });
};
fetchURLS('textFileWithListOfURLs.txt', function(urls) {
    console.log("2: ", urls);
});
When your code grows in complexity, it becomes convenient to use promises to reduce the "callback hell".
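For what it's worth, on modern Node (v10+) you don't need an external promise library for this: the built-in fs promise API plus async/await gives roughly the same shape. A small sketch, separate from the original answers:

const fs = require("fs").promises;

async function returnURLS(file) {
    const text = await fs.readFile(file, "utf8");
    return text.split("\n").map(line => "http://www." + line);
}

returnURLS("textFileWithListOfURLs.txt")
    .then(urls => console.log("2: ", urls))
    .catch(err => console.error(err));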
Wrap the function with a callback
var fs = require('fs'),
    Urls = [];

function doit(cb) {
    var returnURLS = function(file) {
        var read = function(callback) {
            fs.readFile(file, function(err, logData) {
                if (err) throw err;
                var text = logData.toString();
                var lines = text.split('\n');
                lines.forEach(function(line, callback) {
                    var url = "http://www." + line;
                    Urls.push(url);
                });
                callback();
            });
        };
        var giveBackAnswer = function() {
            console.log("1: ", Urls);
            return Urls;
        };
        read(giveBackAnswer);
    };
    cb(returnURLS);
}

doit(function(result) {
    console.log("2: ", result('textFileWithListOfURLs.txt'));
});

How To Upload Multiple Files To Server Using Javascript

I am using PhoneGap, and uploading a file (using an HTTP POST) like this,
function uploadSingleFile() {
    var ft = new FileTransfer();
    // set up parameters etc
    ft.upload(imageName, "http://serviceaddress/UploadFile.ashx", win, fail, options);
}

function win(r) {
    // success callback
}
I want to upload multiple files, so in the success callback I want to call uploadSingleFile to move on to the next file.
How can I store which file I am up to? I am using localStorage to store the file names. So I would want to do this,
upload file localStorage.file0
upload file localStorage.file1
upload file localStorage.file2
So all I would need to do is store the number on the end (0, 1, etc.) of where we are up to. Do I need to use a global variable? Seems messy.
If only I could pass a number through to the success callback as an additional parameter.
Hmmm, is this really a problem worth agonizing over? Just store an array of file names and use JSON.stringify / JSON.parse to convert between the array and a string.
function uploadSingleFile(fileName) {
    var ft = new FileTransfer();
    ft.upload("fileUrl",
        "server",
        function (result) {
            // fileName is available here via the closure
            console.log(fileName + ' has been uploaded successfully to server');
        },
        function (error) {
            console.log(error);
        },
        {fileName: fileName, fileKey: "file"});
}

function uploadFiles() {
    var files = JSON.parse(localStorage.files);
    for (var i = 0; i < files.length; i++) {
        uploadSingleFile(files[i]);
    }
}
You can also send the index of the file as a parameter to uploadSingleFile() and then use it in console.log(), as in the sketch below.
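A minimal sketch that combines both ideas from this thread: pass the index along and let the success callback's closure carry it forward, so each upload starts only after the previous one finishes (serverUrl is a placeholder for your upload endpoint):

function uploadAt(files, i) {
    if (i >= files.length) {
        console.log("All files uploaded");
        return;
    }
    var ft = new FileTransfer();
    ft.upload(files[i], serverUrl, function (result) {
        // the closure keeps i, so we know which file just finished
        console.log("Uploaded #" + i + ": " + files[i]);
        uploadAt(files, i + 1);   // move on to the next file
    }, function (error) {
        console.log("Upload failed: " + error.code);
    }, {fileKey: "file", fileName: files[i]});
}

// Kick off a sequential upload of everything stored in localStorage
uploadAt(JSON.parse(localStorage.files), 0);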
First, add all your images to an array:
var TemplstImg = [];

function UploadImages() {
    var lstImages = [localStorage.file0, localStorage.file1, localStorage.file2];
    TemplstImg = lstImages;
    if (TemplstImg.length > 0) {
        var img = TemplstImg.pop();
        uploadPhoto(img);
    }
}

function uploadPhoto(imageURI) {
    imageURI = imageURI.ImageFile;
    var options = new FileUploadOptions();
    options.fileKey = "file";
    options.fileName = imageURI.substr(imageURI.lastIndexOf('/') + 1);
    options.mimeType = "image/jpeg";
    var params = {};
    params.value1 = "test";
    params.value2 = "param";
    options.params = params;
    var ft = new FileTransfer();
    ft.upload(imageURI, yourServerPath, winImg, failImg, options);
}

function winImg(r) {
    if (TemplstImg.length == 0) {
        alert('Done, all files were uploaded');
    } else {
        var img = TemplstImg.pop();
        uploadPhoto(img);
    }
}

function failImg(error) {
    alert("failImg An error has occurred: Code = " + error.code);
    console.log("upload error source " + error.source);
    console.log("upload error target " + error.target);
}

How can I get node.js to return data once all operations are complete

I am just learning server-side JavaScript so please bear with any glaring mistakes I've made.
I am trying to write a file parser that operates on HTML files in a directory and returns a JSON string once all files have been parsed. I started with a single file and it works fine: it loads the resource from Apache running on the same machine, injects jQuery, does the parsing, and returns my JSON.
var request = require('request'),
    jsdom = require('jsdom'),
    sys = require('sys'),
    http = require('http');

http.createServer(function (req, res) {
    request({uri: 'http://localhost/tfrohe/Car3E.html'}, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            var window = jsdom.jsdom(body).createWindow();
            jsdom.jQueryify(window, 'http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js', function (window, jquery) {
                // jQuery is now loaded on the jsdom window created from 'body'
                var emps = {};
                jquery("tr td img").parent().parent().each(function() {
                    var step = 0;
                    jquery(this).children().each(function(index) {
                        if (jquery(this).children('img').attr('src') !== undefined) {
                            step++;
                            var name = jquery(this).parent().next().next().children('td:nth-child(' + step + ')').children().children().text();
                            var name_parts = name.split(",");
                            var last = name_parts[0];
                            var name_parts = name_parts[1].split(/\u00a0/g);
                            var first = name_parts[2];
                            emps[last + ",_" + first] = jquery(this).children('img').attr('src');
                        }
                    });
                });
                emps = JSON.stringify(emps);
                //console.log(emps);
                res.writeHead(200, {'Content-Type': 'text/plain'});
                res.end(emps);
            });
        } else {
            res.writeHead(200, {"Content-Type": "text/plain"});
            res.end("empty");
            //console.log(response.statusCode);
        }
    });
}).listen(8124);
Now I am trying to extend this to using the regular file system (fs) and get all HTML files in the directory and parse them the same way and return a single combined JSON object once all files have been parsed. Here is what I have so far but it does not work.
var sys = require("sys"),
fs = require("fs"),
jsdom = require("jsdom"),
emps = {};
//path = '/home/inet/www/media/employees/';
readDirectory = function(path) {
fs.readdir(path, function(err, files) {
var htmlfiles = [];
files.forEach(function(name) {
if(name.substr(-4) === "html") {
htmlfiles.push(name);
}
});
var count = htmlfiles.length;
htmlfiles.forEach(function(filename) {
fs.readFile(path + filename, "binary", function(err, data) {
if(err) throw err;
window = jsdom.jsdom(data).createWindow();
jsdom.jQueryify(window, 'http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js', function (window, jquery) {
jquery("tr td img").parent().parent().each(function(){
var step = 0;
jquery(this).children().each(function(index){
if (jquery(this).children('img').attr('src') !== undefined) {
step++;
var empname = jquery(this).parent().next().next().children('td:nth-child('+step+')').children().children().text();
var name_parts = empname.split(",");
var last = name_parts[0];
var name_parts = name_parts[1].split(/\u00a0/g);
var first = name_parts[2]
emps[last + ",_" + first] = jquery(this).children('img').attr('src');
}
});
});
});
});
});
});
}
readDirectory('/home/inet/www/media/employees/', function() {
console.log(emps);
});
In this particular case, there are two HTML files in the directory. If I console.log(emps) during the htmlfiles.forEach(), it shows me the results from the first file, then the results for both files together, the way I expect. How do I get emps returned to readDirectory so I can output it as desired?
Completed Script
After the answers below, here is the completed script with a httpServer to serve up the detail.
var sys = require('sys'),
    fs = require("fs"),
    http = require('http'),
    jsdom = require('jsdom'),
    emps = {};

var timed = setInterval(function() {
    emps = {};
    readDirectory('/home/inet/www/media/employees/', function(emps) {
    });
}, 3600000);

readDirectory = function(path, callback) {
    fs.readdir(path, function(err, files) {
        var htmlfiles = [];
        files.forEach(function(name) {
            if (name.substr(-4) === "html") {
                htmlfiles.push(name);
            }
        });
        var count = htmlfiles.length;
        htmlfiles.forEach(function(filename) {
            fs.readFile(path + filename, "binary", function(err, data) {
                if (err) throw err;
                window = jsdom.jsdom(data).createWindow();
                jsdom.jQueryify(window, 'http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js', function (window, jquery) {
                    var imagecount = jquery("tr td img").length;
                    jquery("tr td img").parent().parent().each(function() {
                        var step = 0;
                        jquery(this).children().each(function(index) {
                            if (jquery(this).children('img').attr('src') !== undefined) {
                                step += 1;
                                var empname = jquery(this).parent().next().next().children('td:nth-child(' + step + ')').children().children().text();
                                var name_parts = empname.split(",");
                                var last = name_parts[0];
                                var name_parts = name_parts[1].split(/\u00a0/g);
                                var first = name_parts[2];
                                emps[last + ",_" + first] = jquery(this).children('img').attr('src');
                            }
                        });
                    });
                    count -= 1;
                    if (count <= 0) {
                        callback(JSON.stringify(emps));
                    }
                });
            });
        });
    });
}

var init = readDirectory('/home/inet/www/media/employees/', function(emps) {
});

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify(emps));
}).listen(8124);
That sure is a lot of code, and there are a couple of mistakes:
You never call the callback function you supply to readDirectory.
You need to keep track of the files you have parsed; when you have parsed all of them, call the callback and supply the emps.
This should work:
var sys = require("sys"),
fs = require("fs"),
jsdom = require("jsdom"),
//path = '/home/inet/www/media/employees/';
// This is a nicer way
function readDirectory(path, callback) {
fs.readdir(path, function(err, files) {
// make this local
var emps = {};
var htmlfiles = [];
files.forEach(function(name) {
if(name.substr(-4) === "html") {
htmlfiles.push(name);
}
});
// Keep track of the number of files we have parsed
var count = htmlfiles.length;
var done = 0;
htmlfiles.forEach(function(filename) {
fs.readFile(path + filename, "binary", function(err, data) {
if(err) throw err;
window = jsdom.jsdom(data).createWindow();
jsdom.jQueryify(window, 'http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js', function (window, jquery) {
jquery("tr td img").parent().parent().each(function(){
var step = 0;
jquery(this).children().each(function(index){
if (jquery(this).children('img').attr('src') !== undefined) {
step++;
var empname = jquery(this).parent().next().next().children('td:nth-child('+step+')').children().children().text();
var name_parts = empname.split(",");
var last = name_parts[0];
var name_parts = name_parts[1].split(/\u00a0/g);
var first = name_parts[2]
emps[last + ",_" + first] = jquery(this).children('img').attr('src');
}
});
});
// As soon as all have finished call the callback and supply emps
done++;
if (done === count) {
callback(emps);
}
});
});
});
});
}
readDirectory('/home/inet/www/media/employees/', function(emps) {
    console.log(emps);
});
You seem to be doing this a tad wrong
readDirectory('/home/inet/www/media/employees/', function() {
    console.log(emps);
});
But you've defined your function as:
readDirectory = function(path) {
Where is the callback argument? Try this:
readDirectory = function(path, callback) {
then under emps[last + ",_" + first] = jquery(this).children('img').attr('src'); put
callback.call(null, emps);
Your callback function will be called as many times as your loop runs. If you want everything returned at once, you'll need to count how many iterations the loop will perform, count up as each one completes, and call your callback once the emps object is filled with the data you need.
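A minimal sketch of that counting pattern, using the names from the code above (error handling omitted):

var count = htmlfiles.length;   // how many asynchronous reads we expect
var done = 0;

htmlfiles.forEach(function(filename) {
    fs.readFile(path + filename, "binary", function(err, data) {
        // ... parse the file and add its entries to emps ...
        done += 1;
        if (done === count) {
            callback(emps);   // every file has been processed
        }
    });
});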
