Sails.js downloading file

I'm a beginner in Sails.js and I need to add some logic to an existing server.
I need to retrieve an image from a shared network location, so I added ImageController.js with a download action in it:
module.exports = {
  download: function (req, res) {
    var run = req.param('run');
    var test = req.param('test');
    var file = req.param('file');
    var path = util.format('\\\\tch-smain\\Share\\AutoTest\\Screenshots\\%s\\%s\\%s.jpeg', run, test, file);
    fileAdapter.read(path)
      .on('error', function (err) {
        return res.serverError(err);
      })
      .pipe(res);
  }
};
Then I registered the route in config/routes.js:
'/img': 'ImageController.download'
But when I try to execute GET /img?run=a&test=b&file=c I get a 500 Internal Server Error. Something isn't right here.
But the file at \\tch-smain\Share\AutoTest\Screenshots\a\b\c.jpeg does exist.
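One thing worth checking, since neither util nor fileAdapter appears in the controller as posted: if either is undefined, the action throws a ReferenceError before the stream ever starts, and Sails reports it as a 500. A minimal sketch of the missing requires, assuming fileAdapter is meant to come from skipper-disk as in the Sails file-download examples:

// api/controllers/ImageController.js
var util = require('util');
// Assumption: skipper-disk is what provides fileAdapter here; swap in
// whatever module actually exposes a read() stream in this project.
var fileAdapter = require('skipper-disk')();

module.exports = {
  download: function (req, res) { /* as above */ }
};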

Related

How to handle file uploads when a file with the same name exists using Node.js

I am using the formidable Node module for uploading the file.
Here is my .jade code:
form#uploadForm(enctype='multipart/form-data', method='POST', action='/upload')
  input#uploadTestSheet(type='file', name='uploadTestSheet', accept='.csv, application/vnd.openxmlformats-officedocument.spreadsheetml.sheet, application/vnd.ms-excel')
  button(type='submit') Submit
The files are getting uploaded, but if a file already exists in the directory and a user then tries to add a file with the same name, the new file is not uploaded.
Here is my server:
var fs = require('node-fs-extra');
var formidable = require('formidable');

app.use(bodyparser({ defer: true }));

app.post('/upload', function (req, res, next) {
  var form = new formidable.IncomingForm();
  form.uploadDir = __dirname + "/uploads";
  form.keepExtensions = true;
  form.parse(req, function (err, fields, files) {
    fs.rename(files.uploadTestSheet.path, __dirname + '/uploads' + files.uploadTestSheet.name, function (err) {
      if (err) {
        res.render('ManualMode', { data: { id: req.user.id, hash: req.user.hash, nodePollInterval: req.user.nodePollInterval } });
        console.log("cannot upload: " + err);
      }
    });
    res.render('index', { data: { id: req.user.id, hash: req.user.hash, nodePollInterval: req.user.nodePollInterval } });
  });
});
The fs.rename method follows Linux conventions, so it should be overwriting files in the directory. This leads me to think that maybe your problem is in the browser. Chrome, for instance, will refuse to upload files of the same name without first clearing the value property of the input field. In your case, assuming your uploading is asynchronous, this would do:
document.getElementById('uploadTestSheet').value = ''; // after each upload
If you have verified that this is not the issue, try deleting the potentially existing file before renaming, something along the lines of:
...
var newFilename = __dirname + '/uploads' + files.uploadTestSheet.name;
try {
  fs.unlinkSync(newFilename);
} catch (e) {
  // Ignore "file does not exist"; rethrow anything else.
  if (e.code !== 'ENOENT')
    throw e;
}
fs.rename(files.uploadTestSheet.path, newFilename, function(err) {
...
I hope that helps!
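For reference, a minimal sketch of the whole handler with both pieces combined. Note two deviations from the code in the question: path.join is used, so the missing separator in __dirname + '/uploads' + name can't bite, and the response is only rendered after the rename finishes, so the two res.render calls can't both fire:

var fs = require('fs');
var path = require('path');
var formidable = require('formidable');

app.post('/upload', function (req, res) {
  var form = new formidable.IncomingForm();
  form.uploadDir = path.join(__dirname, 'uploads');
  form.keepExtensions = true;
  form.parse(req, function (err, fields, files) {
    if (err) return res.status(500).send('upload failed');
    var target = path.join(form.uploadDir, files.uploadTestSheet.name);
    // Delete any previous file with the same name, then move the new one in.
    try {
      fs.unlinkSync(target);
    } catch (e) {
      if (e.code !== 'ENOENT') throw e;
    }
    fs.rename(files.uploadTestSheet.path, target, function (renameErr) {
      if (renameErr) console.log("cannot upload: " + renameErr);
      res.render(renameErr ? 'ManualMode' : 'index', {
        data: { id: req.user.id, hash: req.user.hash, nodePollInterval: req.user.nodePollInterval }
      });
    });
  });
});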

How do you use a Node.js variable on the client-side?

I'm relatively new to Node.js and I'm struggling with a basic problem: the correct use of global variables. I've read a lot about it, but it seems I can't make it work properly. I'll post some code for a better view of the problem.
I have this simple js running as a server:
myapi.js
var http = require('http');
var express = require('express');
var app = express();
var exec = require('child_process').exec, child;
var fs = require('fs');
var jUptime;
var ipExp = require('./getDown');
var filesD = [];
var path = "/media/pi/01D16F03D7563070/movies";

app.use(express['static'](__dirname));

exec("sudo /sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{print $1}'", function(error, stdout, stderr) {
  ip = stdout;
  exports.ipAdd = ip;
  console.log(ip);
});

app.get('/files', function(req, res) {
  fs.readdir(path, function(err, files) {
    if (err) {
      console.log("Cannot read the files");
    }
    filesD = files;
    console.log(filesD);
  });
  res.status(200).send(filesD);
});

app.get('/up', function(req, res) {
  child = exec("uptime -p", function(error, stdout, stderr) {
    jUptime = [{ uptime: stdout }];
  });
  res.status(200).send(jUptime);
});

app.get('*', function(req, res) {
  res.status(404).send('Request not recognized');
});

app.use(function(err, req, res, next) {
  if (req.xhr) {
    res.status(500).send('Something went wrong');
  } else {
    next(err);
  }
});

app.listen(3000);
console.log('Server active on port 3000');
And then I have this JS used in a simple web page:
getDown.js
var ip = require('./myapi').ipAdd;

function gDownloads() {
  var url;
  var jqxhr;
  var dat;
  url = 'http://' + ip + '/files';
  jqxhr = $.getJSON(url, function(dat) {
    for (i = 0; i < dat.length; i++) {
      $('#downLoad').append('<p>' + dat[i] + '</p>');
    }
    $('#bId').append(dat.length);
  })
  .done(function() {
    console.log("OK");
  })
  .fail(function(data) {
    console.log("Failed: " + data);
  });
}
The problem is that when I navigate to the HTML page that uses getDown.js, I get the following error from getDown.js:
require is not defined
I need to pass the variable that contains the IP address from myapi.js so I can use it in getDown.js. I hope I've explained myself well enough; thanks in advance.
require is a global that exists in Node.js code, that is, in JavaScript executing on the server.
Your server responds to the client with an HTML page to render. That HTML page can tell the browser to also request a JavaScript file from the server. When it receives that file, the client executes it. The client does not have a require global (you can test this by opening the console and typing require).
Using Browserify
You can write Node-style code, requiring modules like you're doing, and then run the code through browserify. This will create a new JavaScript bundle that the client can execute, so you should tell your HTML page to use that bundle instead of getDown.js.
Here is a basic example of using browserify like this.
module.js
function getIp() {
  return 123456;
}

module.exports = {
  getIp: getIp
};
main.js
var module = require('./module');

function getIp() {
  var ip = module.getIp();
  return ip;
}

console.log(getIp());
compile bundle
$ browserify main.js -o public/bundle.js
index.html
<script type="text/javascript" src="public/bundle.js"></script>
Global variable on the client
To use a global variable on the client which is known by the server, you can pass that variable to your rendering engine (possibly Jade if you're using Express) and have it interpolate that variable into a <script> tag which defines some globals. Leave a comment if that's the approach you'd prefer and I can add some more details.
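For illustration, a rough sketch of that approach with Express and Jade (the serverIp name here is made up): the route hands the value to the template, and the template writes it into a browser global before loading getDown.js:

// server side: pass the address into the view
app.get('/', function (req, res) {
  res.render('index', { serverIp: ip });
});

//- index.jade: expose the value as a global, then load the script
script.
  window.serverIp = '#{serverIp}';
script(src='getDown.js')

getDown.js can then build its URL from window.serverIp instead of calling require.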
Let me know if you have more questions!

Nodejs: wget, unzip and convert to js without writing to file

Well, the title says it all: I'm trying to write a script (running in a Node.js/Express server-side application) that leverages the request, unzip and xml2js libraries to perform a task consisting of fetching a zip file from a given URL, whose content is an XML file which I need to parse into a JavaScript object for some further processing.
So far I've managed to come up with:
var express = require("express");
var app = express();
/* some init code omitted */
var request = require("request");
var unzip = require("unzip");
var xml2js = require("xml2js");
var parser = new xml2js.Parser();

app.get("/import", function(req, res) {
  request("http://path.to/file.zip")
    .pipe(unzip.Parse())
    .on("entry", function(entry) {
      // This is what I'm trying to avoid, which doesn't even work
      entry.pipe(fs.createWriteStream(entry.path));
      fs.readFile(entry.path, function(err, data) {
        if (err) {
          return res.status(500).send(err);
        }
        parser.parseString(data, function(err, obj) {
          console.log(util.inspect(obj));
          /* further processing of obj */
        });
      });
    });
});
Although the contents of the XML file are correctly written to disk, I'm looking for an alternative to this approach, for two reasons:
- to save disk space, since I don't really need to keep the XML file once it has been converted to a JS object
- it doesn't even work: fs.readFile probably starts reading the file before fs.createWriteStream is done writing it, because the line console.log(util.inspect(obj)) logs null (whereas if I run only the innermost fs.readFile block and replace entry.path with the name of a previously written file, it produces the desired output)
I wish I could jot down a jsFiddle for this, but I'm clueless as to how when it comes to Express applications. Cheers.
EDITED
Piping to a file is unnecessary; parse the data directly from the entry stream:
app.get("/import", function(req, res) {
request("http://link-top.zip")
.pipe(unzip.Parse())
.on("entry", function(entry) {
var chunks = [];
var res;
if(entry.path == 'needed.xml') {
entry.on('data', function(data) {
chunks.push(data.toString());
});
entry.on('end', function () {
res = chunks.join("");
parser.parseString(res, function(err, obj) {
console.log(util.inspect(obj));
/* further processing of obj */
});
});
}
});
});
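One caveat with the snippet above: calling data.toString() on each chunk can garble a multi-byte UTF-8 character that happens to straddle a chunk boundary. Collecting raw Buffers and decoding once at the end avoids that:

entry.on('data', function (data) {
  chunks.push(data); // keep the raw Buffer
});
entry.on('end', function () {
  var xml = Buffer.concat(chunks).toString('utf8'); // decode once
  parser.parseString(xml, function (err, obj) {
    /* further processing of obj */
  });
});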

Problems in writing binary data with node.js

I am trying to write the binary body of a request to a file and failing. The file is created on the server, but I am unable to open it; I am getting 'Fatal error: Not a png' on Ubuntu. Here is how I am making the request:
curl --request POST --data-binary "@abc.png" 192.168.1.38:8080
And here is how I am trying to save it to a file. The first snippet is a middleware that appends all the data together, and the second one is the request handler:
Middleware:
app.use(function(req, res, next) {
  req.rawBody = '';
  req.setEncoding('utf-8');
  req.on('data', function(chunk) {
    req.rawBody += chunk;
  });
  req.on('end', function() {
    next();
  });
});
Handler:
exports.save_image = function (req, res) {
  fs.writeFile("./1.png", req.rawBody, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("The file was saved!");
    }
  });
  res.writeHead(200);
  res.end('OK\n');
};
Here's some info which might help: in the middleware, if I log the length of rawBody, it looks correct. I am really stuck on how to correctly save the file. All I need is a nudge in the right direction.
Here is a complete express app that works. I hit it with curl --data-binary @photo.jpg localhost:9200 and it works fine.
var app = require("express")();
var fs = require("fs");

app.post("/", function (req, res) {
  var outStream = fs.createWriteStream("/tmp/upload.jpg");
  req.pipe(outStream);
  res.send();
});

app.listen(9200);
I would just pipe the request straight to the filesystem. As to your actual problem, my first guess is the req.setEncoding('utf-8'); line, since utf-8 is for text data, not binary data.
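If you'd rather keep the buffering middleware, a sketch of a binary-safe version: drop the setEncoding call and accumulate raw Buffers, concatenating once at the end (fs.writeFile accepts a Buffer as-is):

app.use(function (req, res, next) {
  var chunks = [];
  // No req.setEncoding() here: chunks arrive as Buffers, so binary data survives.
  req.on('data', function (chunk) {
    chunks.push(chunk);
  });
  req.on('end', function () {
    req.rawBody = Buffer.concat(chunks);
    next();
  });
});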
To fix your code: I'm with @Peter Lyons that the error is probably the req.setEncoding('utf-8'); line.
I know the following doesn't answer your question directly, but it proposes an alternative using the req.files functionality provided by the multipart body-parsing middleware used with Express, which you are using.
if (req.files.photo && req.files.photo.name) {
  // Get the temporary location of the file.
  var tmp_path = req.files.photo.path;
  // Set where the file should actually live - in this case, in the "profile" directory.
  var target_path = './public/profile/' + req.files.photo.name;
  // Move the file from the temporary location to the intended location.
  fs.rename(tmp_path, target_path, function (error) {
    if (!error) {
      /*
       * Remove the old photo from the fs.
       * You can remove the following if you want to.
       */
      fs.unlink('./public/profile/' + old_photo, function (unlinkError) {
        if (unlinkError) {
          callback_parallel(unlinkError);
        } else {
          callback_parallel(null);
        }
      });
    } else {
      callback_parallel(error);
    }
  });
}

NodeJS, Multi-FileWatcher, Socket.IO and interaction with the client

I'm new to Node.js and I wonder how to properly send the content of several files using fs and Socket.IO.
My problem is more about best practices in Node/JavaScript than the raw logic of my scripts.
The purpose of my app is to watch a log file (File1.log) and a result file (File2.log).
Until File2.log contains a given string (such as "Done" or "Error"), I need to keep sending the content of File1.log to the client.
Once the key ("Error", "Done") has been read, I send the result to the client and have to launch the same process for another pair of log/result files, after closing the FileWatcher on the first one.
In the end, I need to close the connection and stop all of the sleeping FileWatcher processes.
The 'streaming' of my files works pretty well, but I am confused about the best way to switch between the different FileWatcher processes and how to notify the client.
Server.JS
/*
 * [SomeCode]...
 */
io.sockets.on('connection', function (client) {
  // Starting the process for the first pair of files
  client.on('logA', function (res) {
    var PATH_to_A = "path/to/A/directory/";
    readFile(client, PATH_to_A);
  });
  // Starting the process for the second pair of files
  client.on('logB', function (res) {
    // I need to stop the first readFile watcher process here
    var PATH_to_B = "path/to/B/directory/";
    readFile(client, PATH_to_B);
  });
});

function readFile(client, PATH) {
  var File1 = path.join(PATH, 'File1.log');
  var File2 = path.join(PATH, 'File2.log');

  // Get the file stats
  fs.stat(File1, function (err, stats) {
    if (err) throw err;
    // Send the data;
  });

  // Watch the first file
  var w1 = fs.watch(File1, function (status, file) {
    if (status == "change") {
      fs.stat(File1, function (err, stats) {
        if (err) throw err;
        // Send the data;
      });
    }
  });

  // Watch the second file
  var w2 = fs.watch(File2, function (status, file) {
    if (status == "change") {
      fs.readFile(File2, "utf8", function (err, body) {
        if (err) throw err;
        // Some code....
        client.emit('done', body);
      });
    }
  });

  // Closing the FileWatchers
  client.on('ack', function () {
    w1.close();
    w2.close();
  });
}
Client.JS
var socket = io.connect('http://127.0.0.1:8000');

// On connect, ask for the first pair of files
socket.on('connect', function(server) {
  socket.emit('init', data);
  socket.emit('logA');
});

// When the first process is done, ask for the second pair of files
socket.on('done', function(message) {
  socket.emit('ack');
  socket.emit('logB');
});
Thanks for your help!
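A minimal sketch of one way to handle the switching (assuming the same fs/path/io requires as above): keep a reference to the pair of watchers currently active for each client, and close it before starting the next pair and on disconnect, so no sleeping watchers are left behind:

io.sockets.on('connection', function (client) {
  var active = null; // the watcher pair currently alive for this client

  function stopActive() {
    if (active) {
      active.w1.close();
      active.w2.close();
      active = null;
    }
  }

  function watchPair(PATH) {
    stopActive(); // never more than one pair of watchers per client
    var w1 = fs.watch(path.join(PATH, 'File1.log'), function (status) {
      // Send File1 data to the client, as in readFile() above
    });
    var w2 = fs.watch(path.join(PATH, 'File2.log'), function (status) {
      // Check File2 for "Done"/"Error", then client.emit('done', body)
    });
    active = { w1: w1, w2: w2 };
  }

  client.on('logA', function () { watchPair("path/to/A/directory/"); });
  client.on('logB', function () { watchPair("path/to/B/directory/"); });
  client.on('disconnect', stopActive); // stop any remaining watchers
});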
