I am doing some experimenting with socket.io. I have a canvas that successfully sends data to the server which receives it fine.
It receives a Uint8ClampedArray which is correct because that is what is being sent.
When I then .send this message from the server to the client, I get a string: [object Object]. Again I have checked!
Am I missing something? The code for the server is below:
var fs, http, io, server;
fs = require('fs');
http = require('http');
server = http.createServer(function(req, res) {
return fs.readFile("" + __dirname + "/front.html", function(err, data) {
res.writeHead(200, {
'Content-Type': 'text/html'
});
return res.end(data, 'utf8');
});
});
server.listen(1337);
io = require('socket.io').listen(server);
io.sockets.on('connection', function(socket) {
socket.on('publish', function(message) {
return io.sockets.send(message);
});
});
One approach is to stringify the data to JSON before sending and parse it back on the other end. On the client:
var dataStr = JSON.stringify(data); // converts object to string
On Server:
var dataObj = JSON.parse(data); // converts string to object
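The same pairing works in the other direction. Applied to the server code above, it would look something like this (a rough sketch; in the classic socket.io API a server-side .send shows up as a 'message' event on the client, though the exact event name can vary between versions):
// server: forward the payload to all clients as a JSON string
io.sockets.on('connection', function(socket) {
    socket.on('publish', function(message) {
        io.sockets.send(JSON.stringify(message));
    });
});

// client: parse the string back into a plain object with numeric keys
socket.on('message', function(data) {
    var pixels = JSON.parse(data);
});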
The only way is to rebuild the Uint8ClampedArray on your client side. If you have this on your server side:
var x = new Uint8ClampedArray(3);
x[0] = -17;
x[1] = 93;
x[2] = 350;
var dataThatWillBeSent = JSON.stringify(x);
// '{"0":0,"1":93,"2":255,"length":3,"byteLength":3,"byteOffset":0,"buffer":{"byteLength":3}}'
In your client side, assume that you have included Uint8ClampedArray, you can do this
var dataReceived = '{"0":0,"1":93,"2":255,"length":3,"byteLength":3,"byteOffset":0,"buffer":{"byteLength":3}}';
dataReceived = JSON.parse(dataReceived);
// reconstruct Uint8ClampedArray object
I have not used Uint8ClampedArray before, so I don't know exactly how you can recover a Uint8ClampedArray object from the JSON data, but if you read the documentation you might be able to figure something out.
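For example, something along these lines should rebuild the typed array from the parsed object (a rough sketch; it assumes the parsed JSON keeps its numeric keys and a length property, as in the string above):
var parsed = JSON.parse(dataReceived);
var pixels = new Uint8ClampedArray(parsed.length);
for (var i = 0; i < parsed.length; i++) {
    pixels[i] = parsed[i]; // copy each byte back into the clamped array
}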
I have a Node.js server and I'm trying to parse a captcha by uploading a BMP image to the server. There are two problems I'm facing.
The image is not uploaded correctly. When I try to open it I get the error "BMP image has bogus header data".
Currently I'm reading a buffer from the uploaded data and parsing it to extract the captcha string. The captcha string comes out garbled (the result only gets the last 3 of the 6 characters right).
This is the code I'm using:
app.use (function(req, res, next) {
console.log("statrt");
var data="";
req.on('data', function(chunk) {
data += chunk;
});
req.on('end', function() {
console.log("end");
req.body = data;
next();
});
});
var DoneInSync = fibrous(function(buffer){
var val = captcha.getCaptcha(buffer);
console.log("this" + val);
return val;
});
app.post('/', function (req,res){
buffer = new Buffer(req.body);
fs.writeFileSync("captchas_ass.bmp", buffer);
var val = DoneInSync.sync(buffer);
res.write("Yoing -> " + val);
res.end();
});
The captcha parser code runs perfectly and has been tested thoroughly offline.
The main problem I'm facing is uploading the BMP file to the server. Once that works I can do this:
var buf = fs.readFileSync("captcha.bmp");
and pass buf to captcha.getCaptcha(buf) to get the result.
Can someone explain where the uploading goes wrong?
Your middleware is stringifying the incoming data, which it shouldn't be doing. Try this:
app.use (function(req, res, next) {
var data = [];
req.on('data', function(chunk) {
data.push(chunk);
});
req.on('end', function() {
console.log("end");
req.body = Buffer.concat(data); // `req.body` will be a Buffer
next();
});
});
This will only work if you're uploading the data as a "raw" POST request. If you're using something like multipart/form-data, it won't work either. For that you should use a middleware like multer.
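For the multipart case, a minimal multer setup might look like this (a sketch only; the field name "captcha" and the in-memory storage are assumptions, not part of your code):
var multer = require('multer');
var upload = multer({ storage: multer.memoryStorage() }); // keep uploads in RAM instead of on disk

// "captcha" must match the name of the file field in the form
app.post('/', upload.single('captcha'), function (req, res) {
    var buffer = req.file.buffer; // a Buffer with the raw BMP bytes
    var val = DoneInSync.sync(buffer);
    res.end("Yoing -> " + val);
});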
I am a newbie in the Node.js world. I am trying to insert data into MongoDB using Mongoose. The idea is:
- I will have a server running on node
- any incoming POST data will be saved in Mongo.
The problem is that when the code below is run, no data gets saved in MongoDB and no error is shown either. Am I missing something here? Any help will be really appreciated.
Here is the code that writes data to MongoDB for an incoming HTTP request.
var http = require('http') // http module
, fs = require('fs') // file system module
, qs = require('querystring') // querystring parser
, mongoose = require('mongoose');
mongoose.connect("mongodb://localhost/app_data_db");
var db = mongoose.connection;
var appDataSchema = new mongoose.Schema({
record_id: Number,
app_version: Number,
imei: String,
created_time: Date,
device_uid: String,
model: String
});
var appDataModel = mongoose.model("app_data_collection",appDataSchema);
var PORT=8080;
http.createServer(function(req,res){
if(req.method == "POST") {
var POST = {};
//parse query string
req.on('data', function(data) {
data = data.toString();
data = data.split('&');
for (var i = 0; i < data.length; i++) {
var _data = data[i].split("=");
POST[_data[0]] = _data[1];
}
db.once('open', function (callback) {
appDataModel.create({
record_id: POST["id"],
app_version: POST["app_version"],
imei: POST["imei"],
created_time: new Date((parseInt(POST["created_time"]) + 19800) *1000), // to set correct time zone IST
device_uid: POST["device_uid"],
model: POST["model"]
});
});
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end('saved to DB:'+POST["id"]+'\n');
console.log('saved to DB:'+POST["id"]+'\n');
});
mongoose.disconnect();
}
}).listen(PORT);
To test this I am manually firing this curl call:
curl -X POST --data "id=58648148&app_version=4.8&imei=355886053224492&created_time=1417372202&device_uid=e385c8a5a4c01304&model=GT-I9082" http://localhost:8080
There are a couple of problems with your code:
1) you call mongoose.disconnect outside of the callback, which means that it's called before the callback is executed
2) you're creating the model inside the callback, but sending the response outside of it, so the response is sent before the model is created
3) and finally, the create method takes a callback that fires once the entity is saved to the db, which you don't use at all
Here's the modified code:
mongoose.connect("mongodb://localhost/app_data_db");
db.on('open', function() {
http.createServer(function(req, res) {
if(req.method == "POST") {
var POST = {};
//parse query string
req.on('data', function(data) {
data = data.toString();
data = data.split('&');
for (var i = 0; i < data.length; i++) {
var _data = data[i].split("=");
POST[_data[0]] = _data[1];
}
appDataModel.create({
record_id: POST["id"],
app_version: POST["app_version"],
imei: POST["imei"],
created_time: new Date((parseInt(POST["created_time"]) + 19800) *1000), // to set correct time zone IST
device_uid: POST["device_uid"],
model: POST["model"]
},
function(err){
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end('saved to DB:'+POST["id"]+'\n');
console.log('saved to DB:'+POST["id"]+'\n');
mongoose.disconnect();
});
});
}
}).listen(PORT);
});
I think I got this working. I have moved the mongoose connection inside the request handler; the rest is the same. Since I wanted to post the code, I'm answering instead of commenting. This may help others. Here is the complete code:
var http = require('http') // http module
, fs = require('fs') // file system module
, qs = require('querystring') // querystring parser
, mongoose = require('mongoose');
var appDataSchema = new mongoose.Schema({
record_id: Number,
app_version: Number,
imei: String,
created_time: Date,
device_uid: String,
model: String
});
var appDataModel = mongoose.model("app_data_collection", appDataSchema);
var PORT = 8080;
http.createServer(function(req, res) {
if (req.method == "POST") {
var POST = {};
//parse query string
req.on('data', function(data) {
data = data.toString();
data = data.split('&');
for (var i = 0; i < data.length; i++) {
var _data = data[i].split("=");
POST[_data[0]] = _data[1];
}
mongoose.connect("mongodb://localhost/app_data_db");
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function() {
appDataModel.create({
record_id: POST["id"],
app_version: POST["app_version"],
imei: POST["imei"],
created_time: new Date((parseInt(POST["created_time"]) + 19800) * 1000), // to set correct time zone IST
device_uid: POST["device_uid"],
model: POST["model"]
}, function(err) {
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end('saved to DB:' + POST["id"] + '\n');
console.log('saved to DB:' + POST["id"] + '\n');
mongoose.disconnect();
});
});
});
}
}).listen(PORT);
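One small simplification, as a sketch: the querystring module that is already required at the top can replace the manual split loop, and it also URL-decodes the values:
req.on('data', function(data) {
    var POST = qs.parse(data.toString()); // handles '&', '=' and %xx decoding in one call
    // ...then connect and call appDataModel.create with POST exactly as above
});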
So I'm trying to write a basic file server in Node.js, and all the images I've tried uploading and storing on it are coming back as corrupted. The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again (which I have to do in order to get the POST body headers out and away from the binary data).
Here's a simple Node server that shows my current approach and the problems I've been having:
var http = require('http');
var server = http.createServer(function(request, response) {
if (request.method === "GET") {
// on GET request, output a simple web page with a file upload form
var mypage = '<!doctype html><html><head><meta charset="utf-8">' +
'<title>Submit POST Form</title></head>\r\n<body>' +
'<form action="http://127.0.0.1:8008" method="POST" ' +
'enctype="multipart/form-data"> <input name="upload" ' +
'type="file"><p><button type="submit">Submit</button>' +
'</p></form></body></html>\r\n';
response.writeHead(200, {
"Content-Type": "text/html",
"Content-Length": mypage.length
});
response.end(mypage);
} else if (request.method === "POST") {
// if we have a return post request, let's capture it
var upload = new Buffer([]);
// get the data
request.on('data', function(chunk) {
// copy post data
upload = Buffer.concat([upload, chunk]);
});
// when we have all the data
request.on('end', function() {
// convert to UTF8 so we can pull out the post headers
var str = upload.toString('utf8');
// get post headers with a regular expression
var re = /(\S+)\r\nContent-Disposition:\s*form-data;\s*name="\w+";\s*filename="[^"]*"\r\nContent-Type: (\S+)\r\n\r\n/i,
reMatch = str.match(re);
var lengthOfHeaders = reMatch[0].length,
boundary = reMatch[1],
mimeType = reMatch[2];
// slice headers off top of post body
str = str.slice(lengthOfHeaders);
// remove the end boundary
str = str.replace("\r\n" + boundary + "--\r\n", '');
// convert back to buffer
var rawdata = new Buffer(str, 'utf8');
// echo back to client
response.writeHead(200, {
"Content-Type": mimeType
});
response.end(rawdata);
});
}
});
server.listen(8008);
console.log("server running on port 8008");
To test it, run the script in node and go to 127.0.0.1:8008 in your browser. Try uploading an image and submitting the form. The image comes back as corrupt every time -- even though the script should just be directly echoing the image data back to the browser.
So does anyone know what I'm doing wrong here? Is there a better way to handle POST body headers in Node that I haven't figured out yet? (And before anyone says anything, no, I don't want to use Express. I want to figure out and understand this problem.)
The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again
I think you are right about that: converting to UTF-8 is a bad idea. You can still do the conversion just to locate the headers and boundary positions, but keep the original file buffer untouched; once you have the positions of the header and boundary, copy the binary part into a new buffer like this:
originalBuffer.copy(newBuffer,0, positionHeader, positionEndBoundary)
var http = require('http');
var fs = require('fs');
var connections = 0;
var server = http.createServer(function (req, res) {
connections++;
console.log(req.url,"connections: "+connections);
if(req.url == '/'){
res.writeHead(200, { 'content-type': 'text/html' });
res.end(
'<form action="/upload" enctype="multipart/form-data" method="post">' +
'<input type="file" name="upload" multiple="multiple"><br>' +
'<input type="submit" value="Upload">' +
'</form>'
);
}
var body = new Buffer([]);
if (req.url == '/upload') {
req.on('data', function (foo) {
//f.write(foo);
body = Buffer.concat([body,foo]);
if(isImage(body.toString())){
console.log("é imagem do tipo "+isImage(body.toString()));
}
else{
console.log("Não é imagem");
res.end("Não é imagem");
}
console.log(body.length, body.toString().length);
});
req.on('end', function () {
// console.log(req.headers);
//I converted the buffer to UTF-8 but kept the original buffer
var str = body.toString();
console.log(str.length);
imageType = isImage(body.toString());
//get the index of the last header character
//I'm just using the string to find the positions where the headers and boundaries should be cut
var index = str.indexOf(imageType)+(imageType+"\r\n\r\n").length;
// var headers= str.slice(0,index).split(';');
// console.log(headers);
//Here comes the trick
/*
* I have to cut off the trailing boundary, so I use lastIndexOf to find the closing boundary.
* That conversion is probably also where the corruption comes from: I'm not sure, but I guess
* string positions count UTF-8 characters while the buffer counts bytes,
* so I need to add the difference back here (body.length - str.length).
*/
var indexBoundayToBuffer = str.lastIndexOf('------WebKitFormBoundary')+(body.length-str.length);
console.log(index, indexBoundayToBuffer);
//maybe you can change this to use less memory, whatever
var newBuffer = Buffer.alloc(body.length);
/*
*And now use the index, and the indexBoudayToBuffer and you will have only the binary
*/
body.copy(newBuffer,0,index,indexBoundayToBuffer);
// f.end();
//file type
var type = imageType.substr("image/".length);
console.log("END");
fs.writeFile("nameFile."+type,newBuffer,function(err,ok){
if(err){
console.log(err);
return false;
}
res.end();
});
});
}
});
function isImage(str){
    if(str.indexOf('image/png') != -1) return 'image/png';
    else if(str.indexOf('image/jpeg') != -1) return 'image/jpeg';
    else if(str.indexOf('image/bmp') != -1) return 'image/bmp'; // parentheses fixed: the != -1 check belongs outside indexOf
    else if(str.indexOf('image/gif') != -1) return 'image/gif';
    else return false;
}
var port = process.env.PORT || 8080;
server.listen(port, function () {
console.log('Recording connections on port %s', port);
});
You really shouldn't use regular expressions like that to parse multipart payloads as it can easily make trying to parse your image data very unreliable. There are modules on npm that parse forms for you such as busboy, multiparty, or formidable. None of them use regular expressions and they don't require Express.
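As a rough illustration (assuming the classic Busboy constructor API; check the version you install, since newer releases changed the 'file' event signature), the POST half of the echo server above could be written without any manual boundary handling:
var http = require('http');
var Busboy = require('busboy');

http.createServer(function (req, res) {
    if (req.method === 'POST') {
        var busboy = new Busboy({ headers: req.headers });
        busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
            var chunks = [];
            file.on('data', function (chunk) { chunks.push(chunk); }); // raw Buffer chunks, never stringified
            file.on('end', function () {
                // echo the uploaded file straight back, untouched
                res.writeHead(200, { 'Content-Type': mimetype });
                res.end(Buffer.concat(chunks));
            });
        });
        req.pipe(busboy);
    }
}).listen(8008);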
I'm using Node's request module.
The response I get is "gzipped" or otherwise encoded.
How can I
1. Build the request to not encode the response?
2. Decode the response?
The data is coming from http://api.stackexchange.com.
var myRequest = require('request');
var zlib = require('zlib');
var stackRequest = require('request');
var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';
stackRequest(
{ method: 'GET'
, uri: 'http://api.stackexchange.com/2.1/questions?key=' + apikey +
'&site=stackoverflow&fromdate=' + fromdate + '&order=desc&' +
'sort=activity&tagged=' + tagged + '&filter=default'
}, function(err, response, body) {
console.log(response.body); // How can I decode this?
});
The encoding has nothing to do with request. The Stack Exchange API always returns GZip-encoded data, as explained in the API documentation. You need to use Node's zlib module to unzip the contents. This is a simple example:
var zlib = require('zlib');
// Other code
, function(err, response, body) {
// note: body must arrive as a Buffer for gunzip to work (pass encoding: null in the request options)
zlib.gunzip(body, function(err, data){
    console.log(data.toString()); // data is a Buffer holding the JSON text
});
});
The main downside of this approach is that it forces the request module to buffer the entire response content into one Buffer as body. Instead, you should normally use Node's stream system to send the data from the request directly through the unzipping library, so that you use less memory. You'll still need to join the parts together to parse the JSON, but it is still better.
var zlib = require('zlib');
var request = require('request');
var apikey = '<MyKey>';
var fromdate = '1359417601';
var tagged = 'node.js';
var compressedStream = request('http://api.stackexchange.com/2.1/questions?' +
'key=' + apikey + '&site=stackoverflow&fromdate=' + fromdate +
'&order=desc&sort=activity&tagged=' + tagged + '&filter=default');
var decompressedStream = compressedStream.pipe(zlib.createGunzip());
var chunks = [];
decompressedStream.on('data', function(chunk){
chunks.push(chunk);
});
decompressedStream.on('end', function(){
var body = Buffer.concat(chunks);
var data = JSON.parse(body);
// Do your thing
});
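Depending on which version of request is installed, there may also be a built-in gzip option that handles the decompression for you; check your version's documentation before relying on it:
var request = require('request');

request({
    uri: 'http://api.stackexchange.com/2.1/questions?site=stackoverflow&tagged=node.js',
    gzip: true,  // let request decompress the response itself
    json: true   // and parse the JSON body
}, function (err, response, body) {
    console.log(body); // already a plain JavaScript object
});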
First, set Accept-Encoding: identity as a request header. If Stack Exchange still doesn't send the data as regular UTF-8 after that, then it's a bug on their end.
Secondly, you want to set the encoding to UTF-8 so the response isn't a Buffer.
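With request, those two suggestions would look roughly like this (a sketch; be aware that the Stack Exchange API may ignore the header and compress anyway):
stackRequest({
    method: 'GET',
    uri: 'http://api.stackexchange.com/2.1/questions?site=stackoverflow&tagged=node.js',
    headers: { 'Accept-Encoding': 'identity' }, // ask the server for an uncompressed response
    encoding: 'utf8'                            // deliver the body as a string, not a Buffer
}, function(err, response, body) {
    console.log(body);
});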
Using Node v0.2.0 I am trying to fetch an image from a server, convert it into a base64 string and then embed it on the page in an image tag. I have the following code:
var express = require('express'),
request = require('request'),
sys = require('sys');
var app = express.createServer(
express.logger(),
express.bodyDecoder()
);
app.get('/', function(req, res){
if(req.param("url")) {
var url = unescape(req.param("url"));
request({uri:url}, function (error, response, body) {
if (!error && response.statusCode == 200) {
var data_uri_prefix = "data:" + response.headers["content-type"] + ";base64,";
var buf = new Buffer(body);
var image = buf.toString('base64');
image = data_uri_prefix + image;
res.send('<img src="'+image+'"/>');
}
});
}
});
app.listen(3000);
Note: This code requires "express" and "request". And of course, node. If you have npm installed, it should be as simple as "npm install express" or "npm install request".
Unfortunately, this doesn't work as expected. If I do the conversion with the Google logo, then I get the following at the beginning of the string:
77+9UE5HDQoaCgAAAA1JSERSAAABEwAAAF8IAwAAAO+/ve+/ve+/vSkAAAMAUExURQBzCw5xGiNmK0t+U++/vQUf77+9BiHvv70WKO+/vQkk77+9D
However if I use an online Base64 encoder with the same image, then it works perfectly. The string starts like this:
iVBORw0KGgoAAAANSUhEUgAAARMAAABfCAMAAAD8mtMpAAADAFBMVEUAcwsOcRojZitLflOWBR+aBiGQFiipCSS8DCm1Cya1FiyNKzexKTjDDSrLDS
Where am I going wrong that this isn't working correctly? I have tried several different JS base64 implementations and they all fail in the same way. The only thing I can think of is that I am trying to convert the wrong thing into base64, but what should I convert if that is the case?
The problem is encoding and storing binary data in javascript strings. There's a pretty good section on this under Buffers at http://nodejs.org/api.html.
Unfortunately, the easiest way to fix this involved changing the request npm. I had to add response.setEncoding('binary'); on line 66 just below var buffer; in /path/to/lib/node/.npm/request/active/package/lib/main.js. This will work fine for this request but not others. You might want to hack it so that this is only set based on some other passed option.
I then changed var buf = new Buffer(body) to var buf = new Buffer(body, 'binary');. After this, everything worked fine.
Another way to do this, if you really didn't want to touch the request npm, would be to pass an object that implements Writable Stream as the responseBodyStream argument to request. This object would then store the streamed data from the response in its own buffer. Maybe there is a library that does this already; I'm not sure.
I'm going to leave it here for now, but feel free to comment if you want me to clarify anything.
EDIT
Check out comments. New solution at http://gist.github.com/583836
The following code (available at https://gist.github.com/804225)
var URL = require('url'),
sURL = 'http://nodejs.org/logo.png',
oURL = URL.parse(sURL),
http = require('http'),
client = http.createClient(80, oURL.hostname),
request = client.request('GET', oURL.pathname, {'host': oURL.hostname})
;
request.end();
request.on('response', function (response)
{
var type = response.headers["content-type"],
prefix = "data:" + type + ";base64,",
body = "";
response.setEncoding('binary');
response.on('end', function () {
var base64 = new Buffer(body, 'binary').toString('base64'),
data = prefix + base64;
console.log(data);
});
response.on('data', function (chunk) {
if (response.statusCode == 200) body += chunk;
});
});
should also produce a data URI without requiring any external modules.
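On newer Node versions the same thing can be done without the 'binary' encoding round-trip by collecting raw Buffer chunks; a sketch using the same logo URL:
var http = require('http');

http.get('http://nodejs.org/logo.png', function (response) {
    var chunks = [];
    response.on('data', function (chunk) { chunks.push(chunk); }); // keep the raw Buffers
    response.on('end', function () {
        var prefix = 'data:' + response.headers['content-type'] + ';base64,';
        console.log(prefix + Buffer.concat(chunks).toString('base64'));
    });
});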
This works for me using request:
const url = 'http://host/image.png';
request.get({url : url, encoding: null}, (err, res, body) => {
if (!err) {
const type = res.headers["content-type"];
const prefix = "data:" + type + ";base64,";
const base64 = body.toString('base64');
const dataUri = prefix + base64;
}
});
No need for any intermediate buffers. The key is to set encoding to null.
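Plugged back into the original Express route, that approach looks something like this (a sketch; it assumes the image URL still arrives as a ?url= query parameter, as in the question):
app.get('/', function (req, res) {
    var url = unescape(req.param("url"));
    request.get({ url: url, encoding: null }, function (err, response, body) {
        if (err || response.statusCode != 200) return res.send('could not fetch image');
        var prefix = 'data:' + response.headers['content-type'] + ';base64,';
        res.send('<img src="' + prefix + body.toString('base64') + '"/>'); // body is a Buffer here
    });
});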