I've got a fairly standard MEAN project set up with the angular-fullstack generator via Yeoman.
What I'm finding is that when GETting a largish (over 65536 bytes) JSON result, it is gzip-encoded and chunked, but the JSON returned is not valid, whether viewed in Chrome or consumed by my Angular client's $resource, because it contains TWO responses!
e.g. {name:'hi'}{name:'hi'} for a single id, or [{..},{..}][{..},{..}] for an array.
The server API endpoint was auto-generated by the angular-fullstack generator and looks something like:
// Get list of worlds
exports.index = function(req, res) {
  World.find(function (err, worlds) {
    if (err) { return handleError(res, err); }
    res.json(200, worlds);
  });
};
If I slice the data so that it's small enough not to be chunked, the JSON is well formed. I've checked the Mongo DB and the data is fine there too, and debugging the worlds variable, I can JSON.stringify it and get the expected string result without any duplicates. But the moment it's sent, the JSON in the response is doubled up.
Update for comment
angular-fullstack 2.0.4
the schema looks like:
'use strict';

var mongoose = require('mongoose'),
    Schema = mongoose.Schema;

var WorldSchema = new Schema({
  name: String,
  info: String,
  active: Boolean,
  tiles: [Schema.Types.Mixed]
});

module.exports = mongoose.model('World', WorldSchema);
seeded with:
var newWorld = new WorldModel({
  _id: planet._objectId,
  name: "SimDD World",
  tiles: seed()
});
newWorld.save();

...

var seed = function () {
  var data = [];
  for (var i = 0; i < planet.HEIGHT; i++) {
    for (var j = 0; j < planet.WIDTH; j++) {
      data.push({
        coords: {
          x: i,
          y: j
        },
        type: '.'
      });
    }
  }
  return data;
};
This looks like it is caused by the compression middleware; removing app.use(compression()); from the Express config seems to fix it.
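For reference, that middleware is registered in the generated Express config; commenting it out would look something like this (a sketch only, and the exact file layout may differ between generator versions):

// config/express.js (generated by angular-fullstack)
var compression = require('compression');
...
module.exports = function(app) {
  ...
  // app.use(compression()); // disabled: works around the duplicated JSON body
  ...
};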
The issue is seen in browsers but not in Postman. I checked the HTTP request headers, and when I add an Accept header of html in Postman, the same problem appears there as well. So I believe browsers trigger it by sending an html Accept type.
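You can reproduce this outside the browser by sending the same Accept header yourself. A minimal sketch using Node's built-in http module (the port and path are assumptions based on the angular-fullstack dev defaults; adjust to your setup):

var http = require('http');
var zlib = require('zlib');

http.get({
  host: 'localhost',
  port: 9000,          // assumed dev-server port
  path: '/api/worlds', // assumed endpoint from the question
  headers: { 'Accept': 'text/html', 'Accept-Encoding': 'gzip' }
}, function (res) {
  var chunks = [];
  res.on('data', function (c) { chunks.push(c); });
  res.on('end', function () {
    var body = Buffer.concat(chunks);
    if (res.headers['content-encoding'] === 'gzip') {
      body = zlib.gunzipSync(body);
    }
    // with the bug present, the decoded body contains the JSON twice
    console.log(body.toString());
  });
});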
I came across the same problem when building my angular-fullstack app (thanks, DaftMonk). After some extensive debugging using node-inspector, it turns out the JSON data gets passed to the livereload module and gets duplicated when it comes out. Disabling this middleware eliminated the problem for me:
// app.use(require('connect-livereload')());
Does this work for you? I don't see a reason why it shouldn't.
I assume you have a planet object that has HEIGHT, WIDTH and _objectId properties.
Remember, if you modify a Mixed type you need to tell Mongoose that the value changed and subsequently save it:
http://mongoosejs.com/docs/schematypes.html#mixed
var mongoose = require('mongoose'); // needed for Types.ObjectId below
var WorldModel = require('../api/world/world.model');
var planet = require('planetSeedData');

var seed = function() {
  var data = [];
  for (var i = 0; i < planet.HEIGHT; i++) {
    for (var j = 0; j < planet.WIDTH; j++) {
      data.push({
        coords: {x: i, y: j},
        type: '.'
      });
    }
  }
  return data;
};

var myPlanet = {
  _id: mongoose.Types.ObjectId(planet._objectId),
  name: "SimDD World",
  tiles: seed()
};

WorldModel.create(myPlanet);

// if you modify the Mixed 'tiles' value later, you would do something like:
// world.markModified('tiles');
// world.save();
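For the markModified part, a minimal sketch (world is a loaded document here; markModified and save are document methods, not model methods):

WorldModel.findById(someId, function (err, world) {
  if (err) throw err;
  world.tiles[0].type = '#';   // a change inside a Mixed array
  world.markModified('tiles'); // Mongoose cannot detect this change by itself
  world.save();
});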
Related
I have a NodeJS / Express RESTful API that proxies requests to an Active Directory LDAP server. I do this because LDAP queries tend to be slow, so I use the RESTful API to cache and refresh data intermittently. I recently attempted to add the thumbnail photo. From my research, it appears that the library I am using, ldapjs, converts the native LDAP byte array to a string.
Example of what this looks like:
\ufffd\ufffd\ufffd\ufffd\u0000\u0010JFIF\u0000\u0001\u0000\u0001\u0000x\u0000x\u0000\u0000\ufffd\ufffd\u0000\u001fLEAD
Technologies Inc.
V1.01\u0000\ufffd\ufffd\u0000\ufffd\u0000\u0005\u0005\u0005\b\
Because of this, the image does not render correctly in the Angular client app. Based on my research, here are some of my attempts at correcting the problem:
Convert the string to a byte array using different methods (see the code examples below)
Modify the ldapjs library to return the data as a byte array in the RESTful API, as in the following, and then bind the byte stream to the Angular page:
https://github.com/joyent/node-ldapjs/issues/137
https://csjdpw.atlassian.net/wiki/spaces/~Genhan.Chen/pages/235044890/Display+LDAP+thumbnail+photos
html binding:
<div>
  <img *ngIf="userImage" [src]="userImage" alt="{{dataSource.sAMAccountName}}">
</div>
controller:
public get userImage() {
  let value = null;
  if (this.dataSource.thumbnailPhoto) {
    const byteArray = this.string2Bin(this.dataSource.thumbnailPhoto);
    const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
    value = this.domSanitizer.bypassSecurityTrustUrl(image);
  }
  return value;
}

private string2Bin(str) {
  var result = [];
  for (var i = 0; i < str.length; i++) {
    result.push(str.charCodeAt(i));
  }
  return result;
}
An alternate version of the controller:
public get userImage() {
  let value = null;
  if (this.dataSource.thumbnailPhoto) {
    const byteArray = new TextEncoder().encode(this.dataSource.thumbnailPhoto);
    const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
    value = this.domSanitizer.bypassSecurityTrustUrl(image);
  }
  return value;
}
And another alternate version:
public get userImage() {
  let value = null;
  if (this.dataSource.thumbnailPhoto) {
    const blob = new Blob([Buffer.from(this.dataSource.thumbnailPhoto).toString('base64')], { type: 'image/jpeg' });
    value = window.URL.createObjectURL(blob);
  }
  return value;
}
I expected a rendered image on the Angular page, but all I get is the broken-image placeholder.
Here are the versions of the libraries I am using:
Angular - 8.0.3
NodeJS - 10.15.0
ldapjs - 1.0.2
I am sure I am missing something; I am just not sure what it is. Any assistance would be appreciated.
After some guidance provided by @Aritra Chakraborty, I checked the RESTful API source code. It appears to be a problem with the ldapjs library: when using the entry object conversion, it does something strange with the data that makes it unusable. I then realized I had access to the entry raw format, which is the byte array. Instead of trying to convert to base64 on the client, I moved that step to the API, then just mapped it back in the client binding, and bang, it worked.
Here is some example code:
RESTful API:
_client.search(this._search_dn, opts, (error, res) => {
  res.on("searchEntry", (entry) => {
    let result = {};
    result.id = string_service.formatGUID(JSON.parse(JSON.stringify(entry.raw)).objectGUID);
    result = Object.assign({}, result, entry.object);
    if (entry.raw.thumbnailPhoto) {
      result.thumbnailPhoto = entry.raw.thumbnailPhoto.toString('base64');
    }
    // The previous 3 lines (the thumbnailPhoto block) did not exist previously
    // ...
  });
});
On the Angular 8 client I simplified the binding:
public get userImage() {
  let value = null;
  if (this.dataSource.thumbnailPhoto) {
    const image = `data:image/jpeg;base64,${this.dataSource.thumbnailPhoto}`;
    value = this.domSanitizer.bypassSecurityTrustUrl(image);
  }
  return value;
}
I hope someone finds some value in this.
I am trying to send a DELETE request from Angular 4 to MongoDB.
I have an array of ids that I want to delete, and in my service I have this function:
deleteData(id) {
  return this.http.delete(this.api, id);
}
Then in my component I build an array of objects, because I need to pass the data to the backend as JSON (as a plain array it probably won't be recognised):
deleteData(data) {
  const dataToSend = [];
  let oneDataToSend;
  for (let i = 0; i < data.length; i++) {
    oneDataToSend = {'_id': ''};
    oneDataToSend._id = data[i];
    dataToSend.push(oneDataToSend);
  }
  this.service.deleteData(dataToSend).subscribe((res) => {
    console.log(data);
  });
}
And after that I try to delete the objects whose ids match the ones in the query:
app.delete('/tasks', function(req, res) {
  console.log(req.body);
  var ids = [];
  for (let i = 0; i < req.body.length; i++) {
    ids.push(req.body[i]._id);
  }
  var myquery = { _id: { $in: ids } };
  Model.collection.deleteMany(myquery, function(err, obj) {
    if (err) throw err;
  });
});
Here I hit the problem that req.body is empty ({}).
Also, in the Network section of the console I see 2 requests:
OPTIONS (with status code 204 No Content)
DELETE, without any info
Could you please give me a hint and help me solve this problem?
The reason you see the OPTIONS request with a 204 No Content response is Cross-Origin Resource Sharing (CORS). The client is checking whether it is allowed to make a DELETE request to your backend.
Consider allowing CORS in the backend application. For an easy way in Express, use expressjs/cors. However, make sure you understand the security implications of allowing cross-site requests.
Also, are you parsing the request body? If not, it will be undefined in req.body.
You can use express.json(); it will attempt to parse the JSON of the request body and save it to req.body, but only if the Content-Type: application/json header is sent along with the request:
const app = express();
app.use(express.json()); // Parses request body if type is json. Saves to req.body.
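Putting the two together, a minimal sketch of the backend setup (the /tasks route and Model are from your question; the cors usage follows the expressjs/cors package):

const express = require('express');
const cors = require('cors');

const app = express();
app.use(cors());         // answers the OPTIONS preflight and sets CORS headers
app.use(express.json()); // parses JSON request bodies into req.body

app.delete('/tasks', function (req, res) {
  console.log(req.body); // should now contain your array of {_id: ...} objects
  // ...collect the ids and call Model.collection.deleteMany as before, then:
  res.json({ ok: true }); // remember to send a response, or the request hangs
});

One more caveat, depending on your Angular version: http.delete(url, body) does not send the second argument as a body. With Angular 4's Http you would pass it in the options object, e.g. this.http.delete(this.api, { body: dataToSend }), or use http.request('delete', ...) instead.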
I have a custom create method I am trying to add to my Founder model in my Sails.js application. The form includes files as well as normal form fields, following the advice of this tutorial.
I am trying to offload a lot of the controller code into the Founder model for processing. This makes things a little awkward and results in the following error:
TypeError: Cannot read property 'stream' of undefined
This is the result of the fourth line of this model function:
createFromForm: function(opts, cb) {
  var id = opts.id;
  var params = opts.params;
  var newFilename = opts.avatartwo._files[0].stream.filename;
  // ...
In the controller, the analogous code, following Sails syntax, would be:
var newFilename = req.file('avatartwo')._files[0].stream.filename;
I tried simply writing the result of req.file('avatartwo') into an option that I passed to the model function via:
var opts = {
  params: params,
  id: id,
  avatartwo: req.file('avatartwo')
};
This seems problematic, but because I am not the most informed on the subject, I do not know why. I would appreciate some elucidation here. Thanks!
To handle file uploads in Sails.js, use the following code:
req.file('some_file').upload({
  dirname: 'path to store the file', // optional; defaults to .tmp/uploads
  saveAs: 'new file name',           // optional; defaults to the original name
  maxBytes: 5 * 1024 * 1024          // 5 MB
}, function(err, uploadedFiles) {
  if (err) {
    return res.json(500, err);
  } else if (uploadedFiles.length === 0) {
    // handle the case where no files were uploaded
  } else {
    // do processing with the file descriptor available at uploadedFiles[0].fd
    // and pass it to the model in your case:
    var opts = {
      params: params,
      id: id,
      avatartwo: uploadedFiles[0].fd
    };
  }
});
I am currently trying to return a list of all the file names (in each existing folder) on a particular website. My web application uses NodeJS, Express, Cheerio, and Request for web scraping. The code first gets a list of all the folder names. After retrieving the folder names, it goes inside each folder to get a list of the file names and stores them in the 'files' array. Finally, the 'files' array is what will be sent to the client side.
Right now I am having a big issue with the asynchronous flow, because my request always returns an empty list of 'files'. I have the Q node module installed and have tried using promises, but have had no luck getting the results I want. I am still new to NodeJS and would love it if someone could help me out. :)
exports.getAllImages = function(req, res) {
  var folders = [];
  var files = [];

  // Step 1: Get folder names and store all of them in the 'folders' array
  var foldersUrl = 'http://students.washington.edu/jmzhwng/Images/';
  request(foldersUrl, function(error, response, html) {
    if (!error) {
      var $ = cheerio.load(html);
      $("a:contains('-')").filter(function() {
        var data = $(this)[0].attribs.href;
        folders.push(data);
      });

      // Step 2: Using the 'folders' array, get the file names in each folder
      // and store all of them in the 'files' array
      for (var i = 0; i < folders.length; i++) {
        var imagesUrl = 'http://students.washington.edu/jmzhwng/Images/' + folders[i];
        request(imagesUrl, function(error, response, html) {
          if (!error) {
            var $ = cheerio.load(html);
            $("a:contains('.')").filter(function() {
              var data = $(this)[0].attribs.href;
              files.push(data);
            });
          }
        });
      }

      // Step 3: Return all file names to the client side
      res.json({
        images: files
      }, 200);
      console.log('GET ALL IMAGES - ' + JSON.stringify(files));
    }
  });
};
For better readability, you can view the JSFiddle I created here: http://jsfiddle.net/fKGrm/
You don't necessarily need promises for this: you're 95% of the way there already without them. The main issue, as I think you're aware, is that your response is sent before the image requests come back. You just need to wait for those to finish before you send the response.
The most basic way is to count the callbacks you receive in Step 2. When the count equals folders.length, send your response.
Here’s a simplified version of that:
var request = require('request'),
    cheerio = require('cheerio');

var baseUrl = 'http://students.washington.edu/jmzhwng/Images/';
var files = [];

request(baseUrl, function (error, res, body) {
  var folders = folderLinks(cheerio.load(body));
  var count = 0; // 'var' added: otherwise count leaks into the global scope
  folders.forEach(function (folder) {
    request(baseUrl + folder, function (error, res, body) {
      files.push.apply(files, fileLinks(cheerio.load(body)));
      if (++count === folders.length) {
        console.log(files);
      }
    });
  });
});

function folderLinks($) {
  return $('a:contains(-)').get().map(function (a) {
    return a.attribs.href;
  });
}

function fileLinks($) {
  return $('a:contains(.)').get().map(function (a) {
    return a.attribs.href;
  });
}
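For reference, the same idea can also be written with promises; a sketch assuming a Node version with util.promisify available (it reuses the folderLinks and fileLinks helpers above):

var util = require('util');
var request = require('request');
var cheerio = require('cheerio');

var get = util.promisify(request);
var baseUrl = 'http://students.washington.edu/jmzhwng/Images/';

get(baseUrl).then(function (res) {
  var folders = folderLinks(cheerio.load(res.body));
  // fire all folder requests in parallel and wait for every one to finish
  return Promise.all(folders.map(function (folder) {
    return get(baseUrl + folder).then(function (res) {
      return fileLinks(cheerio.load(res.body));
    });
  }));
}).then(function (fileLists) {
  // flatten the per-folder arrays into a single list
  var files = [].concat.apply([], fileLists);
  console.log(files);
});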
I get a PKCS#7 crypto package from a 3rd-party system.
The package is not compressed and not encrypted; it is PEM-encoded and signed with an X.509 certificate.
I also have a PEM cert file from the provider.
The data inside is XML.
I need to do the following in Node.JS:
extract the data
verify the signature
A sample package (no sensitive info; the data refers to our QA system): http://pastebin.com/7ay7F99e
OK, finally got it.
First of all, PKCS messages are complex structures binary-encoded using ASN.1.
Second, they can be serialized to binary files (DER encoding) or to text PEM files using Base64 encoding.
Third, the PKCS#7 format specifies several package types, of which mine is called Signed Data. These formats are distinguished by the OBJECT IDENTIFIER value at the beginning of the ASN.1 object (the 1st element of the wrapper sequence); you can go to http://lapo.it/asn1js/ and paste in the package text for the fully parsed structure.
Next, we need to parse the package (Base64 -> ASN.1 -> some object representation). Unfortunately, there's no npm package for that. I found quite a good project, forge, that is not published to the npm registry (though it is npm-compatible). It parses the PEM format, but the resulting tree is quite an unpleasant thing to traverse. Based on their Encrypted Data and Enveloped Data implementations, I created a partial implementation of Signed Data in my own fork. UPD: my pull request was later merged into the forge project.
Now finally we have the whole thing parsed.
At that point I found a great (and probably the only one on the whole web) explanatory article on signed PKCS#7 verification: http://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html
I was able to extract and successfully decode the signature from the file, but the hash inside was different from the data's hash. God bless Chris, who explained what actually happens.
The data signing process has 2 steps:
the original content's hash is calculated
a set of "Authenticated Attributes" is constructed, including: the type of the data signed, the signing time, and the data hash
Then the set from step 2 is signed using the signer's private key.
Due to PKCS#7 specifics, this set of attributes is stored inside a context-specific constructed type (class=0x80, type=0), but it should be signed and validated as a normal SET (class=0, type=17).
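In forge terms, that re-tagging boils down to a single asn1.create call; a minimal sketch (attrs is the captured attribute list, exactly as in the code below):

// re-wrap the attributes captured from the [0] IMPLICIT context tag
// as a universal SET; the DER of this SET is the byte string that was signed
var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);
var der = asn1.toDer(set).data;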
As Chris mentions (https://stackoverflow.com/a/16154756/108533), this only verifies that the attributes in the package are valid. We should also validate the actual data hash against the digest attribute.
So finally, here's the code doing the validation (cert.pem is the certificate file that the provider sent me; package is the PEM-encoded message I got from them over HTTP POST):
var fs = require('fs');
var crypto = require('crypto');
var forge = require('forge');
var pkcs7 = forge.pkcs7;
var asn1 = forge.asn1;
var oids = forge.pki.oids;

var folder = '/a/path/to/files/';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();

var res = true;
try {
  var msg = pkcs7.messageFromPem(pkg);

  var attrs = msg.rawCapture.authenticatedAttributes;
  var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);
  var buf = Buffer.from(asn1.toDer(set).data, 'binary');
  var sig = msg.rawCapture.signature;

  var v = crypto.createVerify('RSA-SHA1');
  v.update(buf);
  if (!v.verify(cert, sig)) {
    console.log('Wrong authorized attributes!');
    res = false;
  }

  var h = crypto.createHash('SHA1');
  var data = msg.rawCapture.content.value[0].value[0].value;
  h.update(data);

  var attrDigest = null;
  for (var i = 0, l = attrs.length; i < l; ++i) {
    if (asn1.derToOid(attrs[i].value[0].value) === oids.messageDigest) {
      attrDigest = attrs[i].value[1].value[0].value;
    }
  }

  // attrDigest is a binary-encoded string as captured by forge, so take the
  // digest in 'binary' encoding; a plain h.digest() returns a Buffer and
  // would never compare equal to it
  var dataDigest = h.digest('binary');
  if (dataDigest !== attrDigest) {
    console.log('Wrong content digest');
    res = false;
  }
}
catch (_e) {
  console.dir(_e);
  res = false;
}

if (res) {
  console.log("It's OK");
}
Your answer is a big step in the right direction. You are, however, missing an essential part of the validation!
You should verify the hash of the data against the digest contained in the signed attributes. Otherwise it would be possible for someone to replace the content with malicious data. Try, for example, validating the following 'package' with your code (and have a look at the content): http://pastebin.com/kaZ2XQQc
I'm not much of a NodeJS developer (this is actually my first try :p), but here's a suggestion to help you get started.
var fs = require('fs');
var crypto = require('crypto');
var pkcs7 = require('./js/pkcs7'); // forge from my own fork
var asn1 = require('./js/asn1');

var folder = '';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();

try {
  var msg = pkcs7.messageFromPem(pkg);
  var attrs = msg.rawCapture.authenticatedAttributes; // got the list of auth attrs
  var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); // packed them inside of the SET object
  var buf = Buffer.from(asn1.toDer(set).data, 'binary'); // DO NOT forget 'binary', otherwise it tries to interpret bytes as UTF-8 chars
  var sig = msg.rawCapture.signature;

  var shasum = crypto.createHash('sha1'); // better to base this on msg.rawCapture.digestAlgorithms
  shasum.update(msg.rawCapture.content.value[0].value[0].value);

  for (var n in attrs) {
    var attrib = attrs[n].value;
    var attrib_type = attrib[0].value;
    var attrib_value = attrib[1].value[0].value;
    if (attrib_type == "\x2a\x86\x48\x86\xf7\x0d\x01\x09\x04") { // better would be to use the OID (1.2.840.113549.1.9.4)
      if (shasum.digest('binary') == attrib_value) {
        console.log('hash matches');
        var v = crypto.createVerify('RSA-SHA1');
        v.update(buf);
        console.log(v.verify(cert, sig)); // -> should print true
      } else {
        console.log('hash mismatch');
      }
    }
  }
}
catch (_e) {
  console.dir(_e);
}
Based on inspiration from this answer, I've implemented a sample for signing and verifying PDF files using node-signpdf and node-forge.