Render an image byte stream on angular client side app - javascript

I have a NodeJS/Express RESTful API that proxies requests to an Active Directory LDAP server. I do this because LDAP queries tend to be slow, so I use the RESTful API to cache and refresh the data intermittently. I recently attempted to add the thumbnail photo. From my research, it appears that the library I am using, ldapjs, is converting the native LDAP byte array to a string.
Example of what this looks like:
\ufffd\ufffd\ufffd\ufffd\u0000\u0010JFIF\u0000\u0001\u0000\u0001\u0000x\u0000x\u0000\u0000\ufffd\ufffd\u0000\u001fLEAD
Technologies Inc.
V1.01\u0000\ufffd\ufffd\u0000\ufffd\u0000\u0005\u0005\u0005\b\
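To show why this matters, here is a minimal Node sketch (my illustration, not from the original data): once a byte sequence that is not valid UTF-8 has been decoded to the replacement character U+FFFD, the original byte values are unrecoverable, which is exactly what the \ufffd runs above indicate.
const jpegHeader = Buffer.from([0xff, 0xd8, 0xff, 0xe0]); // JPEG SOI/APP0 marker bytes
const asString = jpegHeader.toString('utf8');             // '\ufffd\ufffd\ufffd\ufffd'
console.log(asString.charCodeAt(0));                      // 65533 (U+FFFD), not 0xff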
Due to this fact, the image does not render correctly on the Angular client app. Based on my research, here are some of my attempts at correcting the problem:
Convert the string to a byte array using different methods (see code examples below)
Modify the ldapjs library to return the data as a byte array from the RESTful API, as in the following links, then bind the byte stream to the Angular page:
https://github.com/joyent/node-ldapjs/issues/137
https://csjdpw.atlassian.net/wiki/spaces/~Genhan.Chen/pages/235044890/Display+LDAP+thumbnail+photos
HTML binding:
<div>
    <img *ngIf="userImage" [src]="userImage" alt="{{dataSource.sAMAccountName}}">
</div>
controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        // Convert the (already mangled) string to char codes, then to base64
        const byteArray = this.string2Bin(this.dataSource.thumbnailPhoto);
        const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}

private string2Bin(str) {
    const result = [];
    for (let i = 0; i < str.length; i++) {
        result.push(str.charCodeAt(i));
    }
    return result;
}
An alternate version of the controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const byteArray = new TextEncoder().encode(this.dataSource.thumbnailPhoto);
        const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}
Another alternate version of the controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const blob = new Blob([Buffer.from(this.dataSource.thumbnailPhoto).toString('base64')], { type: 'image/jpeg' });
        value = window.URL.createObjectURL(blob);
    }
    return value;
}
I expected a rendered image on the Angular page, but all I get is the non-rendered placeholder.
Here are the versions of the libraries I am using:
Angular - 8.0.3
NodeJS - 10.15.0
ldapjs - 1.0.2
I am sure I am missing something; I am just not sure what it is. Any assistance would be appreciated.

So after some guidance provided by @Aritra Chakraborty, I checked the RESTful API source code. It appears to be a problem with the ldapjs library: when using the entry object conversion, it does something strange with the data that makes it unusable. I then realized I had access to the entry's raw format, which is the byte array. Instead of trying to convert to base64 on the client, I moved this to the API, then just mapped it back in the client binding, and bang, it worked.
Here is some example code:
RESTful API:
_client.search(this._search_dn, opts, (error, res) => {
    res.on("searchEntry", (entry) => {
        let result = {};
        result.id = string_service.formatGUID(JSON.parse(JSON.stringify(entry.raw)).objectGUID);
        result = Object.assign({}, result, entry.object);
        // The following three lines did not exist previously:
        if (entry.raw.thumbnailPhoto) {
            result.thumbnailPhoto = entry.raw.thumbnailPhoto.toString('base64');
        }
        // ... (rest of the handler unchanged)
    });
});
On the Angular 8 client I simplified the binding:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const image = `data:image/jpeg;base64,${this.dataSource.thumbnailPhoto}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}
I hope someone finds some value in this.

Related

How do I write an LZ-compressed string to a text file using JXA?

I am trying to write a JXA script in Apple's Script Editor that compresses a string using the LZ algorithm and writes it to a text (JSON) file:
var story = "Once upon a time in Silicon Valley..."
var storyC = LZString.compress(story)
var data_to_write = "{\x22test\x22\x20:\x20\x22"+storyC+"\x22}"
app.displayAlert(data_to_write)
var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`
writeTextToFile(data_to_write, file, true)
Everything works, except that the LZ compressed string is just transformed to a set of "?" by the time it reaches the output file, test.json.
It should look like:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
Instead it looks like:
{"test" : "????????????????????"}
I have a feeling the conversion is happening in the app.write command used by the writeTextToFile() function (which I pulled from an example in Apple's Mac Automation Scripting Guide):
var app = Application.currentApplication()
app.includeStandardAdditions = true

function writeTextToFile(text, file, overwriteExistingContent) {
    try {
        // Convert the file to a string
        var fileString = file.toString()
        // Open the file for writing
        var openedFile = app.openForAccess(Path(fileString), { writePermission: true })
        // Clear the file if content should be overwritten
        if (overwriteExistingContent) {
            app.setEof(openedFile, { to: 0 })
        }
        // Write the new content to the file
        app.write(text, { to: openedFile, startingAt: app.getEof(openedFile) })
        // Close the file
        app.closeAccess(openedFile)
        // Return a boolean indicating that writing was successful
        return true
    }
    catch (error) {
        try {
            // Close the file
            app.closeAccess(file)
        }
        catch (error) {
            // Report the error if closing failed
            console.log(`Couldn't close file: ${error}`)
        }
        // Return a boolean indicating that writing failed
        return false
    }
}
Is there a substitute command for app.write that maintains the LZ-compressed string, or a better way to accomplish what I am trying to do?
In addition, I am using the readFile() function (also from the Scripting Guide) to load the LZ string back into the script:
function readFile(file) {
    // Convert the file to a string
    var fileString = file.toString()
    // Read the file and return its contents
    return app.read(Path(fileString))
}
But rather than returning:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
It is returning:
"{\"test\" : \"㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ\"}"
Does anybody know a fix for this too?
I know that it is possible to use Cocoa in JXA scripts, so maybe the solution lies therein?
I am just getting to grips with JavaScript so I'll admit trying to grasp Objective-C or Swift is way beyond me right now.
I look forward to any solutions and/or pointers that you might be able to provide me. Thanks in advance!
After some further Googling, I came across these two posts:
How can I write UTF-8 files using JavaScript for Mac Automation?
read file as class utf8
I have thus altered my script accordingly.
writeTextToFile() now looks like:
function writeTextToFile(text, file) {
    // source: https://stackoverflow.com/a/44293869/11616368
    var nsStr = $.NSString.alloc.initWithUTF8String(text)
    var nsPath = $(file).stringByStandardizingPath
    var successBool = nsStr.writeToFileAtomicallyEncodingError(nsPath, false, $.NSUTF8StringEncoding, null)
    if (!successBool) {
        throw new Error("function writeFile ERROR:\nWrite to File FAILED for:\n" + file)
    }
    return successBool
};
While readFile() looks like:
ObjC.import('Foundation')

const readFile = function (path, encoding) {
    // source: https://github.com/JXA-Cookbook/JXA-Cookbook/issues/25#issuecomment-271204038
    var pathString = path.toString()
    !encoding && (encoding = $.NSUTF8StringEncoding)
    const fm = $.NSFileManager.defaultManager
    const data = fm.contentsAtPath(pathString)
    const str = $.NSString.alloc.initWithDataEncoding(data, encoding)
    return ObjC.unwrap(str)
};
Both use Objective-C to overcome app.write and app.read's inability to handle UTF-8.
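For completeness, a minimal round-trip sketch using the two functions above (assuming LZString is already loaded into the script; this usage example is mine, not from the original posts):
var story = LZString.compress("Once upon a time in Silicon Valley...")
var file = app.pathTo("desktop").toString() + "/test.json"
writeTextToFile(JSON.stringify({ test: story }), file)
var obj = JSON.parse(readFile(file))           // parse, rather than eyeballing the escaped string
var original = LZString.decompress(obj.test)   // back to the original text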

Can't save/create files using Store.js

So I wanted to save a file to client storage using Store.js.
I can change the data using store.set, and I can log it to the console to see the change, but the file that is supposed to be saved in the app data directory is never created.
I tried to get the path where it is being saved, and it is:
C:\Users\USER\AppData\Roaming\stoma2/Categories.json
I noticed that there is a "/", so I tried:
C:\Users\USER\AppData\Roaming\stoma2\Categories.json
and:
C:/Users/USER/AppData/Roaming/stoma2/Categories.json
But none of the three worked.
This is my Store.js:
const fs = require('browserify-fs');
var fs2 = require('filereader'), Fs2 = new fs2();
const electron = window.require('electron');
const path = require('path');

class Store {
    constructor(opts) {
        // Renderer process has to get `app` module via `remote`, whereas the main process can get it directly
        // app.getPath('userData') will return a string of the user's app data directory path.
        //const userDataPath = (electron.app || electron.remote.app).getPath('userData');
        var userDataPath = (electron.app || electron.remote.app).getPath('userData');
        for (var i = 0; i < userDataPath.length; i++) {
            if (userDataPath.charAt(i) == "\\") {
                userDataPath = userDataPath.replace("\\", "/");
            }
        }
        // We'll use the `configName` property to set the file name and path.join to bring it all together as a string
        this.path = path.join(userDataPath, opts.configName + '.json');
        this.data = parseDataFile(this.path, opts.defaults);
        console.log(this.path);
    }

    // This will just return the property on the `data` object
    get(key) {
        return this.data[key];
    }

    // ...and this will set it
    set(key, val) {
        this.data[key] = val;
        // Wait, I thought using the node.js' synchronous APIs was bad form?
        // We're not writing a server so there's not nearly the same IO demand on the process
        // Also if we used an async API and our app was quit before the asynchronous write had a chance to complete,
        // we might lose that data. Note that in a real app, we would try/catch this.
        fs.writeFile(this.path, JSON.stringify(this.data));
    }
}

function parseDataFile(filePath, data) {
    // We'll try/catch it in case the file doesn't exist yet, which will be the case on the first application run.
    // `fs.readFileSync` will return a JSON string which we then parse into a Javascript object
    try {
        return JSON.parse(Fs2.readAsDataURL(new File(filePath)));
    } catch (error) {
        // if there was some kind of error, return the passed in defaults instead.
        return data;
    }
}

// expose the class
export default Store;
There might be a problem with fs.writeFile() (that is likely the source of the problem).
And this is my call:
// creation
const storeDefCat = new Store({
    configName: "Categories",
    defaults: require("../data/DefaultCategorie.json")
})

// call for the save
storeDefCat.set('Pizza', { id: 0, path: storeDefCat.get('Pizza').path });
For now, if possible, I might need to find another way to save the file.
I also tried plain fs: it doesn't work for me for some reason (I get strange errors that I haven't been able to fix).
If anyone has an idea, I would be grateful.
So I managed to fix the problem. Why was fs sending me errors about undefined functions? Why wasn't the file getting created? It had NOTHING to do with the code itself, but with the imports...
To clarify, I was using:
const fs = require('fs');
And the solution is to make it:
const fs = window.require('fs');
Just adding window. fixed all the problems. Since it's my first time using Electron, I wasn't used to importing from the window, but it seems to be necessary. Moreover, there were no posts saying this was the fix.
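For context, a minimal sketch of the renderer-side imports after the fix (assuming nodeIntegration is enabled, since window.require only exists in an Electron renderer that exposes Node):
// window.require reaches Electron's Node runtime directly; a bare require()
// is resolved by the web bundler at build time, which is why stand-ins like
// browserify-fs were needed before.
const fs = window.require('fs');
const electron = window.require('electron');
const path = window.require('path');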

Nativescript get base64 data from native image

I am trying to use https://github.com/bradmartin/nativescript-drawingpad to save a signature to my backend, but I am simply not able to find a way to get some "useful" data from getDrawing(), which returns a native image object (for example, UIImage on iOS).
I would love to "convert" the image data to a base64 (PNG, or whatever) string and send it to my server.
I tried something like:
var ImageModule = require("ui/image");
var ImageSourceModule = require("image-source");

elements.drawingpad.getDrawing().then(function (a) {
    var image = ImageSourceModule.fromNativeSource(a);
    api.post("sign", image.toBase64String());
});
I also tried to simply post a itself, as seen in the demo code.
I would really love to see a demo of how to get my hands on the "image data" itself.
Thanks!
Thanks to @bradmartin I found the solution:
var image = ImageSourceModule.fromNativeSource(a);
var base64 = image.toBase64String('png');
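Put together with the drawingpad code from the question (api.post is the asker's own helper, not a library call), that looks roughly like:
elements.drawingpad.getDrawing().then(function (a) {
    var image = ImageSourceModule.fromNativeSource(a);
    var base64 = image.toBase64String('png');
    api.post("sign", base64);
});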
Actually, after lots of digging and tons of errors, I finally figured it out.
First you have to require the nativescript-imagepicker module and also the NativeScript image-source module:
var imagepicker = require("nativescript-imagepicker");
var ImageSourceModule = require("tns-core-modules/image-source");
Here is a case where you want to update a user profile and also send a base64 string to your backend for processing:
function changeProfileImage(args) {
    var page = args.object;
    var profile = page.getViewById("profile-avatar");
    var context = imagepicker.create({ mode: "single" });

    context.authorize().then(function () {
        return context.present();
    }).then(function (selection) {
        profile.background = `url(${selection[0]._android})`;
        profile.backgroundRepeat = `no-repeat`;
        profile.backgroundSize = `cover`;
        ImageSourceModule.fromAsset(selection[0]).then(image => {
            var base64 = image.toBase64String('png');
            // console.log(base64);
            uploadMediaFile(base64);
        });
    }).catch(function (e) {
        // process error
        console.log(e);
    });
}

Duplicate chunks on JSON response

I've got a fairly standard MEAN project set up with the angular-fullstack generator using Yeoman.
What I'm finding is that when GETting a largish (over 65536 bytes) JSON result, it is encoded using gzip and chunked, but the JSON returned is not valid, whether viewed in Chrome or consumed by my Angular client's $resource, because it contains TWO responses!
e.g. {name:'hi'}{name:'hi'} for a single id, or [{..},{..}][{..},{..}] for an array.
The server API endpoint was autogenerated by the angular-fullstack generator and looks something like:
// Get list of worlds
exports.index = function (req, res) {
    World.find(function (err, worlds) {
        if (err) { return handleError(res, err); }
        res.json(200, worlds);
    });
};
If I slice the data so it's not chunked, then the JSON is well formed. I've checked the Mongo DB and the data is fine there too; debugging the worlds variable, I can JSON.stringify it and get the expected string result without any duplicates. But the moment it's sent, the JSON result on the response is doubled.
Update for comment
angular-fullstack 2.0.4
the schema looks like:
'use strict';

var mongoose = require('mongoose'),
    Schema = mongoose.Schema;

var WorldSchema = new Schema({
    name: String,
    info: String,
    active: Boolean,
    tiles: [Schema.Types.Mixed]
});

module.exports = mongoose.model('World', WorldSchema);
seeded with:
var newWorld = new WorldModel({
    _id: planet._objectId,
    name: "SimDD World",
    tiles: seed()
});
newWorld.save();
...
var seed = function () {
    var data = [];
    for (var i = 0; i < planet.HEIGHT; i++) {
        for (var j = 0; j < planet.WIDTH; j++) {
            data.push({
                coords: {
                    x: i,
                    y: j
                },
                type: '.'
            });
        }
    }
    return data;
}
Looks like this is being caused by the compression middleware; removing app.use(compression()); from the Express config seems to fix this.
The issue is seen in browsers but not in Postman. I checked the HTTP request headers, and when I add an Accept header of html in Postman, the same problem appears there as well. So I believe the browsers handle the response differently because of the html Accept type.
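A quick way to reproduce this from Node (a sketch; the host, port, and path are assumptions based on the question, not taken from the answer):
const http = require('http');
const zlib = require('zlib');

// Mimic a browser request: HTML Accept type plus gzip support.
http.get({
    host: 'localhost',   // assumed dev host
    port: 9000,          // assumed angular-fullstack dev port
    path: '/api/worlds',
    headers: { 'Accept': 'text/html', 'Accept-Encoding': 'gzip' }
}, (res) => {
    const chunks = [];
    res.on('data', (c) => chunks.push(c));
    res.on('end', () => {
        const raw = Buffer.concat(chunks);
        const body = res.headers['content-encoding'] === 'gzip'
            ? zlib.gunzipSync(raw).toString()
            : raw.toString();
        console.log(body.slice(0, 100)); // a duplicated payload shows up as [{...}][{...}]
    });
});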
I came across the same problem when building my angular-fullstack app (thanks, DaftMonk). After some extensive debugging using node-inspector, it turns out the JSON data gets passed to the livereload module and comes out duplicated. Disabling this middleware eliminated the problem for me:
// app.use(require('connect-livereload')());
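For reference, this is roughly where that line lives in the generated Express config (the exact file and environment guard vary by generator version, so treat this placement as an assumption):
// e.g. in the generated Express config, development block
if ('development' === env) {
    // app.use(require('connect-livereload')());  // disabled to stop the duplicated JSON
}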
Does this work for you? I don't see a reason why it shouldn't.
I assume you have a planet object that has HEIGHT, WIDTH, and _objectId properties.
Remember, if you modify a Mixed type you need to tell Mongoose that the value changed, and subsequently save it:
http://mongoosejs.com/docs/schematypes.html#mixed
var WorldModel = require('../api/world/world.model');
var planet = require('planetSeedData');

var seed = function () {
    var data = [];
    for (var i = 0; i < planet.HEIGHT; i++) {
        for (var j = 0; j < planet.WIDTH; j++) {
            data.push({
                coords: { x: i, y: j },
                type: '.'
            });
        }
    }
    return data;
};

var myPlanet = {
    _id: Mongoose.Types.ObjectId(planet._objectId),
    name: "SimDD World",
    tiles: seed()
};

WorldModel.create(myPlanet);

// if modified, you would do something like:
// WorldModel.markModified('tiles');
// WorldModel.save();

Verify PKCS#7 (PEM) signature / unpack data in node.js

I get a PKCS#7 crypto package from a 3rd-party system.
The package is not compressed and not encrypted; it is PEM-encoded and signed with an X.509 certificate.
I also have a PEM cert file from the provider.
The data inside is XML.
I need to do the following in Node.js:
extract the data
verify the signature
A sample package (no sensitive info; the data refers to our QA system): http://pastebin.com/7ay7F99e
OK, finally got it.
First of all, PKCS messages are complex structures binary-encoded using ASN.1.
Second, they can be serialized to binary files (DER encoding) or to text PEM files using Base64 encoding.
Third, PKCS#7 specifies several package types, of which mine is called Signed Data. These formats are distinguished by the OBJECT IDENTIFIER value at the beginning of the ASN.1 object (the 1st element of the wrapper sequence; for Signed Data it is 1.2.840.113549.1.7.2); you can go to http://lapo.it/asn1js/ and paste the package text to see the fully parsed structure.
Next, we need to parse the package (Base64 -> ASN.1 -> some object representation). Unfortunately, there's no npm package for that. I found quite a good project, forge, that is not published to the npm registry (though it is npm-compatible). It parses the PEM format, but the resulting tree is quite an unpleasant thing to traverse. Based on their Encrypted Data and Enveloped Data implementations, I created a partial implementation of Signed Data in my own fork. UPD: my pull request was later merged into the forge project.
Now finally we have the whole thing parsed.
At that point I found a great (and probably the only one on the whole web) explanatory article on signed PKCS#7 verification: http://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html
I was able to extract and successfully decode the signature from the file, but the hash inside was different from the data's hash. God bless Chris, who explained what actually happens.
The data signing process has two steps:
the original content's hash is calculated
a set of "Authorized Attributes" is constructed, including: the type of the data signed, the signing time, and the data hash
Then the set from step 2 is signed using the signer's private key.
Due to PKCS#7 specifics, this set of attributes is stored inside a context-specific constructed type (class=0x80, type=0) but should be signed and validated as a normal SET (class=0, type=17).
As Chris mentions (https://stackoverflow.com/a/16154756/108533), this only verifies that the attributes in the package are valid. We should also validate the actual data hash against the digest attribute.
So finally, here's the code doing the validation (cert.pem is the certificate file the provider sent me; package is the PEM-encoded message I got from them over HTTP POST):
var fs = require('fs');
var crypto = require('crypto');
var forge = require('forge');
var pkcs7 = forge.pkcs7;
var asn1 = forge.asn1;
var oids = forge.pki.oids;

var folder = '/a/path/to/files/';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();
var res = true;

try {
    var msg = pkcs7.messageFromPem(pkg);

    // Step 1: verify the signature over the authenticated attributes,
    // re-packed as a UNIVERSAL SET
    var attrs = msg.rawCapture.authenticatedAttributes;
    var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);
    var buf = Buffer.from(asn1.toDer(set).data, 'binary');
    var sig = msg.rawCapture.signature;

    var v = crypto.createVerify('RSA-SHA1');
    v.update(buf);
    if (!v.verify(cert, sig)) {
        console.log('Wrong authorized attributes!');
        res = false;
    }

    // Step 2: verify the content's hash against the messageDigest attribute
    var h = crypto.createHash('SHA1');
    var data = msg.rawCapture.content.value[0].value[0].value;
    h.update(data);

    var attrDigest = null;
    for (var i = 0, l = attrs.length; i < l; ++i) {
        if (asn1.derToOid(attrs[i].value[0].value) === oids.messageDigest) {
            attrDigest = attrs[i].value[1].value[0].value;
        }
    }

    var dataDigest = h.digest('binary'); // compare as a binary string, like attrDigest
    if (dataDigest !== attrDigest) {
        console.log('Wrong content digest');
        res = false;
    }
}
catch (_e) {
    console.dir(_e);
    res = false;
}

if (res) {
    console.log("It's OK");
}
Your answer is a big step in the right direction. You are, however, missing an essential part of the validation!
You should verify the hash of the data against the digest contained in the signed attributes. Otherwise it would be possible for someone to replace the content with malicious data. Try for example validating the following 'package' with your code (and have a look at the content): http://pastebin.com/kaZ2XQQc
I'm not much of a NodeJS developer (this is actually my first try :p), but here's a suggestion to help you get started.
var fs = require('fs');
var crypto = require('crypto');
var pkcs7 = require('./js/pkcs7'); // forge from my own fork
var asn1 = require('./js/asn1');

var folder = '';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();

try {
    var msg = pkcs7.messageFromPem(pkg);
    var attrs = msg.rawCapture.authenticatedAttributes; // got the list of auth attrs
    var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); // packed them inside of the SET object
    var buf = new Buffer(asn1.toDer(set).data, 'binary'); // DO NOT forget 'binary', otherwise it tries to interpret bytes as UTF-8 chars
    var sig = msg.rawCapture.signature;

    var shasum = crypto.createHash('sha1'); // better be based on msg.rawCapture.digestAlgorithms
    shasum.update(msg.rawCapture.content.value[0].value[0].value);

    for (var n in attrs) {
        var attrib = attrs[n].value;
        var attrib_type = attrib[0].value;
        var attrib_value = attrib[1].value[0].value;
        if (attrib_type == "\x2a\x86\x48\x86\xf7\x0d\x01\x09\x04") { // better would be to use the OID (1.2.840.113549.1.9.4)
            if (shasum.digest('binary') == attrib_value) {
                console.log('hash matches');
                var v = crypto.createVerify('RSA-SHA1');
                v.update(buf);
                console.log(v.verify(cert, sig)); // -> should print true
            } else {
                console.log('hash mismatch');
            }
        }
    }
}
catch (_e) {
    console.dir(_e);
}
Based on inspiration from this answer, I've implemented a sample for signing and verifying PDF files using node-signpdf and node-forge.
