Verify PKCS#7 (PEM) signature / unpack data in Node.js

I get a PKCS#7 crypto package from a 3rd party system.
The package is not compressed and not encrypted; it is PEM-encoded and signed with an X.509 certificate.
I also have a PEM cert file from the provider.
The data inside is XML.
I need to do the following in Node.JS:
extract the data
verify the signature
A sample package (no sensitive info; the data refers to our QA system): http://pastebin.com/7ay7F99e

OK, finally got it.
First of all, PKCS messages are complex structures binary-encoded using ASN.1.
Second, they can be serialized to binary files (DER encoding) or to text PEM files using Base64 encoding.
Third, PKCS#7 specifies several package types, of which mine is called Signed Data. These types are distinguished by the OBJECT IDENTIFIER value at the beginning of the ASN.1 object (the 1st element of the wrapper sequence) — you can go to http://lapo.it/asn1js/ and paste the package text to see the fully parsed structure.
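For instance, a minimal sketch that reads that outer content-type OID to confirm a package is Signed Data (assuming the present-day node-forge package, which the fork mentioned below was eventually merged into):
var fs = require('fs');
var forge = require('node-forge');

var pem = fs.readFileSync('package', 'utf8');
var der = forge.pem.decode(pem)[0].body;            // strip the PEM armor -> DER bytes
var obj = forge.asn1.fromDer(der);                  // parse the ASN.1 structure
var oid = forge.asn1.derToOid(obj.value[0].value);  // 1st element of the wrapper SEQUENCE
console.log(oid, forge.pki.oids[oid]);              // '1.2.840.113549.1.7.2' 'signedData'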
Next, we need to parse the package (Base64 -> ASN.1 -> some object representation). Unfortunately, there's no npm package for that. I found quite a good project, forge, that is not published to the npm registry (though it is npm-compatible). It parses the PEM format, but the resulting tree is quite an unpleasant thing to traverse. Based on their Encrypted Data and Enveloped Data implementations I created a partial implementation of Signed Data in my own fork. UPD: my pull request was later merged into the forge project.
Now finally we have the whole thing parsed.
At that point I found a great (and probably the only one on the whole web) explanatory article on signed PKCS#7 verification: http://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html
I was able to extract and successfully decode the signature from the file, but the hash inside was different from the data's hash. God bless Chris, who explained what actually happens.
The data signing process is two-step:
the original content's hash is calculated
a set of "Authenticated Attributes" is constructed, including: the type of the data signed, the signing time and the data hash
Then the set from step 2 is signed using the signer's private key.
Due to PKCS#7 specifics, this set of attributes is stored inside a context-specific constructed type (class=0x80, type=0) but must be signed and validated as a normal SET (class=0, type=17).
As Chris mentions (https://stackoverflow.com/a/16154756/108533), this only verifies that the attributes in the package are valid. We should also validate the actual data hash against the digest attribute.
So finally, here's the code doing the validation (cert.pem is the certificate file the provider sent me, and package is the PEM-encoded message I got from them over HTTP POST):
var fs = require('fs');
var crypto = require('crypto');
var forge = require('forge'); // my fork; the pull request has since been merged into forge
var pkcs7 = forge.pkcs7;
var asn1 = forge.asn1;
var oids = forge.pki.oids;

var folder = '/a/path/to/files/';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();
var res = true;
try {
    var msg = pkcs7.messageFromPem(pkg);

    // Step 1: verify the signature over the authenticated attributes,
    // re-tagged as a UNIVERSAL SET (see above)
    var attrs = msg.rawCapture.authenticatedAttributes;
    var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);
    var buf = Buffer.from(asn1.toDer(set).data, 'binary');
    var sig = msg.rawCapture.signature;

    var v = crypto.createVerify('RSA-SHA1');
    v.update(buf);
    // sig is a forge binary string, so wrap it in a Buffer for Node's crypto
    if (!v.verify(cert, Buffer.from(sig, 'binary'))) {
        console.log('Wrong authorized attributes!');
        res = false;
    }

    // Step 2: verify the content hash against the messageDigest attribute
    var h = crypto.createHash('SHA1');
    var data = msg.rawCapture.content.value[0].value[0].value;
    h.update(data);

    var attrDigest = null;
    for (var i = 0, l = attrs.length; i < l; ++i) {
        if (asn1.derToOid(attrs[i].value[0].value) === oids.messageDigest) {
            attrDigest = attrs[i].value[1].value[0].value;
        }
    }

    // digest as a binary string so it is comparable with the attribute value
    var dataDigest = h.digest('binary');
    if (dataDigest !== attrDigest) {
        console.log('Wrong content digest');
        res = false;
    }
}
catch (_e) {
    console.dir(_e);
    res = false;
}
if (res) {
    console.log("It's OK");
}

Your answer is a big step in the right direction. You are, however, missing an essential part of the validation!
You should verify the hash of the data against the digest contained in the signed attributes. Otherwise it would be possible for someone to replace the content with malicious data. Try, for example, validating the following 'package' with your code (and have a look at the content): http://pastebin.com/kaZ2XQQc
I'm not much of a NodeJS developer (this is actually my first try :p), but here's a suggestion to help you get started.
var fs = require('fs');
var crypto = require('crypto');
var pkcs7 = require('./js/pkcs7'); // forge from my own fork
var asn1 = require('./js/asn1');

var folder = '';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();
try {
    var msg = pkcs7.messageFromPem(pkg);
    var attrs = msg.rawCapture.authenticatedAttributes; // got the list of auth attrs
    var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); // packed them inside of the SET object
    var buf = Buffer.from(asn1.toDer(set).data, 'binary'); // DO NOT forget 'binary', otherwise it tries to interpret bytes as UTF-8 chars
    var sig = msg.rawCapture.signature;

    var shasum = crypto.createHash('sha1'); // better be based on msg.rawCapture.digestAlgorithms
    shasum.update(msg.rawCapture.content.value[0].value[0].value);

    for (var n in attrs) {
        var attrib = attrs[n].value;
        var attrib_type = attrib[0].value;
        var attrib_value = attrib[1].value[0].value;
        if (attrib_type == "\x2a\x86\x48\x86\xf7\x0d\x01\x09\x04") { // better would be to use the OID (1.2.840.113549.1.9.4)
            if (shasum.digest('binary') == attrib_value) {
                console.log('hash matches');
                var v = crypto.createVerify('RSA-SHA1');
                v.update(buf);
                console.log(v.verify(cert, Buffer.from(sig, 'binary'))); // -> should print true
            } else {
                console.log('hash mismatch');
            }
        }
    }
}
catch (_e) {
    console.dir(_e);
}
}
catch (_e) {
console.dir(_e);
}
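Following up on the "better be based on msg.rawCapture.digestAlgorithms" comment in the code above, here is a hedged sketch of deriving the hash name from the message itself instead of hard-coding 'sha1'. The exact shape of forge's rawCapture fields is an assumption on my part and may differ between forge versions:
// digestAlgorithms is captured as a SET OF AlgorithmIdentifier;
// take the OID of the first one and map it to a Node hash name
var algs = msg.rawCapture.digestAlgorithms;
var digestOid = asn1.derToOid(algs[0].value[0].value);
var hashName = oids[digestOid]; // e.g. 'sha1' or 'sha256' (oids = forge.pki.oids)
var shasum = crypto.createHash(hashName);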

Based on inspiration from this answer, I've implemented a sample for signing and verifying PDF files using node-signpdf and node-forge.

Related

How to execute / access local file from Thunderbird WebExtension?

I'd like to write a Thunderbird add-on that encrypts stuff. For this, I have already extracted all the data from the compose window. Now I have to save this data into files and run a local executable for encryption. But I found no way to save the files and execute an executable on the local machine. How can I do that?
I found the File and Directory Entries API documentation, but it doesn't seem to work. I always get undefined when trying to get the object with this code:
var filesystem = FileSystemEntry.filesystem;
console.log(filesystem); // --> undefined
At least, is there a working add-on that I can examine to find out how this works, and maybe what permissions I have to request in the manifest.json?
NOTE: Must work cross-platform (Windows and Linux).
The answer is that WebExtensions are currently not able to execute local files, and saving to a local folder on disk is not possible either.
Instead, you need to add a WebExtension Experiment to your project and use the legacy APIs there. In the Experiment you can use the IOUtils and FileUtils modules to reach your goal:
Execute a file:
In your background JS file:
var ret = await browser.experiment.execute("/usr/bin/executable", [ "-v" ]);
In the Experiment you can then execute it like this:
var { ExtensionCommon } = ChromeUtils.import("resource://gre/modules/ExtensionCommon.jsm");
var { FileUtils } = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
var { XPCOMUtils } = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
XPCOMUtils.defineLazyGlobalGetters(this, ["IOUtils"]);

async execute(executable, arrParams) {
    var fileExists = await IOUtils.exists(executable);
    if (!fileExists) {
        Services.wm.getMostRecentWindow("mail:3pane")
            .alert("Executable [" + executable + "] not found!");
        return false;
    }
    var progPath = new FileUtils.File(executable);
    let process = Cc["@mozilla.org/process/util;1"].createInstance(Ci.nsIProcess);
    process.init(progPath);
    process.startHidden = false;
    process.noShell = true;
    process.run(true, arrParams, arrParams.length);
    return true;
},
Save an attachment to disk:
In your background JS file you can do it like this (note that getAttachmentFile returns a Promise):
var f = await messenger.compose.getAttachmentFile(attachment.id);
var blob = await f.arrayBuffer();
var t = await browser.experiment.writeFileBinary(tempFile, blob);
In the experiment you can then write the file like this:
async writeFileBinary(filename, data) {
    // wrap the incoming ArrayBuffer in a Uint8Array so IOUtils can write it
    var uint8 = new Uint8Array(data);
    var ret = await IOUtils.write(filename, uint8);
    return ret;
},
IOUtils documentation:
https://searchfox.org/mozilla-central/source/dom/chrome-webidl/IOUtils.webidl
FileUtils documentation:
https://searchfox.org/mozilla-central/source/toolkit/modules/FileUtils.jsm
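For completeness, here is a hedged sketch of the Experiment glue (an implementation.js) that exposes the two functions above under browser.experiment.*. The namespace name and file layout are assumptions on my part, and you still need a matching schema.json that declares the Experiment in your manifest.json (see the WebExtension Experiments documentation):
// implementation.js -- a sketch, assuming the namespace is called "experiment"
var { ExtensionCommon } = ChromeUtils.import("resource://gre/modules/ExtensionCommon.jsm");

this.experiment = class extends ExtensionCommon.ExtensionAPI {
    getAPI(context) {
        return {
            experiment: {
                async execute(executable, arrParams) {
                    // body of execute() from above goes here
                },
                async writeFileBinary(filename, data) {
                    // body of writeFileBinary() from above goes here
                },
            },
        };
    }
};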

Unable to inflate SessionStorage data; getting either 'incorrect header check' or 'invalid stored block lengths' depending on the inflation method

I have data exceeding 5MB I need to store in session storage. To that end, I'm using pako to compress the data.
First, we have an Angular app that receives data from an API and adds it to a hash 'cachedLookups':
const stringifiedLookups = JSON.stringify(this.cachedLookups)
const compressedLookups = new TextDecoder().decode(pako.deflate(stringifiedLookups));
sessionStorage.setItem(this.sessionStorageKey, compressedLookups);
Then we have an AngularJS app in the same browser window that retrieves this data from the Session Storage:
const compressedLookups = localStorageService.get("cachedLookups");
const compressedLookupsUint8Array = new TextEncoder().encode(compressedLookups);
const stringifiedLookups = pako.inflate(compressedLookupsUint8Array, { to: 'string' });
When I hit pako.inflate, I get 'incorrect header check'. I've also tried inflateRaw, in which case I get 'invalid stored block lengths'. I'm using TextEncoder/TextDecoder here because attempting to store the Uint8Array directly in sessionStorage would force it to exceed its quota, despite being calculated at under 5MB. I assume that issue has to do with the fact that the Storage API only stores key/value string pairs.
The zlib header added by deflate contains bytes that are not valid UTF-8, so they get corrupted in the decode/encode round trip: the Uint8Array returned by pako and the one returned by TextEncoder don't have the same values.
var data = "a";
var deflated = pako.deflate(data)
var textEncoded = new TextDecoder().decode(deflated)
var binary = new TextEncoder().encode(textEncoded)
// Not the same
console.log(deflated)
console.log(binary)
// ERROR
console.log(pako.inflate(binary))
If instead you use deflateRaw, which doesn't add the zlib header, it works just fine:
var data = "a";
var deflated = pako.deflateRaw(data)
var textEncoded = new TextDecoder().decode(deflated)
var binary = new TextEncoder().encode(textEncoded)
// Same content
console.log(deflated)
console.log(binary)
// SUCCESS
console.log(pako.inflateRaw(binary, {to: 'string'}))
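As an alternative (my own sketch, not part of the answer above): base64 is a binary-safe way to squeeze the deflated bytes into a string, so the round trip works even with the zlib header in place. For multi-megabyte arrays the intermediate string should be built in chunks to avoid the argument-list limits of apply():
var deflated = pako.deflate(JSON.stringify(obj)); // obj being the cachedLookups hash
var chunks = [];
for (var i = 0; i < deflated.length; i += 0x8000) { // chunked to stay under apply() limits
    chunks.push(String.fromCharCode.apply(null, deflated.subarray(i, i + 0x8000)));
}
sessionStorage.setItem('cachedLookups', btoa(chunks.join('')));

var bytes = Uint8Array.from(atob(sessionStorage.getItem('cachedLookups')), function (c) {
    return c.charCodeAt(0);
});
var restored = JSON.parse(pako.inflate(bytes, { to: 'string' }));
Note that base64 inflates the stored size by a third, so whether this still fits the 5MB quota depends on how well the data compresses.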

Render an image byte stream on angular client side app

I have a NodeJS / Express RESTful API that proxies requests from an Active Directory LDAP server. I do this because LDAP queries tend to be slow. I use the RESTful API to cache and refresh data intermittently. I recently attempted to add the thumbnail photo. From my research, it appears the library I am using, ldapjs, is converting the native LDAP byte array to a string.
Example of what this looks like:
\ufffd\ufffd\ufffd\ufffd\u0000\u0010JFIF\u0000\u0001\u0000\u0001\u0000x\u0000x\u0000\u0000\ufffd\ufffd\u0000\u001fLEAD
Technologies Inc.
V1.01\u0000\ufffd\ufffd\u0000\ufffd\u0000\u0005\u0005\u0005\b\
Because of this, the image does not render correctly in the Angular client app. Based on my research, here are some of my attempts at correcting the problem:
Convert the string to a byte array using different methods (see code examples)
Modify the ldapjs library to return the data as a byte array in the RESTful response as in the following, then bind the byte stream to the Angular page:
https://github.com/joyent/node-ldapjs/issues/137
https://csjdpw.atlassian.net/wiki/spaces/~Genhan.Chen/pages/235044890/Display+LDAP+thumbnail+photos
html binding:
<div>
    <img *ngIf="userImage" [src]="userImage" alt="{{dataSource.sAMAccountName}}">
</div>
controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const byteArray = this.string2Bin(this.dataSource.thumbnailPhoto);
        const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}

private string2Bin(str) {
    var result = [];
    for (var i = 0; i < str.length; i++) {
        result.push(str.charCodeAt(i));
    }
    return result;
}
and
alternate version of controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const byteArray = new TextEncoder().encode(this.dataSource.thumbnailPhoto);
        const image = `data:image/jpeg;base64,${Buffer.from(byteArray).toString('base64')}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}
another alternate version of controller:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const blob = new Blob([Buffer.from(this.dataSource.thumbnailPhoto).toString('base64')], { type: 'image/jpeg' });
        value = window.URL.createObjectURL(blob);
    }
    return value;
}
I expected a rendered image on the angular page but all I get is the non-rendered placeholder.
Here are the versions of the libraries I am using
Angular - 8.0.3
NodeJS - 10.15.0
ldapjs - 1.0.2
I am sure I am missing something, I am just not sure what it is. Any assistance would be appreciated.
So after some guidance provided by @Aritra Chakraborty, I checked the RESTful API source code. It appears to be a problem with the ldapjs library: when using the entry object conversion, it does something strange with the data that renders it unusable. I then realized I had access to the entry's raw format, which is the byte array. Instead of trying to convert to base64 on the client, I moved this to the API. Then I just mapped it back in the client binding and, bang, it worked.
Here is some example code:
RESTful API
_client.search(this._search_dn, opts, (error, res) => {
    res.on("searchEntry", (entry) => {
        let result = {};
        result.id = string_service.formatGUID(JSON.parse(JSON.stringify(entry.raw)).objectGUID);
        result = Object.assign({}, result, entry.object);
        if (entry.raw.thumbnailPhoto) {
            result.thumbnailPhoto = entry.raw.thumbnailPhoto.toString('base64');
        }
        // The three lines above (the thumbnailPhoto check) are the only addition
On the Angular 8 client I simplified the binding:
public get userImage() {
    let value = null;
    if (this.dataSource.thumbnailPhoto) {
        const image = `data:image/jpeg;base64,${this.dataSource.thumbnailPhoto}`;
        value = this.domSanitizer.bypassSecurityTrustUrl(image);
    }
    return value;
}
I hope someone finds some value out of this.

How do I write a LZ compressed string to text file using JXA?

I am trying to write a JXA script in Apple's Script Editor that compresses a string using the LZ algorithm and writes it to a text (JSON) file:
var story = "Once upon a time in Silicon Valley..."
var storyC = LZString.compress(story)
var data_to_write = "{\x22test\x22\x20:\x20\x22"+storyC+"\x22}"
app.displayAlert(data_to_write)
var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`
writeTextToFile(data_to_write, file, true)
Everything works, except that the LZ compressed string is just transformed to a set of "?" by the time it reaches the output file, test.json.
It should look like:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
Instead it looks like:
{"test" : "????????????????????"}
I have a feeling the conversion is happening in the app.write command used by the writeTextToFile() function (which I pulled from an example in Apple's Mac Automation Scripting Guide):
var app = Application.currentApplication()
app.includeStandardAdditions = true

function writeTextToFile(text, file, overwriteExistingContent) {
    try {
        // Convert the file to a string
        var fileString = file.toString()

        // Open the file for writing
        var openedFile = app.openForAccess(Path(fileString), { writePermission: true })

        // Clear the file if content should be overwritten
        if (overwriteExistingContent) {
            app.setEof(openedFile, { to: 0 })
        }

        // Write the new content to the file
        app.write(text, { to: openedFile, startingAt: app.getEof(openedFile) })

        // Close the file
        app.closeAccess(openedFile)

        // Return a boolean indicating that writing was successful
        return true
    }
    catch (error) {
        try {
            // Close the file
            app.closeAccess(file)
        }
        catch (error) {
            // Report the error if closing failed
            console.log(`Couldn't close file: ${error}`)
        }

        // Return a boolean indicating that writing failed
        return false
    }
}
Is there a substitute command for app.write that maintains the LZ compressed string / a better way to accomplish what I am trying to do?
In addition, I am using the readFile() function (also from the Scripting Guide) to load the LZ string back into the script:
function readFile(file) {
    // Convert the file to a string
    var fileString = file.toString()

    // Read the file and return its contents
    return app.read(Path(fileString))
}
But rather than returning:
{"test" : "㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
It is returning:
"{\"test\" : \"㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ\"}"
Does anybody know a fix for this too?
I know that it is possible to use Cocoa in JXA scripts, so maybe the solution lies therein?
I am just getting to grips with JavaScript so I'll admit trying to grasp Objective-C or Swift is way beyond me right now.
I look forward to any solutions and/or pointers that you might be able to provide me. Thanks in advance!
After some further Googling, I came across these two posts:
How can I write UTF-8 files using JavaScript for Mac Automation?
read file as class utf8
I have thus altered my script accordingly.
writeTextToFile() now looks like:
function writeTextToFile(text, file) {
    // source: https://stackoverflow.com/a/44293869/11616368
    var nsStr = $.NSString.alloc.initWithUTF8String(text)
    var nsPath = $(file).stringByStandardizingPath
    var successBool = nsStr.writeToFileAtomicallyEncodingError(nsPath, false, $.NSUTF8StringEncoding, null)
    if (!successBool) {
        throw new Error("function writeFile ERROR:\nWrite to File FAILED for:\n" + file)
    }
    return successBool
};
While readFile() looks like:
ObjC.import('Foundation')

const readFile = function (path, encoding) {
    // source: https://github.com/JXA-Cookbook/JXA-Cookbook/issues/25#issuecomment-271204038
    const pathString = path.toString()
    !encoding && (encoding = $.NSUTF8StringEncoding)
    const fm = $.NSFileManager.defaultManager
    const data = fm.contentsAtPath(pathString)
    const str = $.NSString.alloc.initWithDataEncoding(data, encoding)
    return ObjC.unwrap(str)
};
Both use Objective-C to overcome app.write and app.read's inability to handle UTF-8.
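For reference, here is how the two replacements slot into the original script, reusing the question's own variables (a sketch; the paths are unchanged from the question):
var story = "Once upon a time in Silicon Valley..."
var storyC = LZString.compress(story)
var data_to_write = "{\x22test\x22\x20:\x20\x22" + storyC + "\x22}"

var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`

writeTextToFile(data_to_write, file)  // the NSString bridge preserves the UTF-8
var roundTrip = readFile(file)        // -> {"test" : "...compressed string..."}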

Validate Facebook signed_request signature in Javascript

I'm building a Facebook Page app in Classic ASP. I've been unable to match the signature that Facebook passes into the app as the first part of the POSTed signed_request.
Because there are few libraries for cryptography in VBScript, I'm using server-side JavaScript and the crypto-js library from https://code.google.com/archive/p/crypto-js/
I've tried to translate the PHP code example from Facebook's docs at https://developers.facebook.com/docs/games/gamesonfacebook/login#parsingsr into Javascript. I can generate an HMAC SHA256 hash of the signed_request payload but that doesn't match the signed_request signature.
I think the problem is that Facebook's signature is in a different format. It looks to be binary (~1抚Ö.....) while the HMAC SHA256 hash I'm generating is a hexadecimal string (7f7e8f5f.....). In Facebook's PHP example the hash_hmac function uses the raw binary parameter. So I think I need to either convert Facebook's signature to hexadecimal or my signature to binary in order to do an "apples-to-apples" comparison and get a match.
Here's my code:
/* Use the libraries from https://code.google.com/archive/p/crypto-js/
crypto-js/crypto-js.min.js
crypto-js/hmac-sha256.min.js
crypto-js/enc-base64.min.js
*/
var signedRequest = Request.queryString("signed_request")
var FB_APP_SECRET = "459f038.....";
var arSR = signedRequest.split(".");
var encodedSig = arSR[0];
var encodedPayload = arSR[1];
var payload = base64UrlDecode(encodedPayload);
var sig = base64UrlDecode(encodedSig);
var expectedSig;
expectedSig = CryptoJS.HmacSHA256(encodedPayload, FB_APP_SECRET); // Unaltered payload string; no match
expectedSig = CryptoJS.HmacSHA256(payload, FB_APP_SECRET); // base64-decoded payload string; no match
if (sig == expectedSig) {
    Response.write(payload);
} else {
    Response.write("Bad signature");
}

function base64UrlDecode(input) {
    // Replace URL-safe characters globally and convert from base64.
    return Base64.decode(input.replace(/-/g, "+").replace(/_/g, "/"));
}
After looking into the crypto-js documentation about encoding, I found the solution. The encoding/decoding methods provided by crypto-js are listed under 'Encoders' at the bottom of https://code.google.com/archive/p/crypto-js/ (thanks for the nudge, CBroe).
The solution was to use .toString() on the signatures. It seems crypto-js uses a word-array format that was preventing a comparison match. I also switched to the base64 decoding provided by crypto-js in order to stick with one library.
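To illustrate the word-array point (a small sketch, assuming the crypto-js libraries listed above are loaded):
var mac = CryptoJS.HmacSHA256("message", "secret");
console.log(typeof mac);                          // 'object' -- a WordArray, not a string
console.log(mac.toString());                      // hex digest by default
console.log(mac.toString(CryptoJS.enc.Base64));   // the same MAC, base64-encoded
Two WordArrays with identical bytes are still distinct objects, so comparing them with == fails; comparing their string forms works.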
Here's my updated code:
/* Use the libraries from https://code.google.com/archive/p/crypto-js/
crypto-js/crypto-js.min.js
crypto-js/hmac-sha256.min.js
crypto-js/enc-base64.min.js
*/
var signedRequest = Request.queryString("signed_request");
var FB_APP_SECRET = "459f038.....";

var arSR = signedRequest.split(".");
var encodedSig = arSR[0];
var encodedPayload = arSR[1];

var payload = base64UrlDecode(encodedPayload);
var sig = base64UrlDecode(encodedSig);

var expectedSig = CryptoJS.HmacSHA256(encodedPayload, FB_APP_SECRET); /******** Correct payload */

if (sig.toString() == expectedSig.toString()) { /******** Use .toString() to compare as plain strings */
    Response.write(payload);
} else {
    Response.write("Bad signature");
}

function base64UrlDecode(input) {
    return CryptoJS.enc.Base64.parse( /******** Decode */
        input.replace(/-/g, "+").replace(/_/g, "/") // Replace URL-safe characters globally
    );
}
I recently implemented this for their required user data deletion webhook. No external dependencies needed anymore:
const crypto = require('crypto');

function parseSignedRequest(signedRequest, secret) {
    const [signatureReceived, encodedPayload] = signedRequest.split('.', 2);
    const payload = b64decode(encodedPayload);
    const data = JSON.parse(payload);
    // Facebook signs the *encoded* payload string, not the decoded JSON
    const hmac = crypto.createHmac('sha256', secret).update(encodedPayload);
    // Facebook sends the signature base64url-encoded without padding,
    // so normalize ours to the same alphabet before comparing
    const expectedSignature = hmac.digest('base64')
        .replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
    if (signatureReceived === expectedSignature) {
        return data;
    } else {
        throw new Error("Signature mismatch");
    }
}

function b64decode(data) {
    const buff = Buffer.from(data, 'base64');
    return buff.toString('utf8');
}
It's a translation of their example PHP code. I also have a repo setup with tests.
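A hedged usage sketch for the deletion endpoint (the Express app, route name, and env variable are my assumptions; the { url, confirmation_code } response shape is what Facebook's data-deletion callback docs describe):
const express = require('express');
const app = express();
app.use(express.urlencoded({ extended: false })); // Facebook POSTs signed_request as form data

app.post('/facebook/deletion', (req, res) => {
    try {
        const data = parseSignedRequest(req.body.signed_request, process.env.FB_APP_SECRET);
        // start deleting data for data.user_id here, then confirm:
        res.json({ url: 'https://example.com/deletion-status', confirmation_code: String(data.user_id) });
    } catch (e) {
        res.sendStatus(400); // signature mismatch or malformed request
    }
});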
I found this worked for me.
const crypto = require('crypto')

const _atob = (str) => Buffer.from(str, 'base64').toString('binary')

const parseSignedRequest = (signed_request, app_secret) => {
    const [encoded_sig, payload] = signed_request.split('.')
    const json = _atob(payload)
    const data = JSON.parse(json)
    if (!data.algorithm || data.algorithm.toUpperCase() !== 'HMAC-SHA256') {
        return {error: true, type: 'Unknown algorithm. Expected HMAC-SHA256'}
    }
    // check sig -- note we use the app_secret argument, and base64url-encode
    // our digest so it matches the signature Facebook sends
    const expected_sig = crypto.createHmac('sha256', app_secret)
        .update(payload).digest('base64')
        .replace(/\+/g, '-').replace(/\//g, '_')
        .replace(/=/g, '')
    if (encoded_sig !== expected_sig) {
        return {error: true, type: 'invalid_signature'}
    }
    return {error: false, parsedRequest: data}
}

const {error, type, parsedRequest} = parseSignedRequest(signed_request, app_secret)
