How to get to "feature parity" between Node's Crypto.createHmac('sha256', buffer) and CryptoJS.HmacSHA256(..., secret)?
I have third-party code that does what you can see here as the method node1. I need to achieve the same result in the browser. Seemingly, the difference is that the secret is base64-decoded on the Node side, but I still can't get the same output.
const CryptoJS = require('crypto-js')
const Crypto = require('crypto')
const message = "Message"
const secret = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
function node1() {
return Crypto.createHmac("sha256", Buffer.from(secret, 'base64'))
.update(message, "utf8")
.digest("base64");
}
function node2() {
return Crypto.createHmac("sha256", Buffer.from(secret, 'base64').toString('base64'))
.update(message, "utf8")
.digest("base64");
}
function browser() {
const crypted = CryptoJS.HmacSHA256(message, secret)
return CryptoJS.enc.Base64.stringify(crypted)
}
console.log('node1', node1())
console.log('node2', node2())
console.log('browser-like', browser())
// node1 agitai8frSJpJuXwd4HMJC/t2tluUJPMZy8CeYsEHTE=
// node2 fxJQFWs5W3A4otaAlnlV0kh4yfQPb4Y1ChSVZsUAAXA=
// browser-like fxJQFWs5W3A4otaAlnlV0kh4yfQPb4Y1ChSVZsUAAXA=
So, I can reproduce a naive browser-like behaviour in Node. This gave me the idea to use atob in the browser to reproduce Node's behaviour. The following sign method is my best guess on the browser side.
function sign(message) {
const crypted = CryptoJS.HmacSHA256(message, atob(secret));
return CryptoJS.enc.Base64.stringify(crypted)
}
function signNotDecoded(message) {
const crypted = CryptoJS.HmacSHA256(message, secret);
return CryptoJS.enc.Base64.stringify(crypted)
}
console.log('browser', sign('Message'))
console.log('browser-like', signNotDecoded('Message'))
// browser dnVm5jBgIBNV6pFd4J9BJTjx3BFsm7K32SCcEQX7RHA=
// browser-like fxJQFWs5W3A4otaAlnlV0kh4yfQPb4Y1ChSVZsUAAXA=
So, running signNotDecoded() in the browser and running browser() in Node give the same output. Running both node2() and browser() in Node again provides the same output, but sign() still differs from node1().
Based on the above, I'm quite sure that the problem is with my usage of atob, but what do I miss there?
Change
atob(secret)
To
CryptoJS.enc.Base64.parse(secret)
This is because if you pass a raw string as the key, CryptoJS re-parses it as UTF-8 instead of using the decoded key bytes.
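Applied to the question's sign() function, a minimal sketch (same message and secret as above):
function sign(message) {
  // parse the Base64 secret into a WordArray so CryptoJS uses the raw key bytes,
  // mirroring Buffer.from(secret, 'base64') on the Node side
  const key = CryptoJS.enc.Base64.parse(secret);
  const crypted = CryptoJS.HmacSHA256(message, key);
  return CryptoJS.enc.Base64.stringify(crypted);
}
console.log(sign('Message')); // should now match node1()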
I need to get a JWT signed with the EdDSA algorithm to be able to use an API. I have the private key to sign the message, and I could do that in PHP with this library: https://github.com/firebase/php-jwt (you can see the EdDSA example in the README). Now I need to do the same in JS, but I didn't find a way to get the JWT with a given Base64-encoded secret key like this (this is only an example, not the real secretKey):
const secretKey = "Dm2xriMD6riJagld4WCA6zWqtuWh40UzT/ZKO0pZgtHATOt0pGw90jG8BQHCE3EOjiCkFR2/gaW6JWi+3nZp8A=="
I tried a lot of libraries like jose, js-nacl, crypto, libsodium, etc., and I am really close to getting the JWT with the libsodium library. Here is the code:
const base64url = require("base64url");
const _sodium = require("libsodium-wrappers");
const moment = require("moment");
const getJWT = async () => {
await _sodium.ready;
const sodium = _sodium;
const privateKey =
"Dm2xriMD6riJagld4WCA6zWqtuWh40UzT/ZKO0pZgtHATOt0pGw90jG8BQHCE3EOjiCkFR2/gaW6JWi+3nZp8A==";
const payload = {
iss: "test",
aud: "test.com",
iat: 1650101178,
exp: 1650101278,
sub: "12345678-1234-1234-1234-123456789123"
};
const { headerAndPayloadBase64URL, keyAscii } = encode(payload, privateKey, "EdDSA");
const signature = sodium.crypto_sign_detached(headerAndPayloadBase64URL, keyAscii); // returns Uint8Array(64)
//Here is the problem.
};
const encode = (payload, key, alg) => {
const header = {
typ: "JWT",
alg //'EdDSA'
};
const headerBase64URL = base64url(JSON.stringify(header));
const payloadBase64URL = base64url(JSON.stringify(payload));
const headerAndPayloadBase64URL = `${headerBase64URL}.${payloadBase64URL}`;
const keyAscii= Buffer.from(key, "base64").toString("ascii");
return {headerAndPayloadBase64URL , keyAscii}
};
The problem is in the sodium.crypto_sign_detached function: it returns a Uint8Array(64) signature, and I need the JWT to look like this:
eyJ0eXAiOiJKV1QiLCJhbGciOiJFZERTQSJ9.eyJpc3MiOiJ0ZXN0IiwiYXVkIjoidGVzdC5jb20iLCJpYXQiOjE2NTAxMDExNzgsImV4cCI6MTY1MDEwMTI3OCwic3ViIjoiMTIzNDU2NzgtMTIzNC0xMjM0LTEyMzQtMTIzNDU2Nzg5MTIzIn0.f7WG_02UKljrMeVVOTNNBAGxtLXJUT_8QAnujNhomV18Pn5cU-0lHRgVlmRttOlqI7Iol_fHut3C4AOXxDGnAQ
How can I convert the Uint8Array(64) into the right format to build the JWT? I tried base64, base64url, hex, text, ascii, etc., and the final JWT is not valid (because the signature is wrong).
If you compare my code with the PHP code I mentioned, it is very similar, but sodium.crypto_sign_detached returns a Uint8Array(64) in the JS library, while the same function in PHP returns a string, which lets me build the token.
Or maybe there is a way to adapt my given private key for use with another library (like crypto or jose, where I got an error about the private key format)?
Thank you!
In the posted NodeJS code there are the following issues:
crypto_sign_detached() returns the signature as a Uint8Array, which can be wrapped with Buffer.from() and converted to a Base64url string with base64url().
Concatenating headerAndPayloadBase64URL and the Base64url encoded signature with a . as separator gives the JWT you are looking for.
The raw private key must not be decoded with 'ascii', as this generally corrupts the data. Instead, it should simply be handled as a buffer. Note: if for some reason a conversion to a string is required, use 'binary' as the encoding, which produces a byte string (however, this is not an option with crypto_sign_detached(), as this function expects a buffer).
With these changes, the following NodeJS code results:
const _sodium = require('libsodium-wrappers');
const base64url = require("base64url");
const getJWT = async () => {
await _sodium.ready;
const sodium = _sodium;
const privateKey = "Dm2xriMD6riJagld4WCA6zWqtuWh40UzT/ZKO0pZgtHATOt0pGw90jG8BQHCE3EOjiCkFR2/gaW6JWi+3nZp8A==";
const payload = {
iss: "test",
aud: "test.com",
iat: 1650101178,
exp: 1650101278,
sub: "12345678-1234-1234-1234-123456789123"
};
const {headerAndPayloadBase64URL, keyBuf} = encode(payload, privateKey, "EdDSA");
const signature = sodium.crypto_sign_detached(headerAndPayloadBase64URL, keyBuf);
const signatureBase64url = base64url(Buffer.from(signature));
console.log(`${headerAndPayloadBase64URL}.${signatureBase64url}`) // eyJ0eXAiOiJKV1QiLCJhbGciOiJFZERTQSJ9.eyJpc3MiOiJ0ZXN0IiwiYXVkIjoidGVzdC5jb20iLCJpYXQiOjE2NTAxMDExNzgsImV4cCI6MTY1MDEwMTI3OCwic3ViIjoiMTIzNDU2NzgtMTIzNC0xMjM0LTEyMzQtMTIzNDU2Nzg5MTIzIn0.f7WG_02UKljrMeVVOTNNBAGxtLXJUT_8QAnujNhomV18Pn5cU-0lHRgVlmRttOlqI7Iol_fHut3C4AOXxDGnAQ
};
const encode = (payload, key, alg) => {
const header = {
typ: "JWT",
alg //'EdDSA'
};
const headerBase64URL = base64url(JSON.stringify(header));
const payloadBase64URL = base64url(JSON.stringify(payload));
const headerAndPayloadBase64URL = `${headerBase64URL}.${payloadBase64URL}`;
const keyBuf = Buffer.from(key, "base64");
return {headerAndPayloadBase64URL, keyBuf};
};
getJWT();
Test:
Since Ed25519 is deterministic, the NodeJS code can be checked by comparing the two JWTs: if, as in the NodeJS code above, the same header and payload are used as in the PHP code, the same signature, and thus the same JWT, is generated as by the PHP code, namely:
eyJ0eXAiOiJKV1QiLCJhbGciOiJFZERTQSJ9.eyJpc3MiOiJ0ZXN0IiwiYXVkIjoidGVzdC5jb20iLCJpYXQiOjE2NTAxMDExNzgsImV4cCI6MTY1MDEwMTI3OCwic3ViIjoiMTIzNDU2NzgtMTIzNC0xMjM0LTEyMzQtMTIzNDU2Nzg5MTIzIn0.f7WG_02UKljrMeVVOTNNBAGxtLXJUT_8QAnujNhomV18Pn5cU-0lHRgVlmRttOlqI7Iol_fHut3C4AOXxDGnAQ
which shows that the NodeJS code works.
Note that instead of the moment package, Date.now() could be used. It returns the time in milliseconds, so the value has to be divided by 1000, e.g. Math.round(Date.now() / 1000), but this saves a dependency.
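For illustration, a small sketch of building iat and exp that way (the 100-second lifetime mirrors the difference between exp and iat in the hard-coded payload above):
const iat = Math.round(Date.now() / 1000); // current Unix time in seconds
const payload = {
  iss: "test",
  aud: "test.com",
  iat,
  exp: iat + 100, // token valid for 100 seconds, as in the payload above
  sub: "12345678-1234-1234-1234-123456789123"
};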
I'm using Electron to develop an app. After some encryption operations are done, I need to show a dialog to the user to save the file. The filename I want to give the file is a random hash, but I have had no success with that either. I'm trying with this code, but the file will not be saved. How can I fix this?
const downloadPath = app.getPath('downloads')
ipcMain.on('encryptFiles', (event, data) => {
let output = [];
const password = data.password;
data.files.forEach( (file) => {
const buffer = fs.readFileSync(file.path);
const dataURI = dauria.getBase64DataURI(buffer, file.type);
const encrypted = CryptoJS.AES.encrypt(dataURI, password).toString();
output.push(encrypted);
})
const filename = hash.createHash('md5').toString('hex');
console.log(filename)
const response = output.join(' :: ');
dialog.showSaveDialog({title: 'Save encrypted file', defaultPath: downloadPath }, () => {
fs.writeFile(`${filename}.mfs`, response, (err) => console.log(err) )
})
})
The problem you're experiencing results from the asynchronous nature of Electron's UI functions: they do not take callback functions, but return promises instead. Thus, you do not pass in a callback function, but rather handle the promise's resolution. Note that this only applies to Electron >= version 6. If, however, you run an older version of Electron, your code would be correct -- but then you should really update to a newer version (Electron v6 was released well over a year ago).
Adapting your code like below can be a starting point to solve your problem. However, since you do not state how you generate the hash (where does hash.createHash come from? did you forget to declare/import hash? did you forget to pass any message string? are you using hash as an alias for NodeJS' crypto module?), it is (at this time) impossible to debug why you do not get any output from console.log(filename) (I assume you mean this by "in the code, the random filename will not be created"). Once you provide more details on this problem, I'd be happy to update this answer accordingly.
As for the default filename: as per the Electron documentation, you can pass a file path into dialog.showSaveDialog() to provide the user with a default filename.
The file type extension you're using should also be included in the default path passed to the save dialog. Passing this file extension as a filter into the dialog will additionally prevent users from selecting any other file type, which is ultimately what you're currently doing by appending it to the filename.
Also, you could utilise CryptoJS for the filename generation: given some arbitrary string, which could really be random bytes, you could do: filename = CryptoJS.MD5('some text here') + '.mfs'; However, remember to choose the input string wisely. MD5 has been broken and should thus no longer be used to store secrets -- using any known information which is crucial for the encryption of the files you're storing (such as data.password) is inherently insecure. There are some good examples on how to create random strings in JavaScript around the internet, along with this answer here on SO.
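As one option, a minimal sketch deriving a random filename from Node's built-in crypto module (this assumes crypto is required in the main process and is just one of several ways to get random bytes):
const crypto = require('crypto');
// 16 random bytes -> 32 hex characters; no user-supplied secrets involved
const filename = crypto.randomBytes(16).toString('hex') + '.mfs';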
Taking all these issues into account, one might end up with the following code:
const downloadPath = app.getPath('downloads'),
path = require('path');
ipcMain.on('encryptFiles', (event, data) => {
let output = [];
const password = data.password;
data.files.forEach((file) => {
const buffer = fs.readFileSync(file.path);
const dataURI = dauria.getBase64DataURI(buffer, file.type);
const encrypted = CryptoJS.AES.encrypt(dataURI, password).toString();
output.push(encrypted);
})
// not working:
// const filename = hash.createHash('md5').toString('hex') + '.mfs';
// alternative requiring more research on your end
const filename = CryptoJS.MD5('replace me with some random bytes') + '.mfs';
console.log(filename);
const response = output.join(' :: ');
dialog.showSaveDialog(
{
title: 'Save encrypted file',
defaultPath: path.format ({ dir: downloadPath, base: filename }), // construct a proper path
filters: [{ name: 'Encrypted File (*.mfs)', extensions: ['mfs'] }] // filter the possible files
}
).then ((result) => {
if (result.canceled) return; // discard the result altogether; user has clicked "cancel"
else {
var filePath = result.filePath;
if (!filePath.endsWith('.mfs')) {
// This is an additional safety check which should not actually trigger.
// However, generally appending a file extension to a filename is not a
// good idea, as they would be (possibly) doubled without this check.
filePath += '.mfs';
}
fs.writeFile(filePath, response, (err) => console.log(err) )
}
}).catch ((err) => {
console.log (err);
});
})
I am trying to write a JXA script in Apple's Script Editor that compresses a string using the LZ algorithm and writes it to a text (JSON) file:
var story = "Once upon a time in Silicon Valley..."
var storyC = LZString.compress(story)
var data_to_write = "{\x22test\x22\x20:\x20\x22"+storyC+"\x22}"
app.displayAlert(data_to_write)
var desktopString = app.pathTo("desktop").toString()
var file = `${desktopString}/test.json`
writeTextToFile(data_to_write, file, true)
Everything works, except that the LZ compressed string is just transformed to a set of "?" by the time it reaches the output file, test.json.
It should look like:
{"test" : "㲃냆Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
Instead it looks like:
{"test" : "????????????????????"}
I have a feeling the conversion is happening in the app.write command used by the writeTextToFile() function (which I pulled from an example in Apple's Mac Automation Scripting Guide):
var app = Application.currentApplication()
app.includeStandardAdditions = true
function writeTextToFile(text, file, overwriteExistingContent) {
try {
// Convert the file to a string
var fileString = file.toString()
// Open the file for writing
var openedFile = app.openForAccess(Path(fileString), { writePermission: true })
// Clear the file if content should be overwritten
if (overwriteExistingContent) {
app.setEof(openedFile, { to: 0 })
}
// Write the new content to the file
app.write(text, { to: openedFile, startingAt: app.getEof(openedFile) })
// Close the file
app.closeAccess(openedFile)
// Return a boolean indicating that writing was successful
return true
}
catch(error) {
try {
// Close the file
app.closeAccess(file)
}
catch(error) {
// Report the error if closing failed
console.log(`Couldn't close file: ${error}`)
}
// Return a boolean indicating that writing failed
return false
}
}
Is there a substitute command for app.write that maintains the LZ compressed string / a better way to accomplish what I am trying to do?
In addition, I am using the readFile() function (also from the Scripting Guide) to load the LZ string back into the script:
function readFile(file) {
// Convert the file to a string
var fileString = file.toString()
// Read the file and return its contents
return app.read(Path(fileString))
}
But rather than returning:
{"test" : "㲃냆Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ"}
It is returning:
"{\"test\" : \"㲃냆੠Њޱᐈ攀렒삶퓲ٔ쀛䳂䨀푖㢈Ӱນꀀ\"}"
Does anybody know a fix for this too?
I know that it is possible to use Cocoa in JXA scripts, so maybe the solution lies therein?
I am just getting to grips with JavaScript so I'll admit trying to grasp Objective-C or Swift is way beyond me right now.
I look forward to any solutions and/or pointers that you might be able to provide me. Thanks in advance!
After some further Googling, I came across these two posts:
How can I write UTF-8 files using JavaScript for Mac Automation?
read file as class utf8
I have thus altered my script accordingly.
writeTextToFile() now looks like:
function writeTextToFile(text, file) {
// source: https://stackoverflow.com/a/44293869/11616368
var nsStr = $.NSString.alloc.initWithUTF8String(text)
var nsPath = $(file).stringByStandardizingPath
var successBool = nsStr.writeToFileAtomicallyEncodingError(nsPath, false, $.NSUTF8StringEncoding, null)
if (!successBool) {
throw new Error("function writeFile ERROR:\nWrite to File FAILED for:\n" + file)
}
return successBool
};
While readFile() looks like:
ObjC.import('Foundation')
const readFile = function (path, encoding) {
// source: https://github.com/JXA-Cookbook/JXA-Cookbook/issues/25#issuecomment-271204038
const pathString = path.toString()
!encoding && (encoding = $.NSUTF8StringEncoding)
const fm = $.NSFileManager.defaultManager
const data = fm.contentsAtPath(pathString)
const str = $.NSString.alloc.initWithDataEncoding(data, encoding)
return ObjC.unwrap(str)
};
Both use Objective-C to overcome app.write and app.read's inability to handle UTF-8.
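A minimal round-trip sketch, assuming both functions above and the data_to_write and file variables from the original script are in scope:
writeTextToFile(data_to_write, file)  // writes the JSON, UTF-8 encoded, via NSString
var roundTrip = readFile(file)        // reads it back, defaulting to NSUTF8StringEncoding
app.displayAlert(roundTrip)           // the LZ-compressed characters survive intact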
How can I get HMAC-SHA512(key, data) in the browser using Crypto Web API (window.crypto)?
Currently I am using CryptoJS library and it is pretty simple:
CryptoJS.HmacSHA512("myawesomedata", "mysecretkey").toString();
Result is 91c14b8d3bcd48be0488bfb8d96d52db6e5f07e5fc677ced2c12916dc87580961f422f9543c786eebfb5797bc3febf796b929efac5c83b4ec69228927f21a03a.
I want to get rid of extra dependencies and start using Crypto Web API instead. How can I get the same result with it?
Answering my own question. The code below returns the same result as CryptoJS.HmacSHA512("myawesomedata", "mysecretkey").toString();
There are promises everywhere as WebCrypto is asynchronous:
// encoder to convert string to Uint8Array
var enc = new TextEncoder("utf-8");
window.crypto.subtle.importKey(
"raw", // raw format of the key - should be Uint8Array
enc.encode("mysecretkey"),
{ // algorithm details
name: "HMAC",
hash: {name: "SHA-512"}
},
false, // extractable = false (the key cannot be exported)
["sign", "verify"] // what this key can do
).then( key => {
window.crypto.subtle.sign(
"HMAC",
key,
enc.encode("myawesomedata")
).then(signature => {
var b = new Uint8Array(signature);
var str = Array.prototype.map.call(b, x => x.toString(16).padStart(2, '0')).join("")
console.log(str);
});
});
Async/Await Crypto Subtle HMAC SHA-256/512 with Base64 Digest
The following is a copy of the ✅ answer. This time we are using async/await for clean syntax. This approach also offers a base64 encoded digest.
secret is the secret key that will be used to sign the body.
body is the string-to-sign.
enc is a text encoder that converts UTF-8 strings to JavaScript byte arrays.
algorithm is a JS object which is used to identify the signature methods.
key is a CryptoKey.
signature is the byte array hash.
digest is the base64 encoded signature.
The JavaScript code follows:
(async ()=>{
'use strict';
let secret = "sec-demo"; // the secret key
let enc = new TextEncoder("utf-8");
let body = "GET\npub-demo\n/v2/auth/grant/sub-key/sub-demo\nauth=myAuthKey&g=1&target-uuid=user-1×tamp=1595619509&ttl=300";
let algorithm = { name: "HMAC", hash: "SHA-256" };
let key = await crypto.subtle.importKey("raw", enc.encode(secret), algorithm, false, ["sign", "verify"]);
let signature = await crypto.subtle.sign(algorithm.name, key, enc.encode(body));
let digest = btoa(String.fromCharCode(...new Uint8Array(signature)));
console.log(digest);
})();
The original answer on this page was helpful in a debugging effort earlier today. We're using it to help identify a bug in our documentation for creating signatures for granting access tokens to use APIs with read/write permissions.
Somehow #StephenBlum's answer doesn't work for me.
I rewrote #StepanSnigirev's answer using async/await below instead.
"use strict";
(async () => {
const secret = "mysecretkey";
const enc = new TextEncoder();
const body = "myawesomedata";
const algorithm = { name: "HMAC", hash: "SHA-512" };
const key = await crypto.subtle.importKey(
"raw",
enc.encode(secret),
algorithm,
false,
["sign", "verify"]
);
const signature = await crypto.subtle.sign(
algorithm.name,
key,
enc.encode(body)
);
// convert buffer to byte array
const hashArray = Array.from(new Uint8Array(signature));
// convert bytes to hex string
const digest = hashArray
.map((b) => b.toString(16).padStart(2, "0"))
.join("");
console.log(digest);
})();
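Since the key in these snippets is imported with both "sign" and "verify" usages, the same key can also check an existing signature with crypto.subtle.verify. A minimal sketch, assuming it runs inside the async IIFE above where algorithm, key, enc, body and signature are in scope:
// resolves to true when the signature matches the body under the imported key
const valid = await crypto.subtle.verify(
  algorithm.name,   // "HMAC"
  key,
  signature,        // the ArrayBuffer returned by crypto.subtle.sign
  enc.encode(body)
);
console.log(valid); // true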
I get a PKCS#7 crypto package from a 3rd party system.
The package is not compressed and not encrypted, PEM-encoded, and signed with an X.509 certificate.
I also have a PEM cert file from the provider.
The data inside is XML
I need to do the following in Node.JS:
extract the data
verify the signature
A sample package (no sensitive info, data refers to our qa system) http://pastebin.com/7ay7F99e
OK, finally got it.
First of all, PKCS messages are complex structures binary-encoded using ASN1.
Second, they can be serialized to binary files (DER encoding) or text PEM files using Base64 encoding.
Third, the PKCS#7 format specifies several package types, of which mine is called Signed Data. These formats are distinguished by the OBJECT IDENTIFIER value at the beginning of the ASN1 object (the 1st element of the wrapper sequence); you can go to http://lapo.it/asn1js/ and paste the package text to see the fully parsed structure.
Next, we need to parse the package (Base64 -> ASN1 -> some object representation). Unfortunately, there's no npm package for that. I found quite a good project, forge, that is not published to the npm registry (though it is npm-compatible). It parses the PEM format, but the resulting tree is quite an unpleasant thing to traverse. Based on their Encrypted Data and Enveloped Data implementations, I created a partial implementation of Signed Data in my own fork. UPD: my pull request was later merged into the forge project.
Now finally we have the whole thing parsed.
At that point I found a great (and probably the only one on the whole web) explanatory article on signed PKCS#7 verification: http://qistoph.blogspot.com/2012/01/manual-verify-pkcs7-signed-data-with.html
I was able to extract and successfully decode the signature from the file, but the hash inside was different from the data's hash. God bless Chris who explained what actually happens.
The data signing process is 2-step:
original content's hash is calculated
a set of "Authorized Attributes" is constructed including: type of the data singed, signing time and data hash
Then the set from step 2 is signed using the signer's private key.
Due to PKCS#7 specifics, this set of attributes is stored inside a context-specific constructed type (class=0x80, type=0), but it should be signed and validated as a normal SET (class=0, type=17).
As Chris mentions (https://stackoverflow.com/a/16154756/108533) this only verifies that the attributes in the package are valid. We should also validate the actual data hash against the digest attribute.
So finally here's a code doing validation (cert.pem is a certificate file that the provider sent me, package is a PEM-encoded message I got from them over HTTP POST):
var fs = require('fs');
var crypto = require('crypto');
var forge = require('forge');
var pkcs7 = forge.pkcs7;
var asn1 = forge.asn1;
var oids = forge.pki.oids;
var folder = '/a/path/to/files/';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();
var res = true;
try {
var msg = pkcs7.messageFromPem(pkg);
var attrs = msg.rawCapture.authenticatedAttributes;
var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs);
var buf = Buffer.from(asn1.toDer(set).data, 'binary');
var sig = msg.rawCapture.signature;
var v = crypto.createVerify('RSA-SHA1');
v.update(buf);
if (!v.verify(cert, sig)) {
console.log('Wrong authorized attributes!');
res = false;
}
var h = crypto.createHash('SHA1');
var data = msg.rawCapture.content.value[0].value[0].value;
h.update(data);
var attrDigest = null;
for (var i = 0, l = attrs.length; i < l; ++i) {
if (asn1.derToOid(attrs[i].value[0].value) === oids.messageDigest) {
attrDigest = attrs[i].value[1].value[0].value;
}
}
var dataDigest = h.digest('binary'); // binary string, so it can be compared with the attribute value extracted by forge
if (dataDigest !== attrDigest) {
console.log('Wrong content digest');
res = false;
}
}
catch (_e) {
console.dir(_e);
res = false;
}
if (res) {
console.log("It's OK");
}
Your answer is a big step in the right direction. You are however missing out an essential part of the validation!
You should verify the hash of the data against the digest contained in the signed attributes. Otherwise it would be possible for someone to replace the content with malicious data. Try for example validating the following 'package' with your code (and have a look at the content): http://pastebin.com/kaZ2XQQc
I'm not much of a NodeJS developer (this is actually my first try :p), but here's a suggestion to help you get started.
var fs = require('fs');
var crypto = require('crypto');
var pkcs7 = require('./js/pkcs7'); // forge from my own fork
var asn1 = require('./js/asn1');
var folder = '';
var pkg = fs.readFileSync(folder + 'package').toString();
var cert = fs.readFileSync(folder + 'cert.pem').toString();
try {
var msg = pkcs7.messageFromPem(pkg);
var attrs = msg.rawCapture.authenticatedAttributes; // got the list of auth attrs
var set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, attrs); // packed them inside of the SET object
var buf = Buffer.from(asn1.toDer(set).data, 'binary'); // DO NOT forget 'binary', otherwise it tries to interpret bytes as UTF-8 chars
var sig = msg.rawCapture.signature;
var shasum = crypto.createHash('sha1'); // better be based on msg.rawCapture.digestAlgorithms
shasum.update(msg.rawCapture.content.value[0].value[0].value);
for(var n in attrs) {
var attrib = attrs[n].value;
var attrib_type = attrib[0].value;
var attrib_value = attrib[1].value[0].value;
if(attrib_type == "\x2a\x86\x48\x86\xf7\x0d\x01\x09\x04") { // better would be to use the OID (1.2.840.113549.1.9.4)
if(shasum.digest('binary') == attrib_value) {
console.log('hash matches');
var v = crypto.createVerify('RSA-SHA1');
v.update(buf);
console.log(v.verify(cert, sig)); // -> should type true
} else {
console.log('hash mismatch');
}
}
}
}
catch (_e) {
console.dir(_e);
}
Based on inspiration from this answer, I've implemented a sample for signing and verifying PDF files using node-signpdf and node-forge.
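For reference, signing with node-signpdf typically looks roughly like the sketch below; the helper path, option names, and file names are assumptions based on that library's README and should be checked against its current documentation (verification of the result would then be done with node-forge, as in the answers above):
const fs = require('fs');
const signer = require('node-signpdf').default;
const { plainAddPlaceholder } = require('node-signpdf/dist/helpers');

let pdfBuffer = fs.readFileSync('document.pdf');
// add a signature placeholder to a plain (non-pdfkit) PDF
pdfBuffer = plainAddPlaceholder({
  pdfBuffer,
  reason: 'Example signature',
  signatureLength: 8192
});
// sign with a PKCS#12 bundle (certificate + private key)
const signedPdf = signer.sign(pdfBuffer, fs.readFileSync('certificate.p12'), { passphrase: 'changeit' });
fs.writeFileSync('document.signed.pdf', signedPdf);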