v1.1.8: cleanup and support forced keys for verification

This commit is contained in:
AJ ONeal 2019-03-09 02:50:14 -07:00
parent 448b977963
commit 5fef6a7430
5 changed files with 87 additions and 71 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -1,14 +1,14 @@
'use strict'; 'use strict';
var keyfetch = require('./keyfetch.js'); var keyfetch = require('./keyfetch.js');
var testUrl = "https://example.auth0.com"; var testIss = "https://example.auth0.com";
keyfetch.init({}); keyfetch.init({});
keyfetch.oidcJwks().then(function (jwks) { keyfetch.oidcJwks(testIss).then(function (hits) {
keyfetch._clear(); keyfetch._clear();
console.log(jwks); console.log(hits);
return keyfetch.oidcJwk(jwks[0].thumbprint, "https://example.auth0.com").then(function () { return keyfetch.oidcJwk(hits[0].thumbprint, testIss).then(function () {
return keyfetch.oidcJwk(jwks[0].thumbprint, "https://example.auth0.com").then(function (jwk) { return keyfetch.oidcJwk(hits[0].thumbprint, testIss).then(function (jwk) {
console.log(jwk); console.log(jwk);
}); });
}); });

View File

@ -236,6 +236,22 @@ keyfetch.verify = function (opts) {
throw new Error("token's 'nbf' has not been reached or could not parsed: '" + nbf + "'"); throw new Error("token's 'nbf' has not been reached or could not parsed: '" + nbf + "'");
} }
} }
if (opts.jwks || opts.jwk) {
return overrideLookup(opts.jwks || [opts.jwk]);
}
function overrideLookup(jwks) {
return Promise.all(jwks.map(function (jwk) {
var Keypairs = jwk.x ? Eckles : Rasha;
return Keypairs.export({ jwk: jwk }).then(function (pem) {
return Keypairs.thumbprint({ jwk: jwk }).then(function (thumb) {
return { jwk: jwk, pem: pem, thumbprint: thumb };
});
});
})).then(verifyAny);
}
var kid = decoded.header.kid; var kid = decoded.header.kid;
var iss; var iss;
var fetcher; var fetcher;
@ -254,81 +270,79 @@ keyfetch.verify = function (opts) {
fetchOne = keyfetch.jwk; fetchOne = keyfetch.jwk;
} }
function verify(jwk, payload) {
  // Derive the Node digest name ("SHA256"/"SHA384"/"SHA512") from the
  // JWT 'alg' header (e.g. "RS256" -> "SHA256").
  var digestName = 'SHA' + decoded.header.alg.replace(/[^\d]+/i, '');
  // ECDSA JWT signatures are raw r||s and must be converted to the
  // ASN.1/DER form Node's verifier expects; other algs pass through.
  var signature = convertIfEcdsa(decoded.header, decoded.signature);
  // The signed input is the original (still-encoded) header and payload.
  var signingInput = jwt.split('.')[0] + '.' + payload;
  var verifier = require('crypto').createVerify(digestName);
  verifier.update(signingInput);
  return verifier.verify(jwk.pem, signature, 'base64');
}
function convertIfEcdsa(header, b64sig) {
  // Convert a JWT-style ECDSA signature (raw r||s concatenation) into the
  // ASN.1/DER SEQUENCE { INTEGER r, INTEGER s } form that Node's crypto
  // verifier expects. Non-ECDSA signatures pass through unchanged.
  // ECDSA JWT signatures differ from "normal" ECDSA signatures
  // https://tools.ietf.org/html/rfc7518#section-3.4
  if (!/^ES/i.test(header.alg)) { return b64sig; }
  var bufsig = Buffer.from(b64sig, 'base64');
  var hlen = bufsig.byteLength / 2; // should be even
  var r = bufsig.slice(0, hlen);
  var s = bufsig.slice(hlen);
  // unpad positive ints less than 32 bytes wide, but keep at least one
  // byte: without the length guard an all-zero half would loop forever
  // (slice(1) of an empty Buffer is empty), and a zero value must still
  // encode as the valid DER INTEGER 0x02 0x01 0x00
  while (r.byteLength > 1 && !r[0]) { r = r.slice(1); }
  while (s.byteLength > 1 && !s[0]) { s = s.slice(1); }
  // pad (or re-pad) ambiguously non-negative BigInts to 33 bytes wide
  if (0x80 & r[0]) { r = Buffer.concat([Buffer.from([0]), r]); }
  if (0x80 & s[0]) { s = Buffer.concat([Buffer.from([0]), s]); }
  // SEQUENCE length = two INTEGER headers (tag + length byte) + contents
  var len = 2 + r.byteLength + 2 + s.byteLength;
  var head = [0x30];
  // hard code 0x80 + 1 because it won't be longer than
  // two SHA512 plus two pad bytes (130 bytes <= 256)
  if (len >= 0x80) { head.push(0x81); }
  head.push(len);
  var buf = Buffer.concat([
    Buffer.from(head)
  , Buffer.from([0x02, r.byteLength]), r
  , Buffer.from([0x02, s.byteLength]), s
  ]);
  // normalize any base64url characters and strip padding
  return buf.toString('base64')
    .replace(/-/g, '+')
    .replace(/_/g, '/')
    .replace(/=/g, '')
  ;
}
var payload = jwt.split('.')[1]; // as string, as it was signed var payload = jwt.split('.')[1]; // as string, as it was signed
if (kid) { if (kid) {
return fetchOne(kid, iss).then(verifyOne); //.catch(fetchAny); return fetchOne(kid, iss).then(verifyOne); //.catch(fetchAny);
} else { } else {
return fetchAny(); return fetcher(iss).then(verifyAny);
} }
function verifyOne(jwk) { function verify(hit, payload) {
if (true === verify(jwk, payload)) { var alg = 'SHA' + decoded.header.alg.replace(/[^\d]+/i, '');
var sig = ecdsaAsn1SigToJwtSig(decoded.header, decoded.signature);
return require('crypto')
.createVerify(alg)
.update(jwt.split('.')[0] + '.' + payload)
.verify(hit.pem, sig, 'base64')
;
}
function verifyOne(hit) {
if (true === verify(hit, payload)) {
return decoded; return decoded;
} }
throw new Error('token signature verification was unsuccessful'); throw new Error('token signature verification was unsuccessful');
} }
function fetchAny() { function verifyAny(hits) {
return fetcher(iss).then(function (jwks) { if (hits.some(function (hit) {
if (jwks.some(function (jwk) { if (kid) {
if (kid) { if (kid !== hit.jwk.kid && kid !== hit.thumbprint) { return; }
if (kid !== jwk.kid && kid !== jwk.thumbprint) { return; } if (true === verify(hit, payload)) { return true; }
if (true === verify(jwk, payload)) { return true; } throw new Error('token signature verification was unsuccessful');
throw new Error('token signature verification was unsuccessful'); } else {
} else { if (true === verify(hit, payload)) { return true; }
if (true === verify(jwk, payload)) { return true; }
}
})) {
return decoded;
} }
throw new Error("Retrieved a list of keys, but none of them matched the 'kid' (key id) of the token."); })) {
}); return decoded;
}
throw new Error("Retrieved a list of keys, but none of them matched the 'kid' (key id) of the token.");
} }
}); });
}; };
function ecdsaAsn1SigToJwtSig(header, b64sig) {
  // Convert a JWT-style ECDSA signature (raw r||s concatenation) into the
  // ASN.1/DER SEQUENCE { INTEGER r, INTEGER s } form that Node's crypto
  // verifier expects. Non-ECDSA signatures pass through unchanged.
  // NOTE(review): the name reads backwards for what the code does
  // (it converts the JWT form *to* ASN.1) — kept for compatibility.
  // ECDSA JWT signatures differ from "normal" ECDSA signatures
  // https://tools.ietf.org/html/rfc7518#section-3.4
  if (!/^ES/i.test(header.alg)) { return b64sig; }
  var bufsig = Buffer.from(b64sig, 'base64');
  var hlen = bufsig.byteLength / 2; // should be even
  var r = bufsig.slice(0, hlen);
  var s = bufsig.slice(hlen);
  // unpad positive ints less than 32 bytes wide, but keep at least one
  // byte: without the length guard an all-zero half would loop forever
  // (slice(1) of an empty Buffer is empty), and a zero value must still
  // encode as the valid DER INTEGER 0x02 0x01 0x00
  while (r.byteLength > 1 && !r[0]) { r = r.slice(1); }
  while (s.byteLength > 1 && !s[0]) { s = s.slice(1); }
  // pad (or re-pad) ambiguously non-negative BigInts to 33 bytes wide
  if (0x80 & r[0]) { r = Buffer.concat([Buffer.from([0]), r]); }
  if (0x80 & s[0]) { s = Buffer.concat([Buffer.from([0]), s]); }
  // SEQUENCE length = two INTEGER headers (tag + length byte) + contents
  var len = 2 + r.byteLength + 2 + s.byteLength;
  var head = [0x30];
  // hard code 0x80 + 1 because it won't be longer than
  // two SHA512 plus two pad bytes (130 bytes <= 256)
  if (len >= 0x80) { head.push(0x81); }
  head.push(len);
  var buf = Buffer.concat([
    Buffer.from(head)
  , Buffer.from([0x02, r.byteLength]), r
  , Buffer.from([0x02, s.byteLength]), s
  ]);
  // normalize any base64url characters and strip padding
  return buf.toString('base64')
    .replace(/-/g, '+')
    .replace(/_/g, '/')
    .replace(/=/g, '')
  ;
}

2
package-lock.json generated
View File

@ -1,6 +1,6 @@
{ {
"name": "keyfetch", "name": "keyfetch",
"version": "1.1.0", "version": "1.1.8",
"lockfileVersion": 1, "lockfileVersion": 1,
"requires": true, "requires": true,
"dependencies": { "dependencies": {

View File

@ -1,4 +1,5 @@
{ "author": { {
"author": {
"name": "AJ ONeal", "name": "AJ ONeal",
"email": "solderjs@gmail.com" "email": "solderjs@gmail.com"
}, },
@ -29,5 +30,5 @@
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1" "test": "echo \"Error: no test specified\" && exit 1"
}, },
"version": "1.1.7" "version": "1.1.8"
} }