Comparing version
@@ -12,6 +12,6 @@ /** | ||
export { Cryptosuites } from './lib/types'; | ||
export { generateKey } from './lib/crypto_utils'; | ||
export { generateKey, jwkToCrypto } from './lib/crypto_utils'; | ||
/** | ||
* Generate a (separate) proof graph (or graphs), per the DI spec. The signature is stored in | ||
* multibase format, using base64url encoding. Keys are accepted in JWK format (and stored in JWK or in Multikey, depending on the crypto key). | ||
* multibase format, using base64url encoding. Keys are accepted in WebCrypto Key format (and stored in JWK or in Multikey, depending on the crypto key). | ||
* | ||
@@ -22,3 +22,3 @@ * A single previous proof reference may also be set, although that really makes sense in the case of a single key only | ||
* @param keyData | ||
* @param previous - A previous proof ID, when applicable | ||
* @param previous - A previous proof ID, when applicable; this is added as an extra statement in the proof graphs. This parameter is only relevant internally when a proof chain is generated. | ||
* @throws - Error if there was an issue while signing. | ||
@@ -25,0 +25,0 @@ * @returns |
@@ -9,3 +9,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.verifyEmbeddedProofGraph = exports.embedProofGraph = exports.verifyProofGraph = exports.generateProofGraph = exports.generateKey = exports.Cryptosuites = void 0; | ||
exports.verifyEmbeddedProofGraph = exports.embedProofGraph = exports.verifyProofGraph = exports.generateProofGraph = exports.jwkToCrypto = exports.generateKey = exports.Cryptosuites = void 0; | ||
const n3 = require("n3"); | ||
@@ -20,13 +20,18 @@ const types = require("./lib/types"); | ||
Object.defineProperty(exports, "generateKey", { enumerable: true, get: function () { return crypto_utils_1.generateKey; } }); | ||
Object.defineProperty(exports, "jwkToCrypto", { enumerable: true, get: function () { return crypto_utils_1.jwkToCrypto; } }); | ||
// n3.DataFactory is a namespace with some functions... | ||
const { quad, namedNode } = n3.DataFactory; | ||
const { quad } = n3.DataFactory; | ||
async function generateProofGraph(dataset, keyData, previous) { | ||
// Start fresh with results | ||
const report = { errors: [], warnings: [] }; | ||
// This is to be signed | ||
const toBeSigned = await (0, utils_1.calculateDatasetHash)(dataset); | ||
// This is not optimal. It will regenerate the hash for every key and, except for an occasional ECDSA+P-384, it will generate the same data. | ||
// Some sort of a caching information on the hash values could replace this, but that is left for later... | ||
const signAndGenerate = async (keypair) => { | ||
const toBeSigned = await (0, utils_1.calculateDatasetHash)(dataset, keypair.publicKey); | ||
return (0, proof_utils_1.generateAProofGraph)(report, toBeSigned, keypair, previous); | ||
}; | ||
// prepare for the overload of arguments | ||
const keyPairs = (0, utils_1.isKeyData)(keyData) ? [keyData] : keyData; | ||
// execute the proof graph generation concurrently | ||
const promises = Array.from(keyPairs).map((keypair) => (0, proof_utils_1.generateAProofGraph)(report, toBeSigned, keypair, previous)); | ||
const promises = Array.from(keyPairs).map(signAndGenerate); | ||
const retval = await Promise.all(promises); | ||
@@ -66,3 +71,2 @@ // return by taking care of overloading. | ||
const report = { errors: [], warnings: [] }; | ||
const hash = await (0, utils_1.calculateDatasetHash)(dataset); | ||
const proofGraphs = (0, utils_1.isDatasetCore)(proofGraph) ? [proofGraph] : proofGraph; | ||
@@ -75,3 +79,3 @@ const proofs = proofGraphs.map((pr) => { | ||
}); | ||
const verified = await (0, proof_utils_1.verifyProofGraphs)(report, hash, proofs); | ||
const verified = await (0, proof_utils_1.verifyProofGraphs)(report, dataset, proofs); | ||
return { | ||
@@ -168,2 +172,3 @@ verified, | ||
// Merge all generated proof datasets into the result | ||
// The reference to the proof graph(s) from the dedicated anchor is added to the result | ||
for (const proof of allProofs) { | ||
@@ -239,3 +244,3 @@ if (anchor) { | ||
for (const q of dataset) { | ||
if (q.predicate.equals(proof_utils_1.sec_proof) && proofGraphs.has(q.graph)) { | ||
if (q.predicate.equals(proof_utils_1.sec_proof) && proofGraphs.has(q.object)) { | ||
// this is an extra entry, not part of the triples that were signed | ||
@@ -289,4 +294,3 @@ // neither it is part of any proof graphs | ||
dataStore.addQuads(extraQuads); | ||
const hash = await (0, utils_1.calculateDatasetHash)(dataStore); | ||
const verifiedChainLink = await (0, proof_utils_1.verifyProofGraphs)(report, hash, [allProofs[i]]); | ||
const verifiedChainLink = await (0, proof_utils_1.verifyProofGraphs)(report, dataStore, [allProofs[i]]); | ||
verified_list.push(verifiedChainLink); | ||
@@ -306,5 +310,4 @@ dataStore.removeQuads(extraQuads); | ||
// This is the simple case... | ||
const hash = await (0, utils_1.calculateDatasetHash)(dataStore); | ||
const proofs = proofGraphs.data(); | ||
const verified = await (0, proof_utils_1.verifyProofGraphs)(report, hash, proofs); | ||
const verified = await (0, proof_utils_1.verifyProofGraphs)(report, dataStore, proofs); | ||
return { | ||
@@ -311,0 +314,0 @@ verified, |
@@ -14,21 +14,7 @@ /** | ||
*/ | ||
import { KeyMetadata, KeyData, Cryptosuites, KeyPair, Errors } from './types'; | ||
/** JWK values for the algorithms that are relevant for this package */ | ||
export type Alg = "RS256" | "RS384" | "RS512" | "PS256" | "PS384" | "PS512"; | ||
import { KeyMetadata, KeyData, Cryptosuites, Errors } from './types'; | ||
/** JWK values for the elliptic curves that are relevant for this package */ | ||
export type Crv = "P-256" | "P-384" | "P-521"; | ||
export type Crv = "P-256" | "P-384"; | ||
/** JWK values for the hash methods that are relevant for this package */ | ||
export type Hsh = "SHA-256" | "SHA-384" | "SHA-512"; | ||
/** JWK values for the key types that are relevant for this package */ | ||
export type Kty = "EC" | "RSA" | "OKP"; | ||
/** | ||
* Interface to the Web Crypto information that has to be provided for the | ||
* creation of some RSA encryption keys. | ||
*/ | ||
interface WebCryptoAPIData { | ||
name: string; | ||
hash?: Hsh; | ||
saltLength?: number; | ||
namedCurve?: Crv; | ||
} | ||
export type Hsh = "SHA-256" | "SHA-384"; | ||
/** Information that may be used when generating new keys */ | ||
@@ -40,10 +26,2 @@ export interface KeyDetails { | ||
} | ||
/** | ||
* Mapping of the JWK instance and the corresponding terms for the WebCrypto API. | ||
* | ||
* @param report | ||
* @param key | ||
* @returns | ||
*/ | ||
export declare function algorithmData(report: Errors, key: JsonWebKey): WebCryptoAPIData | null; | ||
/*********************************************************************************** | ||
@@ -55,2 +33,14 @@ * | ||
/** | ||
* Convert a JWK key into WebCrypto; a thin layer on top of WebCrypto, which gathers | ||
* the right algorithmic details needed for the import itself. | ||
* | ||
* This function is also useful to the end user, so it is also meant to be | ||
* re-exported via the `index.ts` module. | ||
* | ||
* @param jwkKey | ||
* @param privateKey - whether this is a private or public key | ||
* @returns | ||
*/ | ||
export declare function jwkToCrypto(jwkKey: JsonWebKey, privateKey?: boolean): Promise<CryptoKey>; | ||
/** | ||
* Sign a message. | ||
@@ -62,6 +52,6 @@ * | ||
* @param message | ||
* @param secretKey | ||
* @param privateKey | ||
* @returns - either the signature in Multicode format, or `null` in case of an error. | ||
*/ | ||
export declare function sign(report: Errors, message: string, secretKey: JsonWebKey): Promise<string | null>; | ||
export declare function sign(report: Errors, message: string, privateKey: CryptoKey): Promise<string | null>; | ||
/** | ||
@@ -78,5 +68,5 @@ * Verify a signature | ||
*/ | ||
export declare function verify(report: Errors, message: string, signature: string, publicKey: JsonWebKey): Promise<boolean>; | ||
export declare function verify(report: Errors, message: string, signature: string, publicKey: CryptoKey): Promise<boolean>; | ||
/** | ||
* Mapping from the JWK data to the corresponding DI cryptosuite identifier. | ||
* Mapping from the Crypto Key data to the corresponding DI cryptosuite identifier. | ||
* | ||
@@ -87,3 +77,3 @@ * @param report - placeholder for error reports | ||
*/ | ||
export declare function cryptosuiteId(report: Errors, keyPair: KeyPair): Cryptosuites | null; | ||
export declare function cryptosuiteId(report: Errors, keyPair: CryptoKeyPair): Cryptosuites | null; | ||
/** | ||
@@ -94,8 +84,7 @@ * Generate key pair to be used with DI in general. This function is not necessary for the core | ||
* | ||
* @param metadata | ||
* @param suite | ||
* @param keyData | ||
* @param metadata | ||
* @returns | ||
*/ | ||
export declare function generateKey(suite: Cryptosuites, metadata?: KeyMetadata, keyData?: KeyDetails): Promise<KeyData>; | ||
export {}; | ||
export declare function generateKey(suite: Cryptosuites, keyData?: KeyDetails, metadata?: KeyMetadata): Promise<KeyData>; |
@@ -16,3 +16,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.generateKey = exports.cryptosuiteId = exports.verify = exports.sign = exports.algorithmData = void 0; | ||
exports.generateKey = exports.cryptosuiteId = exports.verify = exports.sign = exports.jwkToCrypto = void 0; | ||
const types = require("./types"); | ||
@@ -37,6 +37,4 @@ const types_1 = require("./types"); | ||
"PS384": { name: 'RSA-PSS', hash: 'SHA-384', saltLength: SALT_LENGTH }, | ||
"PS512": { name: 'RSA-PSS', hash: 'SHA-512', saltLength: SALT_LENGTH }, | ||
"RS256": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-256' }, | ||
"RS384": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-384' }, | ||
"RS512": { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-512' }, | ||
}; | ||
@@ -50,3 +48,3 @@ /** | ||
*/ | ||
function algorithmData(report, key) { | ||
function algorithmDataJWK(key) { | ||
switch (key.kty) { | ||
@@ -58,4 +56,3 @@ case "RSA": { | ||
catch (e) { | ||
report.errors.push(new types.Unclassified_Error(`Key's error in 'alg': ${e.message}`)); | ||
return null; | ||
throw new Error(`Key's error in 'alg': ${e.message}`); | ||
} | ||
@@ -78,13 +75,33 @@ } | ||
} | ||
exports.algorithmData = algorithmData; | ||
/** | ||
* Export a WebCrypto crypto key pair into their JWK equivalent. | ||
* Mapping of the CryptoKey instance and the corresponding terms for the WebCrypto API. | ||
* | ||
* @param newPair | ||
* @param report | ||
* @param key | ||
* @returns | ||
*/ | ||
async function toJWK(newPair) { | ||
const publicKey = await crypto.subtle.exportKey("jwk", newPair.publicKey); | ||
const privateKey = await crypto.subtle.exportKey("jwk", newPair.privateKey); | ||
return { public: publicKey, private: privateKey }; | ||
function algorithmDataCR(report, key) { | ||
const alg = key.algorithm; | ||
switch (alg.name) { | ||
case "RSA-PSS": { | ||
return { name: 'RSA-PSS', hash: 'SHA-256', saltLength: SALT_LENGTH }; | ||
} | ||
case "RSASSA-PKCS1-v1_5": { | ||
return { name: 'RSASSA-PKCS1-v1_5', hash: 'SHA-256' }; | ||
} | ||
case "ECDSA": { | ||
const curve = alg.namedCurve; | ||
return { | ||
name: "ECDSA", | ||
namedCurve: curve, | ||
hash: curve === "P-384" ? "SHA-384" : "SHA-256", | ||
}; | ||
} | ||
case "Ed25519": | ||
default: { | ||
return { | ||
name: "Ed25519" | ||
}; | ||
} | ||
} | ||
} | ||
@@ -152,2 +169,18 @@ /*********************************************************************************** | ||
/** | ||
* Convert a JWK key into WebCrypto; a thin layer on top of WebCrypto, which gathers | ||
* the right algorithmic details needed for the import itself. | ||
* | ||
* This function is also useful to the end user, so it is also meant to be | ||
* re-exported via the `index.ts` module. | ||
* | ||
* @param jwkKey | ||
* @param privateKey - whether this is a private or public key | ||
* @returns | ||
*/ | ||
async function jwkToCrypto(jwkKey, privateKey = false) { | ||
const algorithm = algorithmDataJWK(jwkKey); | ||
return await crypto.subtle.importKey("jwk", jwkKey, algorithm, true, privateKey ? ["sign"] : ["verify"]); | ||
} | ||
exports.jwkToCrypto = jwkToCrypto; | ||
/** | ||
* Sign a message. | ||
@@ -159,10 +192,10 @@ * | ||
* @param message | ||
* @param secretKey | ||
* @param privateKey | ||
* @returns - either the signature in Multicode format, or `null` in case of an error. | ||
*/ | ||
async function sign(report, message, secretKey) { | ||
async function sign(report, message, privateKey) { | ||
// Prepare the message to signature: | ||
const rawMessage = textToArrayBuffer(message); | ||
// The crypto algorithm to be used with this key: | ||
const algorithm = algorithmData(report, secretKey); | ||
const algorithm = algorithmDataCR(report, privateKey); | ||
if (algorithm === null) { | ||
@@ -173,6 +206,4 @@ return null; | ||
try { | ||
// Import the JWK key into crypto key: | ||
const key = await crypto.subtle.importKey("jwk", secretKey, algorithm, true, ["sign"]); | ||
const rawSignature = await crypto.subtle.sign(algorithm, key, rawMessage); | ||
// Turn the the signature into Base64URL, and the into multicode | ||
const rawSignature = await crypto.subtle.sign(algorithm, privateKey, rawMessage); | ||
// Turn the the signature into Base64URL, and then into multicode | ||
return `u${arrayBufferToBase64Url(rawSignature)}`; | ||
@@ -205,4 +236,4 @@ } | ||
const rawSignature = base64UrlToArrayBuffer(signature.slice(1)); | ||
// get the keys: | ||
const algorithm = algorithmData(report, publicKey); | ||
// get the algorithm details | ||
const algorithm = algorithmDataCR(report, publicKey); | ||
if (algorithm === null) { | ||
@@ -213,4 +244,3 @@ return false; | ||
try { | ||
const key = await crypto.subtle.importKey("jwk", publicKey, algorithm, true, ["verify"]); | ||
const retval = await crypto.subtle.verify(algorithm, key, rawSignature, rawMessage); | ||
const retval = await crypto.subtle.verify(algorithm, publicKey, rawSignature, rawMessage); | ||
if (retval === false) { | ||
@@ -229,3 +259,3 @@ report.errors.push(new types.Proof_Verification_Error(`Signature ${signature} is invalid`)); | ||
/** | ||
* Mapping from the JWK data to the corresponding DI cryptosuite identifier. | ||
* Mapping from the Crypto Key data to the corresponding DI cryptosuite identifier. | ||
* | ||
@@ -237,10 +267,3 @@ * @param report - placeholder for error reports | ||
function cryptosuiteId(report, keyPair) { | ||
// Some elementary check | ||
if (keyPair.private.kty !== keyPair.public.kty || | ||
keyPair.private.crv !== keyPair.public.crv || | ||
keyPair.private.alg !== keyPair.private.alg) { | ||
report.errors.push(new types.Invalid_Verification_Method('Keys are not in pair (in:\n ${JSON.stringify(keyPair,null,4)})')); | ||
return null; | ||
} | ||
const alg = algorithmData(report, keyPair.public); | ||
const alg = keyPair.publicKey.algorithm; | ||
if (alg === null) { | ||
@@ -256,3 +279,3 @@ return null; | ||
default: { | ||
report.errors.push(new types.Invalid_Verification_Method(`Unknown alg (${alg.name} in:\n ${JSON.stringify(keyPair, null, 4)})`)); | ||
report.errors.push(new types.Invalid_Verification_Method(`Invalid algorithm name (${alg.name})`)); | ||
return null; | ||
@@ -269,8 +292,8 @@ } | ||
* | ||
* @param metadata | ||
* @param suite | ||
* @param keyData | ||
* @param metadata | ||
* @returns | ||
*/ | ||
async function generateKey(suite, metadata, keyData) { | ||
async function generateKey(suite, keyData, metadata) { | ||
const suiteToAPI = () => { | ||
@@ -300,10 +323,8 @@ switch (suite) { | ||
const newPair = await crypto.subtle.generateKey(suiteToAPI(), true, ["sign", "verify"]); | ||
const keyPair = await toJWK(newPair); | ||
const retval = { | ||
public: keyPair.public, | ||
private: keyPair.private, | ||
cryptosuite: `${suite}`, | ||
const output = { | ||
publicKey: newPair.publicKey, | ||
privateKey: newPair.privateKey, | ||
}; | ||
return { ...retval, ...metadata }; | ||
return { ...output, ...metadata }; | ||
} | ||
exports.generateKey = generateKey; |
@@ -63,6 +63,6 @@ /** | ||
* @param report - placeholder for error reports | ||
* @param hash | ||
* @param dataset - the original dataset to be checked with | ||
* @param proofs | ||
* @returns | ||
*/ | ||
export declare function verifyProofGraphs(report: Errors, hash: string, proofs: ProofStore[]): Promise<boolean>; | ||
export declare function verifyProofGraphs(report: Errors, dataset: rdf.DatasetCore, proofs: ProofStore[]): Promise<boolean>; |
@@ -17,6 +17,6 @@ "use strict"; | ||
const canonify_1 = require("@truestamp/canonify"); | ||
const mkwc = require("multikey-webcrypto"); | ||
const types = require("./types"); | ||
const utils_1 = require("./utils"); | ||
const crypto_utils_1 = require("./crypto_utils"); | ||
const multikey_1 = require("./multikey"); | ||
const debug = require("./debug"); | ||
@@ -50,6 +50,6 @@ // n3.DataFactory is a namespace with some functions... | ||
* The proof option graph is the collection of all quads in a proof graph, except the proof | ||
* value. The hash of this graph is combined with the hash of the original data. | ||
* value setting triple. The hash of this graph is combined with the hash of the original data. | ||
* | ||
* This function does one more step before hashing: it canonicalizes the (possible) JWK key. This | ||
* key is in a JSON Literal; this must be canonicalized to ensure a proper validation. | ||
* key is in a JSON Literal; this must be canonicalized to ensure proper validation. | ||
* | ||
@@ -59,3 +59,3 @@ * @param proofGraph | ||
*/ | ||
async function calculateProofOptionsHash(proofGraph) { | ||
async function calculateProofOptionsHash(proofGraph, key) { | ||
const proofOptions = new n3.Store(); | ||
@@ -79,5 +79,3 @@ // The proof option graph is a copy of the proof graph quads, except that: | ||
// The return value must be the hash of the proof option graph | ||
// debug.log(`The proof graph to hash:`, proofOptions); | ||
// debug.log('\n'); | ||
return await (0, utils_1.calculateDatasetHash)(proofOptions); | ||
return await (0, utils_1.calculateDatasetHash)(proofOptions, key); | ||
} | ||
@@ -95,10 +93,10 @@ /** | ||
async function generateAProofGraph(report, hashValue, keyData, previousProof) { | ||
const cryptosuite = keyData?.cryptosuite || (0, crypto_utils_1.cryptosuiteId)(report, keyData); | ||
const cryptosuite = (0, crypto_utils_1.cryptosuiteId)(report, keyData); | ||
/* @@@@@ */ debug.log(`Generating a proof graph with ${cryptosuite}`); | ||
// Generate the key data to be stored in the proof graph; either multikey or jwk, depending on the cryptosuite | ||
const addKeyResource = async (jsonKey, proofGraph, keyResource) => { | ||
const addKeyResource = async (cryptoKey, proofGraph, keyResource) => { | ||
let retval = []; | ||
if (jsonKey.kty === "OKP" || jsonKey.kty === "EC") { | ||
if (cryptoKey.algorithm.name === "ECDSA" || cryptoKey.algorithm.name === "Ed25519") { | ||
// We are in multikey land... | ||
const key = await crypto.subtle.importKey("jwk", jsonKey, (0, crypto_utils_1.algorithmData)(report, jsonKey), true, ['verify']); | ||
const { cryptosuite, multikey } = await (0, multikey_1.keyToMultikey)(key); | ||
const multikey = await mkwc.cryptoToMultikey(cryptoKey); | ||
retval = [ | ||
@@ -111,6 +109,7 @@ quad(proofGraph, (0, exports.sec_prefix)('cryptosuite'), literal(cryptosuite)), | ||
else { | ||
const jwkKey = await crypto.subtle.exportKey("jwk", cryptoKey); | ||
retval = [ | ||
quad(proofGraph, (0, exports.sec_prefix)('cryptosuite'), literal(cryptosuite)), | ||
quad(keyResource, exports.rdf_type, (0, exports.sec_prefix)('JsonWebKey')), | ||
quad(keyResource, exports.sec_publicKeyJwk, literal(JSON.stringify(jsonKey), exports.rdf_json)), | ||
quad(keyResource, exports.sec_publicKeyJwk, literal(JSON.stringify(jwkKey), exports.rdf_json)), | ||
]; | ||
@@ -143,3 +142,3 @@ } | ||
proofGraph.add(quad(keyResource, exports.sec_revoked, literal(keyData.revoked, exports.xsd_datetime))); | ||
proofGraph.addQuads(await addKeyResource(keyData.public, proofGraphResource, keyResource)); | ||
proofGraph.addQuads(await addKeyResource(keyData.publicKey, proofGraphResource, keyResource)); | ||
return { proofGraph, proofGraphResource }; | ||
@@ -149,7 +148,7 @@ }; | ||
const { proofGraph, proofGraphResource } = await createProofOptionGraph(); | ||
const proofOptionHashValue = await calculateProofOptionsHash(proofGraph); | ||
// This is the extra trick in the cryptosuite specifications: the signature is upon the | ||
const proofOptionHashValue = await calculateProofOptionsHash(proofGraph, keyData.publicKey); | ||
// This is the extra trick in the cryptosuite specifications: the signature is on the | ||
// concatenation of the original dataset's hash and the hash of the proof option graph. | ||
/* @@@@@ */ debug.log(`Signing ${proofOptionHashValue} + ${hashValue}`); | ||
const signature = await (0, crypto_utils_1.sign)(report, proofOptionHashValue + hashValue, keyData.private); | ||
const signature = await (0, crypto_utils_1.sign)(report, proofOptionHashValue + hashValue, keyData.privateKey); | ||
// Close up... | ||
@@ -184,3 +183,3 @@ if (signature === null) { | ||
* @param report - placeholder for error reports | ||
* @param hash | ||
* @param dataset - the original dataset | ||
* @param proof - the proof graph | ||
@@ -190,6 +189,26 @@ * @param proofId - Id of the proof graph, if known; used in the error reports only | ||
*/ | ||
async function verifyAProofGraph(report, hash, proof, proofId) { | ||
async function verifyAProofGraph(report, dataset, proof, proofId) { | ||
const localErrors = []; | ||
const localWarnings = []; | ||
const getProofValue = (store) => { | ||
// Check the "proofPurpose" property value; raise errors if it is problematic | ||
{ | ||
const purposes = proof.getQuads(null, exports.sec_proofPurpose, null, null); | ||
if (purposes.length === 0) { | ||
localErrors.push(new types.Invalid_Verification_Method("No proof purpose set")); | ||
} | ||
else { | ||
const wrongPurposes = []; | ||
for (const q of purposes) { | ||
if (!(q.object.equals(exports.sec_authenticationMethod) || q.object.equals(exports.sec_assertionMethod))) { | ||
wrongPurposes.push(`<${q.object.value}>`); | ||
} | ||
} | ||
if (wrongPurposes.length > 0) { | ||
localErrors.push(new types.Proof_Transformation_Error(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); | ||
} | ||
} | ||
} | ||
; | ||
// Retrieve the proof value | ||
const proofValue = ((store) => { | ||
// Retrieve the signature value per spec: | ||
@@ -205,4 +224,5 @@ const proof_values = store.getQuads(null, exports.sec_proofValue, null, null); | ||
return proof_values[0].object.value; | ||
}; | ||
const getPublicKey = async (store) => { | ||
})(proof); | ||
// retrieve the public key from the graph | ||
const publicKey = await (async (store) => { | ||
// first see if the verificationMethod has been set properly | ||
@@ -217,3 +237,3 @@ const verificationMethod = store.getQuads(null, exports.sec_verificationMethod, null, null); | ||
} | ||
const publicKey = verificationMethod[0].object; | ||
const publicKeyRef = verificationMethod[0].object; | ||
// Check the creation/expiration/revocation dates, if any... | ||
@@ -227,13 +247,13 @@ const now = new Date(); | ||
} | ||
const expirationDates = store.getQuads(publicKey, exports.sec_expires, null, null); | ||
const expirationDates = store.getQuads(publicKeyRef, exports.sec_expires, null, null); | ||
for (const exp of expirationDates) { | ||
if ((new Date(exp.object.value)) < now) { | ||
localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key expired on ${exp.object.value}`)); | ||
localErrors.push(new types.Invalid_Verification_Method(`<${publicKeyRef.value}> key expired on ${exp.object.value}`)); | ||
return null; | ||
} | ||
} | ||
const revocationDates = store.getQuads(publicKey, exports.sec_revoked, null, null); | ||
const revocationDates = store.getQuads(publicKeyRef, exports.sec_revoked, null, null); | ||
for (const exp of revocationDates) { | ||
if ((new Date(exp.object.value)) < now) { | ||
localErrors.push(new types.Invalid_Verification_Method(`<${publicKey.value}> key was revoked on ${exp.object.value}`)); | ||
localErrors.push(new types.Invalid_Verification_Method(`<${publicKeyRef.value}> key was revoked on ${exp.object.value}`)); | ||
return null; | ||
@@ -244,4 +264,4 @@ } | ||
// The key itself can be in JWK or in Multikey format | ||
const keys_jwk = store.getQuads(publicKey, exports.sec_publicKeyJwk, null, null); | ||
const keys_multikey = store.getQuads(publicKey, exports.sec_publicKeyMultibase, null, null); | ||
const keys_jwk = store.getQuads(publicKeyRef, exports.sec_publicKeyJwk, null, null); | ||
const keys_multikey = store.getQuads(publicKeyRef, exports.sec_publicKeyMultibase, null, null); | ||
// Both arrays cannot exist at the same time! | ||
@@ -260,4 +280,3 @@ if (keys_jwk.length > 0 && keys_multikey.length > 0) { | ||
try { | ||
const key = await (0, multikey_1.multikeyToKey)(keys_multikey[0].object.value); | ||
return crypto.subtle.exportKey('jwk', key); | ||
return await mkwc.multikeyToCrypto(keys_multikey[0].object.value); | ||
} | ||
@@ -277,6 +296,14 @@ catch (e) { | ||
try { | ||
return JSON.parse(keys_jwk[0].object.value); | ||
const jwk = JSON.parse(keys_jwk[0].object.value); | ||
try { | ||
return await (0, crypto_utils_1.jwkToCrypto)(jwk); | ||
} | ||
catch (e) { | ||
// This happens if there is a problem with the crypto import did not work out | ||
localWarnings.push(new types.Proof_Verification_Error(`JWK could not be imported into crypto: ${e.message}`)); | ||
return null; | ||
} | ||
} | ||
catch (e) { | ||
// This happens if there is a JSON parse error with the key... | ||
// This happens if there is a JSON parse error with the key | ||
localWarnings.push(new types.Proof_Verification_Error(`Parsing error for JWK: ${e.message}`)); | ||
@@ -290,25 +317,5 @@ return null; | ||
} | ||
}; | ||
// Check the "proofPurpose" property value | ||
const checkProofPurposes = (store) => { | ||
const purposes = store.getQuads(null, exports.sec_proofPurpose, null, null); | ||
if (purposes.length === 0) { | ||
localErrors.push(new types.Invalid_Verification_Method("No proof purpose set")); | ||
} | ||
else { | ||
const wrongPurposes = []; | ||
for (const q of purposes) { | ||
if (!(q.object.equals(exports.sec_authenticationMethod) || q.object.equals(exports.sec_assertionMethod))) { | ||
wrongPurposes.push(`<${q.object.value}>`); | ||
} | ||
} | ||
if (wrongPurposes.length > 0) { | ||
localErrors.push(new types.Proof_Transformation_Error(`Invalid proof purpose value(s): ${wrongPurposes.join(", ")}`)); | ||
} | ||
} | ||
}; | ||
// Retrieve necessary values with checks | ||
checkProofPurposes(proof); | ||
const publicKey = await getPublicKey(proof); | ||
const proofValue = getProofValue(proof); | ||
})(proof); | ||
// Calculate the dataset hash, that should be used for verification | ||
const hash = await (0, utils_1.calculateDatasetHash)(dataset, publicKey); | ||
// The final set of error/warning should be modified with the proof graph's ID, if applicable | ||
@@ -328,3 +335,3 @@ if (proofId !== undefined) { | ||
// First the proof option graph must be created and then hashed | ||
const proofOptionGraphHash = await calculateProofOptionsHash(proof); | ||
const proofOptionGraphHash = await calculateProofOptionsHash(proof, publicKey); | ||
/* @@@@@ */ debug.log(`Verifying ${proofOptionGraphHash} + ${hash}`); | ||
@@ -357,7 +364,7 @@ const check_results = await (0, crypto_utils_1.verify)(report, proofOptionGraphHash + hash, proofValue, publicKey); | ||
* @param report - placeholder for error reports | ||
* @param hash | ||
* @param dataset - the original dataset to be checked with | ||
* @param proofs | ||
* @returns | ||
*/ | ||
async function verifyProofGraphs(report, hash, proofs) { | ||
async function verifyProofGraphs(report, dataset, proofs) { | ||
const allErrors = []; | ||
@@ -368,3 +375,3 @@ // deno-lint-ignore require-await | ||
allErrors.push(singleReport); | ||
return verifyAProofGraph(singleReport, hash, pr.proofQuads, pr.proofGraph); | ||
return verifyAProofGraph(singleReport, dataset, pr.proofQuads, pr.proofGraph); | ||
}; | ||
@@ -371,0 +378,0 @@ const promises = proofs.map(singleVerification); |
@@ -17,6 +17,5 @@ /** | ||
} | ||
export interface KeyPair { | ||
public: JsonWebKey; | ||
private: JsonWebKey; | ||
} | ||
/** | ||
* Some additional meta data that can be stored with the keys, and then re-appear as part of the proof graphs | ||
*/ | ||
export interface KeyMetadata { | ||
@@ -26,5 +25,4 @@ controller?: string; | ||
revoked?: string; | ||
cryptosuite?: string; | ||
} | ||
export interface KeyData extends KeyMetadata, KeyPair { | ||
export interface KeyData extends KeyMetadata, CryptoKeyPair { | ||
} | ||
@@ -31,0 +29,0 @@ /***************************************************************************************** |
@@ -14,3 +14,2 @@ /** | ||
import * as n3 from 'n3'; | ||
import { KeyMetadata } from './types'; | ||
/*************************************************************************************** | ||
@@ -130,3 +129,3 @@ * Namespace handling | ||
/** | ||
* Type guard to check if an object implements the KeyPair interface. | ||
* Type guard to check if an object implements the CryptoKeyPair interface. | ||
* | ||
@@ -136,10 +135,16 @@ * @param obj | ||
*/ | ||
export declare function isKeyData(obj: any): obj is KeyMetadata; | ||
export declare function isKeyData(obj: any): obj is CryptoKeyPair; | ||
/** | ||
* Calculate the canonical hash of a dataset using the implementation of RDFC 1.0. | ||
* | ||
* Note that the hash calculation's detail depend on the crypto key being used. | ||
* If the key belongs to an ECDSA key, and the corresponding curve is P-384, then | ||
* SHA-384 must be used by the algorithm. Hence the presence of the second | ||
* argument in the call. | ||
* | ||
* @param dataset | ||
* @param key - to decide whether SHA-384 should be used instead of the (default) SHA-256 | ||
* @returns | ||
*/ | ||
export declare function calculateDatasetHash(dataset: rdf.DatasetCore): Promise<string>; | ||
export declare function calculateDatasetHash(dataset: rdf.DatasetCore, key?: CryptoKey): Promise<string>; | ||
/** | ||
@@ -146,0 +151,0 @@ * Create and store the values in a dataset in a new n3 Store. This may be |
@@ -193,3 +193,3 @@ "use strict"; | ||
/** | ||
* Type guard to check if an object implements the KeyPair interface. | ||
* Type guard to check if an object implements the CryptoKeyPair interface. | ||
* | ||
@@ -201,3 +201,3 @@ * @param obj | ||
function isKeyData(obj) { | ||
return obj.public !== undefined && obj.private !== undefined; | ||
return obj.publicKey !== undefined && obj.privateKey !== undefined; | ||
} | ||
@@ -208,7 +208,17 @@ exports.isKeyData = isKeyData; | ||
* | ||
* Note that the hash calculation's detail depend on the crypto key being used. | ||
* If the key belongs to an ECDSA key, and the corresponding curve is P-384, then | ||
* SHA-384 must be used by the algorithm. Hence the presence of the second | ||
* argument in the call. | ||
* | ||
* @param dataset | ||
* @param key - to decide whether SHA-384 should be used instead of the (default) SHA-256 | ||
* @returns | ||
*/ | ||
async function calculateDatasetHash(dataset) { | ||
async function calculateDatasetHash(dataset, key) { | ||
const rdfc10 = new rdfjs_c14n_1.RDFC10(); | ||
// Per cryptosuite specification if ECDSA+P-384 is used, the whole world should use SHA-384... | ||
if (key.algorithm.name === "ECDSA" && key.algorithm?.namedCurve === "P-384") { | ||
rdfc10.hash_algorithm = "sha384"; | ||
} | ||
const canonical_quads = await rdfc10.canonicalize(dataset); | ||
@@ -215,0 +225,0 @@ const datasetHash = await rdfc10.hash(canonical_quads); |
{ | ||
"name": "rdfjs-di", | ||
"version": "0.0.96", | ||
"date": "2024-07-24", | ||
"version": "0.1.0", | ||
"date": "2024-08-24", | ||
"description": "Secure an RDF Dataset through VC's Data Integrity", | ||
@@ -10,2 +10,3 @@ "main": "dist/index.js", | ||
"crkey": "./node_modules/.bin/ts-node testing/run/createKeys.ts", | ||
"crypto": "./node_modules/.bin/ts-node testing/run/testCrypto.ts", | ||
"docs": "./node_modules/.bin/typedoc index.ts lib/*", | ||
@@ -34,3 +35,3 @@ "dist": "tsc -d" | ||
"engines": { | ||
"node": ">21.0.0" | ||
"node": ">=22.6.0" | ||
}, | ||
@@ -42,2 +43,3 @@ "dependencies": { | ||
"base64url": "^3.0.1", | ||
"multikey-webcrypto": "^0.5.0", | ||
"n3": "^1.17.2", | ||
@@ -44,0 +46,0 @@ "rdfjs-c14n": "^3.1.0", |
@@ -1,2 +0,2 @@ | ||
--- NOT PRODUCTION READY --- | ||
***NOT PRODUCTION READY!*** | ||
@@ -7,3 +7,3 @@ # Data Integrity algorithms for RDF Datasets — Proof of concepts implementation | ||
The DI specification is primarily aimed at [Verifiable Credentials](https://www.w3.org/TR/vc-data-model-2.0/) (i.e., JSON-LD based RDF Datasets to express credentials), but the approach is general enough for any kind of RDF Datasets. | ||
The DI specification is primarily aimed at [Verifiable Credentials](https://www.w3.org/TR/vc-data-model-2.0/) (i.e., JSON-LD based RDF Datasets to express credentials), but the approach is general enough for any kind of RDF Datasets. | ||
This implementation is an attempt to implement that. | ||
@@ -15,6 +15,3 @@ | ||
- In contrast with the DI specification, the Verification Method (ie, the public key) is expected to be present in the input. In other words, the package does not retrieve the keys through a URL, it looks for the respective quads in the input dataset. | ||
- Although it implements the [EdDSA](https://www.w3.org/TR/vc-di-eddsa/) and [ECDSA](https://www.w3.org/TR/vc-di-ecdsa/) cryptosuites, the Multikey encoding of the latter is not yet conform to the Multikey specification. | ||
The difference is that the Multikey encoding is done on the uncompressed crypto key as opposed to the compressed one, which is required by the specification. | ||
(I have not yet found a reliable package, that also works with TypeScript, to uncompress a compressed key.) | ||
- The management of proof chains is a bit restricted compared to the specification: proof chains and sets are not mixed. In other words, either all proofs are part of a chain or form a chain; the case when a previous proof reference points at a set of proofs has not been implemented. | ||
- The management of proof chains is a bit restricted compared to the specification: proof chains and sets are not mixed. In other words, either all proofs are part of a set or form a chain. The case when a previous proof reference points at a set of proofs has not been implemented. | ||
- It has not (yet) been cross-checked with other DI implementations and, in general, should be much more thoroughly tested. | ||
@@ -25,4 +22,3 @@ | ||
What the implementation proves, however, is that the | ||
_DI specification may indeed be used, with minor adjustment on the "anchor", to provide proofs for an RDF Dataset in the form of separate "Proof Graphs"_, i.e., RDF Graphs containing a signature and its metadata that can be separately verified. | ||
What the implementation proves, however, is that the _DI specification may indeed be used, with minor adjustment on the "anchor", to provide proofs for an RDF Dataset in the form of separate "Proof Graphs"_, i.e., RDF Graphs containing a signature and its metadata that can be separately verified. | ||
@@ -34,12 +30,14 @@ ## Some details | ||
1. The input RDF Dataset is canonicalized, using the [RDF Dataset Canonicalization](https://www.w3.org/TR/rdf-canon/). | ||
2. The resulting canonical N-Quads are sorted, and hashed to yield a canonical hash of the Dataset (the W3C specification relies on SHA-256 for hashing by default, which is used here). | ||
3. A "proof option graph" is created, which includes crypto keys and some metadata. The key is stored in [JWK](https://www.rfc-editor.org/rfc/rfc7517) or in Multikey formats: the former is used for RSA keys (for which no Multikey encoding has been specified) and the latter is used for ECDSA and EdDSA, as required by the respective cryptosuite specifications. This separate graph is also canonicalized, sorted, and hashed. | ||
4. The two hash values are concatenated (in the order of the proof option graph and the original dataset), and signed using a secret key. The signature value is stored as a base64url value following the [Multibase](https://datatracker.ietf.org/doc/draft-multiformats-multibase) format, and its value is added to the proof option graph (turning it into a proof graph). | ||
2. The resulting canonical N-Quads are sorted, and hashed to yield a canonical hash of the Dataset. By default, the hash is done using SHA-256, except if the key is ECDSA with a P-384 curve (in which case SHA-384 is used). | ||
3. A "proof option graph" is created, which includes crypto keys and some metadata. The key is stored in [JWK](https://www.rfc-editor.org/rfc/rfc7517) or in [Multikey](https://www.w3.org/TR/controller-document/#multikey) formats: the former is used for RSA keys (for which no Multikey encoding has been specified) and the latter is used for ECDSA and EdDSA, as required by the respective cryptosuite specifications. This separate graph is also canonicalized, sorted, and hashed. | ||
4. The two hash values are concatenated (in the order of the proof option graph and the original dataset), and signed using a secret key. The signature value is stored as a base64url value following the [Multibase](https://datatracker.ietf.org/doc/draft-multiformats-multibase) format, and its value is added to the proof option graph (turning it into a "proof graph"). | ||
An extra complication occurs for proof chains: the specification requires that the previous proof in the chain is also "signed over", i.e., the dataset is expanded to include, for the purpose of a signature, the previous proof graph in its entirety. | ||
The package has API entries to generate, and validate, such proof graphs. The API gives the possibility to use a set of keys, yielding a set of proof graphs, which can also be validated in one step. | ||
The package has separate API entries to generate, and validate, such proof graphs. It is also possible, following the DI spec, to provide "embedded" proofs, i.e., a new dataset, containing the original data, as well as the proof graph(s), each as a separate graph within an RDF dataset. If a separate "anchor" resource is provided, then this new dataset will also contain additional RDF triples connecting the anchor to the proof graphs. | ||
It is also possible, following the DI spec, to create "embedded" proofs, i.e., a new dataset, containing the original data, as well as the proof graph(s), each as a separate graph within an RDF dataset. If a separate "anchor" resource is provided, then this new dataset will also contain additional RDF triples connecting the anchor to the proof graphs. | ||
The crypto layer for the package relies on the Web Crypto API specification, and its implementation in `node.js` or `deno`. Accordingly, the following crypto algorithms are available for this implementation | ||
When embedding proof graphs, this can come in two flavors: [proof sets](https://www.w3.org/TR/vc-data-integrity/#proof-sets) and [proof chains](https://www.w3.org/TR/vc-data-integrity/#proof-chains). Semantically, a proof set is just a collection of proofs. A proof chain implies an order of proofs: the specification requires that the previous proof in the chain is also "signed over" by the current proof, i.e., the dataset is expanded to include, for the purpose of a signature, the previous proof graph in its entirety. The difference in behavior is reflected in the API by the type of the crypto key collection: if it is a Typescript `Array`, it is considered to be a chain of keys (and of embedded proofs), and a set otherwise (e.g., if a Typescript `Set` is used). | ||
The crypto layer for the package relies on the Web Crypto API specification, and its implementation in `node.js` or `deno`. The following crypto algorithms are available: | ||
- EDDSA, a.k.a. Ed25519. It is not official in the WebCrypto specification, but implemented both in `node.js` and `deno`. See also the [EdDSA cryptosuite](https://www.w3.org/TR/vc-di-eddsa/) specification. | ||
@@ -51,7 +49,7 @@ - [ECDSA](https://w3c.github.io/webcrypto/#ecdsa). See also the [ECDSA cryptosuite](https://www.w3.org/TR/vc-di-ecdsa/) specification. | ||
Although not strictly necessary for this package, a separate method is available as part of the API to generate cryptography keys for one of these four algorithms. | ||
The first two algorithms are specified by cryptosuites, identified as `eddsa-rdfc-2022` and `ecdsa-rdfc-2019`, respectively. | ||
The other two are non-standard, and are identified with the temporary cryptosuite name of `rsa-pss-rdfc-ih` and `rsa-ssa-rdfc-ih`, respectively. | ||
Note that there is no Multikey encoding for RSA keys, so the keys are stored in JWK format as a literal with an `rdf:JSON` datatype. | ||
The first two algorithms are identified by cryptosuite names, namely `eddsa-rdfc-2022` and `ecdsa-rdfc-2019`, respectively. | ||
The other two are non-standard, and are identified with the temporary cryptosuite names of `rsa-pss-rdfc-ih` and `rsa-ssa-rdfc-ih`, respectively. | ||
Note that there are no Multikey encodings for RSA keys, so the keys are stored in the proof graphs in JWK format as a literal with an `rdf:JSON` datatype. | ||
The user facing APIs use the JWK encoding of the keys only. This makes it easier for the user; Web Crypto provides JWK export "out of the box", and it becomes more complicated for Multikey. This may be changed in future. | ||
The user facing APIs accept WebCrypto CryptoKeyPair instances. To facilitate the application developer, the interface exports a function to generate keys that are usable with this package (see [_generateKey_](https://iherman.github.io/rdfjs-di/functions/lib_crypto_utils.generateKey.html)) and there is also an extra function to convert JWK instances into CryptoKeys (see [_jwkToCrypto_](https://iherman.github.io/rdfjs-di/functions/lib_crypto_utils.jwkToCrypto.html)). (Converting from WebCrypto to JWK can be done directly with the [_crypto.subtle.exportKey_](https://devdocs.io/node/webcrypto#subtleexportkeyformat-key) function). Note also the companion package [_multikey-webcrypto_](https://github.com/iherman/multikey-webcrypto) that performs conversions between Multikey and JWK, respectively WebCrypto. | ||
@@ -69,7 +67,15 @@ For more details, see: | ||
import * as rdf from '@rdfjs/types'; | ||
import { KeyData, generateProofGraph, VerificationResult } from 'rdfjs-di'; | ||
import { | ||
KeyData, | ||
generateProofGraph, verifyProofGraph, | ||
embedProofGraph, verifyEmbeddedProofGraph, | ||
VerificationResult, | ||
generateKey | ||
} from 'rdfjs-di'; | ||
const dataset: rdf.DatasetCore = generateYourDataset(); | ||
const keyPair: KeyData = generateYourWebCryptoKeyPair(); | ||
// Generate an ECDSA keyPair, with the P-256 curve | ||
const keyPair: KeyData = await generateKey(Cryptosuites.ecdsa); | ||
// 'proof' is a separate RDF graph with the keys, metadata, and the signature | ||
@@ -76,0 +82,0 @@ const proof: rdf.DatasetCore = await generateProofGraph(dataset, keyPair) |
108283
2.86%2309
1.45%98
6.52%8
14.29%+ Added
+ Added
+ Added
+ Added
+ Added