New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@toruslabs/torus.js

Package Overview
Dependencies
Maintainers
0
Versions
138
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@toruslabs/torus.js - npm Package Compare versions

Comparing version 15.1.1 to 15.2.0-alpha.0

dist/lib.cjs/helpers/passkeyConnectorUtils.js

7

dist/lib.cjs/constants.js

@@ -8,3 +8,8 @@ 'use strict';

IMPORT_SHARES: "ImportShares",
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign"
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign",
RETRIEVE_SHARES_WITH_LINKED_PASSKEY: "RetrieveSharesWithLinkedPasskey",
GENERATE_AUTH_MESSAGE: "GenerateAuthMessage",
LINK_PASSKEY: "LinkPasskey",
UNLINK_PASSKEY: "UnlinkPasskey",
GET_LINKED_PASSKEYS: "GetLinkedPasskeys"
};

@@ -11,0 +16,0 @@ const SAPPHIRE_METADATA_URL = "https://node-1.node.web3auth.io/metadata";

342

dist/lib.cjs/helpers/nodeUtils.js

@@ -188,3 +188,3 @@ 'use strict';

extended_verifier_id: verifierParams.extended_verifier_id,
is_import_key_flow: true
is_import_key_flow: finalImportedShares.length > 0
}), {}, {

@@ -270,165 +270,17 @@ logTracingHeader: config.config.logRequestTracing

};
async function retrieveOrImportShare(params) {
async function processShareResponse(params, promiseArrRequest) {
const {
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
verifierParams,
verifier,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
isImportedShares
} = params;
await httpHelpers.get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to secure receive shares
const sessionAuthKey = eccrypto.generatePrivate();
const pubKey = eccrypto.getPublic(sessionAuthKey).toString("hex");
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
const bufferKey = keyType === constants$1.KEY_TYPE.SECP256K1 ? common.generatePrivateKey(ecCurve, Buffer) : await random.getRandomBytes(32);
const generatedShares = await keyUtils.generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// if user's account already
isExistingKey = !!common.thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case not allowed to override existing key for import request
// check if key exists
if (!overrideExistingKey) {
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
const proxyEndpointNum = common.getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = httpHelpers.post(endpoints[proxyEndpointNum], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
}).catch(err => loglevel.error("share req", err));
promiseArrRequest.push(p);
} else {
for (let i = 0; i < endpoints.length; i += 1) {
const p = httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
return some.Some(promiseArrRequest, async (shareResponseResult, sharedState) => {

@@ -483,3 +335,3 @@ let thresholdNonceData;

});
const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold;
const thresholdReqCount = isImportedShares ? endpoints.length : halfThreshold;
// optimistically run lagrange interpolation once threshold number of shares have been received

@@ -809,5 +661,183 @@ // this is matched against the user public key to ensure that shares are consistent

}
// Retrieves a user's key shares from the Torus nodes, or imports/provisions new
// shares (explicit import, or locally generated when DKG is disabled), then hands
// the per-node request promises to processShareResponse for aggregation.
//
// params:
//   legacyMetadataHost, enableOneKey, ecCurve, keyType, allowHost, network,
//   clientId, endpoints, nodePubkeys, indexes, verifier, verifierParams, idToken,
//   overrideExistingKey, newImportedShares, extraParams,
//   useDkg (default true), serverTimeOffset, checkCommitment (default true).
// Returns: the promise produced by processShareResponse.
// Throws: when newImportedShares does not match endpoints length, when an
//   imported share is missing for an endpoint, or when a first key lookup
//   returns mismatched node results.
async function retrieveOrImportShare(params) {
  const {
    legacyMetadataHost,
    enableOneKey,
    ecCurve,
    keyType,
    allowHost,
    network,
    clientId,
    endpoints,
    nodePubkeys,
    indexes,
    verifier,
    verifierParams,
    idToken,
    overrideExistingKey,
    newImportedShares,
    extraParams,
    useDkg = true,
    serverTimeOffset,
    checkCommitment = true
  } = params;
  // Gating/allow-list check against the allow host before talking to the nodes.
  await httpHelpers.get(allowHost, {
    headers: {
      verifier,
      verifierid: verifierParams.verifier_id,
      network,
      clientid: clientId,
      enablegating: "true"
    }
  }, {
    useAPIKey: true
  });
  // generate temporary private and public key that is used to secure receive shares
  const sessionAuthKey = eccrypto.generatePrivate();
  const pubKey = eccrypto.getPublic(sessionAuthKey).toString("hex");
  // Uncompressed hex pubkey: skip the "04" prefix, then 64 hex chars X, 64 hex chars Y.
  const sessionPubX = pubKey.slice(2, 66);
  const sessionPubY = pubKey.slice(66);
  let finalImportedShares = [];
  // Majority threshold over the node set (floor(n/2) + 1).
  const halfThreshold = ~~(endpoints.length / 2) + 1;
  if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
    // Caller supplied shares to import: must provide exactly one per endpoint.
    if (newImportedShares.length !== endpoints.length) {
      throw new Error("Invalid imported shares length");
    }
    finalImportedShares = newImportedShares;
  } else if (!useDkg) {
    // No DKG: generate a fresh key client-side and split it into node shares.
    const bufferKey = keyType === constants$1.KEY_TYPE.SECP256K1 ? common.generatePrivateKey(ecCurve, Buffer) : await random.getRandomBytes(32);
    const generatedShares = await keyUtils.generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
    finalImportedShares = [...finalImportedShares, ...generatedShares];
  }
  let commitmentRequestResult = [];
  let isExistingKey;
  const nodeSigs = [];
  if (checkCommitment) {
    // Commitment round: collect node signatures over the session pubkey.
    commitmentRequestResult = await commitmentRequest({
      idToken,
      endpoints,
      indexes,
      keyType,
      verifier,
      verifierParams,
      pubKeyX: sessionPubX,
      pubKeyY: sessionPubY,
      finalImportedShares,
      overrideExistingKey
    });
    // Keep only well-formed, non-error commitment results.
    for (let i = 0; i < commitmentRequestResult.length; i += 1) {
      const x = commitmentRequestResult[i];
      if (!x || typeof x !== "object" || x.error) {
        continue;
      }
      if (x) nodeSigs.push(x.result);
    }
    // if user's account already
    isExistingKey = !!common.thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
  } else if (!checkCommitment && finalImportedShares.length > 0) {
    // in case not allowed to override existing key for import request
    // check if key exists
    if (!overrideExistingKey) {
      var _keyLookupResult$erro, _keyLookupResult$keyR;
      const keyLookupResult = await VerifierLookupRequest({
        endpoints,
        verifier,
        verifierId: verifierParams.verifier_id,
        keyType
      });
      // Any error other than "not yet assigned" means the nodes disagree — abort.
      if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
        throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
      }
      if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
        isExistingKey = !!keyLookupResult.keyResult.keys[0];
      }
    }
  }
  const promiseArrRequest = [];
  // Import path is taken on explicit override, or when DKG is off and no key exists yet.
  const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
  if (canImportedShares) {
    // All imported shares go through a single proxy/coordinator node.
    const proxyEndpointNum = common.getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
    const items = [];
    for (let i = 0; i < endpoints.length; i += 1) {
      const importedShare = finalImportedShares[i];
      if (!importedShare) {
        throw new Error(`invalid imported share at index ${i}`);
      }
      items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
        idtoken: idToken,
        nodesignatures: nodeSigs,
        verifieridentifier: verifier,
        pub_key_x: importedShare.oauth_pub_key_x,
        pub_key_y: importedShare.oauth_pub_key_y,
        signing_pub_key_x: importedShare.signing_pub_key_x,
        signing_pub_key_y: importedShare.signing_pub_key_y,
        encrypted_share: importedShare.encrypted_share,
        encrypted_share_metadata: importedShare.encrypted_share_metadata,
        node_index: importedShare.node_index,
        key_type: importedShare.key_type,
        nonce_data: importedShare.nonce_data,
        nonce_signature: importedShare.nonce_signature,
        sss_endpoint: endpoints[i]
      }, extraParams));
    }
    const p = httpHelpers.post(endpoints[proxyEndpointNum], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.IMPORT_SHARES, {
      encrypted: "yes",
      use_temp: true,
      verifieridentifier: verifier,
      temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
      // send session pub key x only if node signatures are not available (Ie. in non commitment flow)
      temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
      // send session pub key y only if node signatures are not available (Ie. in non commitment flow)
      item: items,
      key_type: keyType,
      one_key_flow: true
    }), {}, {
      logTracingHeader: config.config.logRequestTracing
    }).catch(err => loglevel.error("share req", err));
    promiseArrRequest.push(p);
  } else {
    // Normal flow: ask every node for the user's share (or a key assignment).
    for (let i = 0; i < endpoints.length; i += 1) {
      const p = httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
        encrypted: "yes",
        use_temp: true,
        key_type: keyType,
        distributed_metadata: true,
        verifieridentifier: verifier,
        temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
        // send session pub key x only if node signatures are not available (Ie. in non commitment flow)
        temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
        // send session pub key y only if node signatures are not available (Ie. in non commitment flow)
        item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
          idtoken: idToken,
          key_type: keyType,
          nodesignatures: nodeSigs,
          verifieridentifier: verifier
        }, extraParams)],
        client_time: Math.floor(Date.now() / 1000).toString(),
        one_key_flow: true
      }), {}, {
        logTracingHeader: config.config.logRequestTracing
      });
      promiseArrRequest.push(p);
    }
  }
  // Aggregate node responses; sessionAuthKey decrypts the returned shares.
  return processShareResponse({
    legacyMetadataHost,
    serverTimeOffset,
    sessionAuthKey,
    enableOneKey,
    ecCurve,
    keyType,
    network,
    verifier,
    verifierParams,
    endpoints,
    isImportedShares: canImportedShares
  }, promiseArrRequest);
}
exports.GetPubKeyOrKeyAssign = GetPubKeyOrKeyAssign;
exports.VerifierLookupRequest = VerifierLookupRequest;
exports.processShareResponse = processShareResponse;
exports.retrieveOrImportShare = retrieveOrImportShare;

@@ -14,2 +14,3 @@ 'use strict';

var nodeUtils = require('./helpers/nodeUtils.js');
var passkeyConnectorUtils = require('./helpers/passkeyConnectorUtils.js');
var tssPubKeyUtils = require('./helpers/tssPubKeyUtils.js');

@@ -68,3 +69,9 @@

exports.VerifierLookupRequest = nodeUtils.VerifierLookupRequest;
exports.processShareResponse = nodeUtils.processShareResponse;
exports.retrieveOrImportShare = nodeUtils.retrieveOrImportShare;
exports._linkedPasskeyRetrieveShares = passkeyConnectorUtils._linkedPasskeyRetrieveShares;
exports.getAuthMessageFromNodes = passkeyConnectorUtils.getAuthMessageFromNodes;
exports.linkPasskey = passkeyConnectorUtils.linkPasskey;
exports.listLinkedPasskey = passkeyConnectorUtils.listLinkedPasskey;
exports.unlinkPasskey = passkeyConnectorUtils.unlinkPasskey;
exports.GetOrSetTssDKGPubKey = tssPubKeyUtils.GetOrSetTssDKGPubKey;

@@ -12,2 +12,3 @@ 'use strict';

var nodeUtils = require('./helpers/nodeUtils.js');
var passkeyConnectorUtils = require('./helpers/passkeyConnectorUtils.js');
var keyUtils = require('./helpers/keyUtils.js');

@@ -137,2 +138,41 @@ var metadataUtils = require('./helpers/metadataUtils.js');

}
async retrieveSharesWithLinkedPasskey(params) {
const {
passkeyPublicKey,
idToken,
nodePubkeys,
indexes,
endpoints,
extraParams = {},
passkeyVerifierID
} = params;
if (nodePubkeys.length === 0) {
throw new Error("nodePubkeys param is required");
}
if (nodePubkeys.length !== indexes.length) {
throw new Error("nodePubkeys length must be same as indexes length");
}
if (nodePubkeys.length !== endpoints.length) {
throw new Error("nodePubkeys length must be same as endpoints length");
}
if (constants.LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`retrieveSharesWithLinkedPasskey is not supported by legacy network; ${this.network}`);
}
return passkeyConnectorUtils._linkedPasskeyRetrieveShares({
serverTimeOffset: this.serverTimeOffset,
ecCurve: this.ec,
keyType: this.keyType,
allowHost: this.allowHost,
network: this.network,
clientId: this.clientId,
endpoints,
indexes,
nodePubkeys,
idToken,
passkeyPublicKey,
passkeyVerifierID,
extraParams,
sessionExpSecond: Torus.sessionTime
});
}
async getPublicAddress(endpoints, torusNodePubs, {

@@ -139,0 +179,0 @@ verifier,

@@ -6,3 +6,8 @@ const JRPC_METHODS = {

IMPORT_SHARES: "ImportShares",
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign"
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign",
RETRIEVE_SHARES_WITH_LINKED_PASSKEY: "RetrieveSharesWithLinkedPasskey",
GENERATE_AUTH_MESSAGE: "GenerateAuthMessage",
LINK_PASSKEY: "LinkPasskey",
UNLINK_PASSKEY: "UnlinkPasskey",
GET_LINKED_PASSKEYS: "GetLinkedPasskeys"
};

@@ -9,0 +14,0 @@ const SAPPHIRE_METADATA_URL = "https://node-1.node.web3auth.io/metadata";

import _objectSpread from '@babel/runtime/helpers/objectSpread2';
import { KEY_TYPE, LEGACY_NETWORKS_ROUTE_MAP } from '@toruslabs/constants';
import { LEGACY_NETWORKS_ROUTE_MAP, KEY_TYPE } from '@toruslabs/constants';
import { generatePrivate, getPublic } from '@toruslabs/eccrypto';

@@ -12,3 +12,3 @@ import { post, generateJsonRPCObject, get } from '@toruslabs/http-helpers';

import { generatePrivateKey, thresholdSame, getProxyCoordinatorEndpointIndex, normalizeKeysResult, calculateMedian, normalizeLookUpResult, keccak256, retryCommitment, kCombinations } from './common.js';
import { generateShares, derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey } from './keyUtils.js';
import { derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey, generateShares } from './keyUtils.js';
import { lagrangeInterpolation } from './langrangeInterpolatePoly.js';

@@ -187,3 +187,3 @@ import { getOrSetSapphireMetadataNonce, getOrSetNonce, getMetadata, decryptSeedData, getSecpKeyFromEd25519, decryptNodeData, decryptNodeDataWithPadding } from './metadataUtils.js';

extended_verifier_id: verifierParams.extended_verifier_id,
is_import_key_flow: true
is_import_key_flow: finalImportedShares.length > 0
}), {}, {

@@ -269,165 +269,17 @@ logTracingHeader: config.logRequestTracing

};
async function retrieveOrImportShare(params) {
async function processShareResponse(params, promiseArrRequest) {
const {
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
verifierParams,
verifier,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
isImportedShares
} = params;
await get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to secure receive shares
const sessionAuthKey = generatePrivate();
const pubKey = getPublic(sessionAuthKey).toString("hex");
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
const bufferKey = keyType === KEY_TYPE.SECP256K1 ? generatePrivateKey(ecCurve, Buffer) : await getRandomBytes(32);
const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// if user's account already
isExistingKey = !!thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case not allowed to override existing key for import request
// check if key exists
if (!overrideExistingKey) {
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = post(endpoints[proxyEndpointNum], generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
}).catch(err => log.error("share req", err));
promiseArrRequest.push(p);
} else {
for (let i = 0; i < endpoints.length; i += 1) {
const p = post(endpoints[i], generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
return Some(promiseArrRequest, async (shareResponseResult, sharedState) => {

@@ -482,3 +334,3 @@ let thresholdNonceData;

});
const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold;
const thresholdReqCount = isImportedShares ? endpoints.length : halfThreshold;
// optimistically run lagrange interpolation once threshold number of shares have been received

@@ -808,3 +660,180 @@ // this is matched against the user public key to ensure that shares are consistent

}
// Retrieves a user's key shares from the Torus nodes, or imports/provisions new
// shares (explicit import, or locally generated when DKG is disabled), then hands
// the per-node request promises to processShareResponse for aggregation.
//
// Fix: removed a stray module-level `export { GetPubKeyOrKeyAssign,
// VerifierLookupRequest, retrieveOrImportShare };` statement that was interleaved
// inside this function body (a diff-rendering artifact). `export` declarations are
// only legal at module top level, so the interleaved line is a syntax error; the
// complete, correct export list already appears after this function.
//
// params:
//   legacyMetadataHost, enableOneKey, ecCurve, keyType, allowHost, network,
//   clientId, endpoints, nodePubkeys, indexes, verifier, verifierParams, idToken,
//   overrideExistingKey, newImportedShares, extraParams,
//   useDkg (default true), serverTimeOffset, checkCommitment (default true).
// Returns: the promise produced by processShareResponse.
// Throws: when newImportedShares does not match endpoints length, when an
//   imported share is missing for an endpoint, or when a first key lookup
//   returns mismatched node results.
async function retrieveOrImportShare(params) {
  const {
    legacyMetadataHost,
    enableOneKey,
    ecCurve,
    keyType,
    allowHost,
    network,
    clientId,
    endpoints,
    nodePubkeys,
    indexes,
    verifier,
    verifierParams,
    idToken,
    overrideExistingKey,
    newImportedShares,
    extraParams,
    useDkg = true,
    serverTimeOffset,
    checkCommitment = true
  } = params;
  // Gating/allow-list check against the allow host before talking to the nodes.
  await get(allowHost, {
    headers: {
      verifier,
      verifierid: verifierParams.verifier_id,
      network,
      clientid: clientId,
      enablegating: "true"
    }
  }, {
    useAPIKey: true
  });
  // generate temporary private and public key that is used to secure receive shares
  const sessionAuthKey = generatePrivate();
  const pubKey = getPublic(sessionAuthKey).toString("hex");
  // Uncompressed hex pubkey: skip the "04" prefix, then 64 hex chars X, 64 hex chars Y.
  const sessionPubX = pubKey.slice(2, 66);
  const sessionPubY = pubKey.slice(66);
  let finalImportedShares = [];
  // Majority threshold over the node set (floor(n/2) + 1).
  const halfThreshold = ~~(endpoints.length / 2) + 1;
  if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
    // Caller supplied shares to import: must provide exactly one per endpoint.
    if (newImportedShares.length !== endpoints.length) {
      throw new Error("Invalid imported shares length");
    }
    finalImportedShares = newImportedShares;
  } else if (!useDkg) {
    // No DKG: generate a fresh key client-side and split it into node shares.
    const bufferKey = keyType === KEY_TYPE.SECP256K1 ? generatePrivateKey(ecCurve, Buffer) : await getRandomBytes(32);
    const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
    finalImportedShares = [...finalImportedShares, ...generatedShares];
  }
  let commitmentRequestResult = [];
  let isExistingKey;
  const nodeSigs = [];
  if (checkCommitment) {
    // Commitment round: collect node signatures over the session pubkey.
    commitmentRequestResult = await commitmentRequest({
      idToken,
      endpoints,
      indexes,
      keyType,
      verifier,
      verifierParams,
      pubKeyX: sessionPubX,
      pubKeyY: sessionPubY,
      finalImportedShares,
      overrideExistingKey
    });
    // Keep only well-formed, non-error commitment results.
    for (let i = 0; i < commitmentRequestResult.length; i += 1) {
      const x = commitmentRequestResult[i];
      if (!x || typeof x !== "object" || x.error) {
        continue;
      }
      if (x) nodeSigs.push(x.result);
    }
    // if user's account already
    isExistingKey = !!thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
  } else if (!checkCommitment && finalImportedShares.length > 0) {
    // in case not allowed to override existing key for import request
    // check if key exists
    if (!overrideExistingKey) {
      var _keyLookupResult$erro, _keyLookupResult$keyR;
      const keyLookupResult = await VerifierLookupRequest({
        endpoints,
        verifier,
        verifierId: verifierParams.verifier_id,
        keyType
      });
      // Any error other than "not yet assigned" means the nodes disagree — abort.
      if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
        throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
      }
      if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
        isExistingKey = !!keyLookupResult.keyResult.keys[0];
      }
    }
  }
  const promiseArrRequest = [];
  // Import path is taken on explicit override, or when DKG is off and no key exists yet.
  const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
  if (canImportedShares) {
    // All imported shares go through a single proxy/coordinator node.
    const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
    const items = [];
    for (let i = 0; i < endpoints.length; i += 1) {
      const importedShare = finalImportedShares[i];
      if (!importedShare) {
        throw new Error(`invalid imported share at index ${i}`);
      }
      items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
        idtoken: idToken,
        nodesignatures: nodeSigs,
        verifieridentifier: verifier,
        pub_key_x: importedShare.oauth_pub_key_x,
        pub_key_y: importedShare.oauth_pub_key_y,
        signing_pub_key_x: importedShare.signing_pub_key_x,
        signing_pub_key_y: importedShare.signing_pub_key_y,
        encrypted_share: importedShare.encrypted_share,
        encrypted_share_metadata: importedShare.encrypted_share_metadata,
        node_index: importedShare.node_index,
        key_type: importedShare.key_type,
        nonce_data: importedShare.nonce_data,
        nonce_signature: importedShare.nonce_signature,
        sss_endpoint: endpoints[i]
      }, extraParams));
    }
    const p = post(endpoints[proxyEndpointNum], generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, {
      encrypted: "yes",
      use_temp: true,
      verifieridentifier: verifier,
      temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
      // send session pub key x only if node signatures are not available (Ie. in non commitment flow)
      temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
      // send session pub key y only if node signatures are not available (Ie. in non commitment flow)
      item: items,
      key_type: keyType,
      one_key_flow: true
    }), {}, {
      logTracingHeader: config.logRequestTracing
    }).catch(err => log.error("share req", err));
    promiseArrRequest.push(p);
  } else {
    // Normal flow: ask every node for the user's share (or a key assignment).
    for (let i = 0; i < endpoints.length; i += 1) {
      const p = post(endpoints[i], generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
        encrypted: "yes",
        use_temp: true,
        key_type: keyType,
        distributed_metadata: true,
        verifieridentifier: verifier,
        temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
        // send session pub key x only if node signatures are not available (Ie. in non commitment flow)
        temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
        // send session pub key y only if node signatures are not available (Ie. in non commitment flow)
        item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
          idtoken: idToken,
          key_type: keyType,
          nodesignatures: nodeSigs,
          verifieridentifier: verifier
        }, extraParams)],
        client_time: Math.floor(Date.now() / 1000).toString(),
        one_key_flow: true
      }), {}, {
        logTracingHeader: config.logRequestTracing
      });
      promiseArrRequest.push(p);
    }
  }
  // Aggregate node responses; sessionAuthKey decrypts the returned shares.
  return processShareResponse({
    legacyMetadataHost,
    serverTimeOffset,
    sessionAuthKey,
    enableOneKey,
    ecCurve,
    keyType,
    network,
    verifier,
    verifierParams,
    endpoints,
    isImportedShares: canImportedShares
  }, promiseArrRequest);
}
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, processShareResponse, retrieveOrImportShare };

@@ -11,3 +11,4 @@ export { JRPC_METHODS, SAPPHIRE_DEVNET_METADATA_URL, SAPPHIRE_METADATA_URL } from './constants.js';

export { convertMetadataToNonce, decryptNodeData, decryptNodeDataWithPadding, decryptSeedData, generateMetadataParams, generateNonceMetadataParams, getMetadata, getNonce, getOrSetNonce, getOrSetSapphireMetadataNonce, getSecpKeyFromEd25519 } from './helpers/metadataUtils.js';
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, retrieveOrImportShare } from './helpers/nodeUtils.js';
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, processShareResponse, retrieveOrImportShare } from './helpers/nodeUtils.js';
export { _linkedPasskeyRetrieveShares, getAuthMessageFromNodes, linkPasskey, listLinkedPasskey, unlinkPasskey } from './helpers/passkeyConnectorUtils.js';
export { GetOrSetTssDKGPubKey } from './helpers/tssPubKeyUtils.js';

@@ -10,2 +10,3 @@ import _defineProperty from '@babel/runtime/helpers/defineProperty';

import { retrieveOrImportShare, GetPubKeyOrKeyAssign } from './helpers/nodeUtils.js';
import { _linkedPasskeyRetrieveShares } from './helpers/passkeyConnectorUtils.js';
import { generateShares, getEd25519ExtendedPublicKey, encodeEd25519Point, generateAddressFromPubKey } from './helpers/keyUtils.js';

@@ -135,2 +136,41 @@ import { getOrSetNonce, getMetadata } from './helpers/metadataUtils.js';

}
async retrieveSharesWithLinkedPasskey(params) {
const {
passkeyPublicKey,
idToken,
nodePubkeys,
indexes,
endpoints,
extraParams = {},
passkeyVerifierID
} = params;
if (nodePubkeys.length === 0) {
throw new Error("nodePubkeys param is required");
}
if (nodePubkeys.length !== indexes.length) {
throw new Error("nodePubkeys length must be same as indexes length");
}
if (nodePubkeys.length !== endpoints.length) {
throw new Error("nodePubkeys length must be same as endpoints length");
}
if (LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`retrieveSharesWithLinkedPasskey is not supported by legacy network; ${this.network}`);
}
return _linkedPasskeyRetrieveShares({
serverTimeOffset: this.serverTimeOffset,
ecCurve: this.ec,
keyType: this.keyType,
allowHost: this.allowHost,
network: this.network,
clientId: this.clientId,
endpoints,
indexes,
nodePubkeys,
idToken,
passkeyPublicKey,
passkeyVerifierID,
extraParams,
sessionExpSecond: Torus.sessionTime
});
}
async getPublicAddress(endpoints, torusNodePubs, {

@@ -137,0 +177,0 @@ verifier,

@@ -9,1 +9,3 @@ /*!

/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */
/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */

@@ -7,4 +7,9 @@ export declare const JRPC_METHODS: {

GET_SHARE_OR_KEY_ASSIGN: string;
RETRIEVE_SHARES_WITH_LINKED_PASSKEY: string;
GENERATE_AUTH_MESSAGE: string;
LINK_PASSKEY: string;
UNLINK_PASSKEY: string;
GET_LINKED_PASSKEYS: string;
};
export declare const SAPPHIRE_METADATA_URL = "https://node-1.node.web3auth.io/metadata";
export declare const SAPPHIRE_DEVNET_METADATA_URL = "https://node-1.dev-node.web3auth.io/metadata";

@@ -7,2 +7,3 @@ export * from "./common";

export * from "./nodeUtils";
export * from "./passkeyConnectorUtils";
export * from "./tssPubKeyUtils";
import { INodePub, TORUS_NETWORK_TYPE } from "@toruslabs/constants";
import { ec } from "elliptic";
import { ImportedShare, KeyLookupResult, KeyType, TorusKey, VerifierLookupResult, VerifierParams } from "../interfaces";
import { ImportedShare, ImportShareRequestResult, JRPCResponse, KeyLookupResult, KeyType, ShareRequestResult, TorusKey, VerifierLookupResult, VerifierParams } from "../interfaces";
import { TorusUtilsExtraParams } from "../TorusUtilsExtraParams";

@@ -19,2 +19,15 @@ export declare const GetPubKeyOrKeyAssign: (params: {

}) => Promise<VerifierLookupResult>;
/**
 * Resolves the collected per-node share-request promises into a single
 * reconstructed `TorusKey`.
 *
 * Shared tail of both the regular retrieve flow and the import flow:
 * `retrieveOrImportShare` builds the JRPC request promises and hands them
 * here together with the session context.
 *
 * @param params.sessionAuthKey - Temporary session private key used to decrypt node responses.
 * @param params.isImportedShares - True when the promises come from an ImportShares call rather than GetShareOrKeyAssign.
 * @param params.serverTimeOffset - Client/node clock skew in seconds, applied when signing metadata.
 * @param promiseArrRequest - One pending JRPC response per node endpoint.
 * @returns The assembled key material once a sufficient set of node responses arrives.
 */
export declare function processShareResponse(params: {
    legacyMetadataHost: string;
    serverTimeOffset: number;
    sessionAuthKey: Buffer;
    enableOneKey: boolean;
    ecCurve: ec;
    keyType: KeyType;
    network: TORUS_NETWORK_TYPE;
    verifierParams: VerifierParams;
    endpoints: string[];
    isImportedShares: boolean;
    verifier?: string;
}, promiseArrRequest: Promise<void | JRPCResponse<ShareRequestResult> | JRPCResponse<ImportShareRequestResult[]>>[]): Promise<TorusKey>;
export declare function retrieveOrImportShare(params: {

@@ -21,0 +34,0 @@ legacyMetadataHost: string;

export * from "./constants";
export * from "./helpers";
export * from "./interfaces";
export * from "./passkeyConnectorInterfaces";
export { default as Point } from "./Point";

@@ -5,0 +6,0 @@ export { default as Polynomial } from "./Polynomial";

import { INodePub, TORUS_NETWORK_TYPE } from "@toruslabs/constants";
import { ec as EC } from "elliptic";
import { ImportKeyParams, RetrieveSharesParams, TorusCtorOptions, TorusKey, TorusPublicKey } from "./interfaces";
import { RetrieveSharesWithLinkedPasskeyParams } from "./passkeyConnectorInterfaces";
declare class Torus {

@@ -22,2 +23,3 @@ private static sessionTime;

retrieveShares(params: RetrieveSharesParams): Promise<TorusKey>;
retrieveSharesWithLinkedPasskey(params: RetrieveSharesWithLinkedPasskeyParams): Promise<TorusKey>;
getPublicAddress(endpoints: string[], torusNodePubs: INodePub[], { verifier, verifierId, extendedVerifierId }: {

@@ -24,0 +26,0 @@ verifier: string;

@@ -1,4 +0,2 @@

export interface TorusUtilsExtraParams {
nonce?: string;
message?: string;
export interface TorusUtilsPasskeyExtraParams {
signature?: string;

@@ -11,4 +9,9 @@ clientDataJson?: string;

rpId?: string;
}
/**
 * Optional extra parameters forwarded alongside share-retrieval requests.
 * Extends the passkey-specific extras with generic auth-related fields.
 * Field names use snake_case because they are sent to the nodes verbatim.
 */
export interface TorusUtilsExtraParams extends TorusUtilsPasskeyExtraParams {
    nonce?: string;
    message?: string;
    // NOTE(review): `signature` is already declared as `signature?: string` on
    // TorusUtilsPasskeyExtraParams; this redeclaration is redundant but harmless.
    signature?: string;
    session_token_exp_second?: number;
    timestamp?: number;
}
{
"name": "@toruslabs/torus.js",
"version": "15.1.1",
"version": "15.2.0-alpha.0",
"description": "Handle communication with torus nodes",

@@ -5,0 +5,0 @@ "main": "dist/lib.cjs/index.js",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc