New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@toruslabs/torus.js

Package Overview
Dependencies
Maintainers
0
Versions
139
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@toruslabs/torus.js - npm Package Compare versions

Comparing version

to
16.0.0

dist/lib.cjs/types/config.d.ts

7

dist/lib.cjs/constants.js

@@ -8,8 +8,3 @@ 'use strict';

IMPORT_SHARES: "ImportShares",
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign",
RETRIEVE_SHARES_WITH_LINKED_PASSKEY: "RetrieveSharesWithLinkedPasskey",
GENERATE_AUTH_MESSAGE: "GenerateAuthMessage",
LINK_PASSKEY: "LinkPasskey",
UNLINK_PASSKEY: "UnlinkPasskey",
GET_LINKED_PASSKEYS: "GetLinkedPasskeys"
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign"
};

@@ -16,0 +11,0 @@ const SAPPHIRE_METADATA_URL = "https://node-1.node.web3auth.io/metadata";

@@ -16,7 +16,11 @@ 'use strict';

};
let secp256k1EC;
let ed25519EC;
const getKeyCurve = keyType => {
if (keyType === constants.KEY_TYPE.ED25519) {
return new elliptic.ec(constants.KEY_TYPE.ED25519);
} else if (keyType === constants.KEY_TYPE.SECP256K1) {
return new elliptic.ec(constants.KEY_TYPE.SECP256K1);
if (keyType === constants.KEY_TYPE.SECP256K1) {
if (!secp256k1EC) secp256k1EC = new elliptic.ec("secp256k1");
return secp256k1EC;
} else if (keyType === constants.KEY_TYPE.ED25519) {
if (!ed25519EC) ed25519EC = new elliptic.ec("ed25519");
return ed25519EC;
}

@@ -123,3 +127,2 @@ throw new Error(`Invalid keyType: ${keyType}`);

});
// odd length

@@ -129,3 +132,2 @@ if (arrSize % 2 !== 0) {

}
// return average of two mid values in case of even arrSize

@@ -132,0 +134,0 @@ const mid1 = sortedArr[arrSize / 2 - 1];

@@ -43,3 +43,2 @@ 'use strict';

}
/** Convenience method that creates public key and other stuff. RFC8032 5.1.5 */

@@ -46,0 +45,0 @@ function getEd25519ExtendedPublicKey(keyBuffer) {

@@ -104,3 +104,2 @@ 'use strict';

}
// generateRandomPolynomial - deterministicShares are assumed random

@@ -107,0 +106,0 @@ function generateRandomPolynomial(ecCurve, degree, secret, deterministicShares) {

@@ -133,3 +133,2 @@ 'use strict';

}
// for sapphire metadata

@@ -136,0 +135,0 @@ const operation = getOnly ? "getNonce" : "getOrSetNonce";

'use strict';
var _objectSpread = require('@babel/runtime/helpers/objectSpread2');
var constants$1 = require('@toruslabs/constants');
var constants = require('@toruslabs/constants');
var eccrypto = require('@toruslabs/eccrypto');

@@ -10,3 +10,3 @@ var httpHelpers = require('@toruslabs/http-helpers');

var config = require('../config.js');
var constants = require('../constants.js');
var constants$1 = require('../constants.js');
var loglevel = require('../loglevel.js');

@@ -29,3 +29,3 @@ var some = require('../some.js');

const minThreshold = ~~(endpoints.length / 2) + 1;
const lookupPromises = endpoints.map(x => httpHelpers.post(x, httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.GET_OR_SET_KEY, {
const lookupPromises = endpoints.map(x => httpHelpers.post(x, httpHelpers.generateJsonRPCObject(constants$1.JRPC_METHODS.GET_OR_SET_KEY, {
distributed_metadata: true,

@@ -41,3 +41,3 @@ verifier,

logTracingHeader: config.config.logRequestTracing
}).catch(err => loglevel.error(`${constants.JRPC_METHODS.GET_OR_SET_KEY} request failed`, err)));
}).catch(err => loglevel.error(`${constants$1.JRPC_METHODS.GET_OR_SET_KEY} request failed`, err)));
let nonceResult;

@@ -54,5 +54,4 @@ const nodeIndexes = [];

const keyResult = common.thresholdSame(lookupPubKeys.map(x3 => x3 && common.normalizeKeysResult(x3.result)), minThreshold);
// check for nonce result in response if not an extendedVerifierId and not a legacy network
if (keyResult && !nonceResult && !extendedVerifierId && !constants$1.LEGACY_NETWORKS_ROUTE_MAP[network]) {
if (keyResult && !nonceResult && !extendedVerifierId && !constants.LEGACY_NETWORKS_ROUTE_MAP[network]) {
for (let i = 0; i < lookupResults.length; i++) {

@@ -71,3 +70,2 @@ const x1 = lookupResults[i];

}
// if nonce result is not returned by nodes, fetch directly from metadata

@@ -87,3 +85,3 @@ if (!nonceResult) {

// nonceResult must exist except for extendedVerifierId and legacy networks along with keyResult
if (keyResult && (nonceResult || extendedVerifierId || constants$1.LEGACY_NETWORKS_ROUTE_MAP[network]) || errorResult) {
if (keyResult && (nonceResult || extendedVerifierId || constants.LEGACY_NETWORKS_ROUTE_MAP[network]) || errorResult) {
if (keyResult) {

@@ -126,3 +124,3 @@ lookupResults.forEach(x1 => {

const minThreshold = ~~(endpoints.length / 2) + 1;
const lookupPromises = endpoints.map(x => httpHelpers.post(x, httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.VERIFIER_LOOKUP, {
const lookupPromises = endpoints.map(x => httpHelpers.post(x, httpHelpers.generateJsonRPCObject(constants$1.JRPC_METHODS.VERIFIER_LOOKUP, {
verifier,

@@ -134,3 +132,3 @@ verifier_id: verifierId.toString(),

logTracingHeader: config.config.logRequestTracing
}).catch(err => loglevel.error(`${constants.JRPC_METHODS.GET_OR_SET_KEY} request failed`, err)));
}).catch(err => loglevel.error(`${constants$1.JRPC_METHODS.GET_OR_SET_KEY} request failed`, err)));
const result = await some.Some(lookupPromises, async lookupResults => {

@@ -185,5 +183,5 @@ const lookupPubKeys = lookupResults.filter(x1 => {

VerifierIdentifier string `json:"verifieridentifier"`
}
}
*/
const p = () => httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.COMMITMENT_REQUEST, {
const p = () => httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants$1.JRPC_METHODS.COMMITMENT_REQUEST, {
messageprefix: "mug00",

@@ -197,3 +195,3 @@ keytype: keyType,

extended_verifier_id: verifierParams.extended_verifier_id,
is_import_key_flow: finalImportedShares.length > 0
is_import_key_flow: true
}), {}, {

@@ -246,3 +244,2 @@ logTracingHeader: config.config.logRequestTracing

const requiredNodeIndex = indexes[proxyEndpointNum].toString(10);
// if not an existing key we need to wait for nodes to agree on commitment

@@ -280,17 +277,164 @@ if (existingPubKey || !existingPubKey && completedRequests.length === endpoints.length) {

};
async function processShareResponse(params, promiseArrRequest) {
async function retrieveOrImportShare(params) {
const {
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
verifier,
endpoints,
isImportedShares
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
} = params;
await httpHelpers.get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to secure receive shares
const sessionAuthKey = eccrypto.generatePrivate();
const pubKey = eccrypto.getPublic(sessionAuthKey).toString("hex");
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
const bufferKey = keyType === constants.KEY_TYPE.SECP256K1 ? common.generatePrivateKey(ecCurve, Buffer) : await random.getRandomBytes(32);
const generatedShares = await keyUtils.generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// the user's account already exists if a threshold of nodes returned the same pub_key_x
isExistingKey = !!common.thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case not allowed to override existing key for import request
// check if key exists
if (!overrideExistingKey) {
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
const proxyEndpointNum = common.getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = httpHelpers.post(endpoints[proxyEndpointNum], httpHelpers.generateJsonRPCObject(constants$1.JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
}).catch(err => loglevel.error("share req", err));
promiseArrRequest.push(p);
} else {
for (let i = 0; i < endpoints.length; i += 1) {
const p = httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants$1.JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
return some.Some(promiseArrRequest, async (shareResponseResult, sharedState) => {

@@ -345,3 +489,3 @@ let thresholdNonceData;

});
const thresholdReqCount = isImportedShares ? endpoints.length : halfThreshold;
const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold;
// optimistically run lagrange interpolation once threshold number of shares have been received

@@ -475,3 +619,2 @@ // this is matched against the user public key to ensure that shares are consistent

});
// Convert each string timestamp to a number

@@ -510,6 +653,5 @@ const serverOffsetTimes = serverTimeOffsetResponses.map(timestamp => Number.parseInt(timestamp, 10));

const oAuthPubkeyY = oAuthPubKey.getY().toString("hex", 64);
// if both thresholdNonceData and extended_verifier_id are not available
// then we need to throw other wise address would be incorrect.
if (!nonceResult && !verifierParams.extended_verifier_id && !constants$1.LEGACY_NETWORKS_ROUTE_MAP[network]) {
if (!nonceResult && !verifierParams.extended_verifier_id && !constants.LEGACY_NETWORKS_ROUTE_MAP[network]) {
// NOTE: dont use padded pub key anywhere in metadata apis, send pub keys as is received from nodes.

@@ -537,3 +679,3 @@ const metadataNonceResult = await metadataUtils.getOrSetSapphireMetadataNonce(network, thresholdPubKey.X, thresholdPubKey.Y, serverTimeOffset, oAuthKey);

}).getPublic();
} else if (constants$1.LEGACY_NETWORKS_ROUTE_MAP[network]) {
} else if (constants.LEGACY_NETWORKS_ROUTE_MAP[network]) {
if (enableOneKey) {

@@ -608,5 +750,5 @@ nonceResult = await metadataUtils.getOrSetNonce(legacyMetadataHost, ecCurve, serverTimeOffsetResponse, oAuthPubkeyX, oAuthPubkeyY, oAuthKey, !isNewKey);

}
if (keyType === constants$1.KEY_TYPE.SECP256K1) {
if (keyType === constants.KEY_TYPE.SECP256K1) {
finalPrivKey = keyWithNonce;
} else if (keyType === constants$1.KEY_TYPE.ED25519) {
} else if (keyType === constants.KEY_TYPE.ED25519) {
if (keyWithNonce && !nonceResult.seed) {

@@ -625,3 +767,3 @@ throw new Error("Invalid data, seed data is missing for ed25519 key, Please report this bug");

let postboxPubY = oAuthPubkeyY;
if (keyType === constants$1.KEY_TYPE.ED25519) {
if (keyType === constants.KEY_TYPE.ED25519) {
const {

@@ -676,183 +818,5 @@ scalar,

}
/**
 * Retrieves existing key shares from the Torus nodes, or imports/assigns new
 * shares, for the given verifier + verifier_id.
 *
 * Flow:
 *   1. Call the gating endpoint (allowHost) to verify the client is allowed.
 *   2. Generate an ephemeral session keypair used to securely receive shares.
 *   3. Either reuse caller-supplied imported shares, or (when DKG is disabled)
 *      generate fresh shares locally.
 *   4. Optionally run the commitment round to collect node signatures and
 *      detect whether a key already exists.
 *   5. Send either a single ImportShares request (via the proxy coordinator
 *      node) or per-node GetShareOrKeyAssign requests.
 *   6. Delegate threshold reconstruction of the key to processShareResponse.
 *
 * @param {object} params - See destructuring below; `useDkg` defaults to true
 *   and `checkCommitment` defaults to true.
 * @returns {Promise} Resolves with the result of processShareResponse once a
 *   threshold of share responses has been collected.
 * @throws {Error} If imported-share counts do not match the endpoint count,
 *   if a first-lookup result mismatch is detected, or if an imported share is
 *   missing for an endpoint during the import flow.
 */
async function retrieveOrImportShare(params) {
const {
legacyMetadataHost,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
} = params;
// Gating check: rejects disallowed clients before any share traffic is sent.
await httpHelpers.get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to securely receive shares
const sessionAuthKey = eccrypto.generatePrivate();
const pubKey = eccrypto.getPublic(sessionAuthKey).toString("hex");
// Uncompressed pubkey hex is "04" + X(64) + Y(64); slice out the coordinates.
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
// Majority threshold: more than half of the endpoints.
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
// Caller supplied pre-generated shares; must be exactly one per endpoint.
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
// DKG disabled: generate the private key client-side and split it into shares.
// ed25519 seeds are raw random bytes; secp256k1 keys come from the curve helper.
const bufferKey = keyType === constants$1.KEY_TYPE.SECP256K1 ? common.generatePrivateKey(ecCurve, Buffer) : await random.getRandomBytes(32);
const generatedShares = await keyUtils.generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
// Commitment round: nodes acknowledge the session pubkey and sign it.
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
// Keep only well-formed, non-error commitment responses.
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// the user's account already exists if a threshold of nodes returned the same pub_key_x
isExistingKey = !!common.thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case we are not allowed to override an existing key for an import request,
// check whether the key already exists via a verifier lookup
if (!overrideExistingKey) {
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
// Any error other than "not yet assigned" means the nodes disagree — abort.
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
// Import path is taken when explicitly overriding, or when DKG is off and no
// key exists yet. NOTE: `!useDkg && !isExistingKey` binds tighter than `||`.
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
// One coordinator node proxies the import to the rest of the cluster.
const proxyEndpointNum = common.getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
// One import item per node: verifier params + the node's encrypted share.
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = httpHelpers.post(endpoints[proxyEndpointNum], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (i.e. in the non-commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (i.e. in the non-commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
}).catch(err => loglevel.error("share req", err));
promiseArrRequest.push(p);
} else {
// Normal retrieval/assignment: ask every node individually for its share.
for (let i = 0; i < endpoints.length; i += 1) {
const p = httpHelpers.post(endpoints[i], httpHelpers.generateJsonRPCObject(constants.JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (i.e. in the non-commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (i.e. in the non-commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
// Threshold reconstruction of the final key happens in processShareResponse.
return processShareResponse({
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
network,
verifier,
verifierParams,
endpoints,
isImportedShares: canImportedShares
}, promiseArrRequest);
}
exports.GetPubKeyOrKeyAssign = GetPubKeyOrKeyAssign;
exports.VerifierLookupRequest = VerifierLookupRequest;
exports.processShareResponse = processShareResponse;
exports.retrieveOrImportShare = retrieveOrImportShare;

@@ -11,3 +11,2 @@ 'use strict';

// Note: Endpoints should be the sss node endpoints along with path
// for ex: [https://node-1.node.web3auth.io/sss/jrpc, https://node-2.node.web3auth.io/sss/jrpc ....]

@@ -14,0 +13,0 @@ const GetOrSetTssDKGPubKey = async params => {

'use strict';
var constants = require('./constants.js');
var Point = require('./Point.js');
var Polynomial = require('./Polynomial.js');
var Share = require('./Share.js');
var torus = require('./torus.js');
var common = require('./helpers/common.js');

@@ -14,4 +10,7 @@ var errorUtils = require('./helpers/errorUtils.js');

var nodeUtils = require('./helpers/nodeUtils.js');
var passkeyConnectorUtils = require('./helpers/passkeyConnectorUtils.js');
var tssPubKeyUtils = require('./helpers/tssPubKeyUtils.js');
var Point = require('./Point.js');
var Polynomial = require('./Polynomial.js');
var Share = require('./Share.js');
var torus = require('./torus.js');

@@ -23,6 +22,2 @@

exports.SAPPHIRE_METADATA_URL = constants.SAPPHIRE_METADATA_URL;
exports.Point = Point;
exports.Polynomial = Polynomial;
exports.Share = Share;
exports.Torus = torus;
exports.calculateMedian = common.calculateMedian;

@@ -70,9 +65,7 @@ exports.encParamsBufToHex = common.encParamsBufToHex;

exports.VerifierLookupRequest = nodeUtils.VerifierLookupRequest;
exports.processShareResponse = nodeUtils.processShareResponse;
exports.retrieveOrImportShare = nodeUtils.retrieveOrImportShare;
exports._linkedPasskeyRetrieveShares = passkeyConnectorUtils._linkedPasskeyRetrieveShares;
exports.getAuthMessageFromNodes = passkeyConnectorUtils.getAuthMessageFromNodes;
exports.linkPasskey = passkeyConnectorUtils.linkPasskey;
exports.listLinkedPasskey = passkeyConnectorUtils.listLinkedPasskey;
exports.unlinkPasskey = passkeyConnectorUtils.unlinkPasskey;
exports.GetOrSetTssDKGPubKey = tssPubKeyUtils.GetOrSetTssDKGPubKey;
exports.Point = Point;
exports.Polynomial = Polynomial;
exports.Share = Share;
exports.Torus = torus;

@@ -48,2 +48,3 @@ 'use strict';

promises.forEach((x, index) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
x.then(resp => {

@@ -58,3 +59,5 @@ resultArr[index] = resp;

if (sharedState.resolved) return;
return predicate(resultArr.slice(0), sharedState).then(data => {
return predicate(resultArr.slice(0), sharedState)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
.then(data => {
sharedState.resolved = true;

@@ -61,0 +64,0 @@ resolve(data);

@@ -9,8 +9,9 @@ 'use strict';

var config = require('./config.js');
var loglevel = require('./loglevel.js');
var common = require('./helpers/common.js');
var errorUtils = require('./helpers/errorUtils.js');
var nodeUtils = require('./helpers/nodeUtils.js');
var passkeyConnectorUtils = require('./helpers/passkeyConnectorUtils.js');
var keyUtils = require('./helpers/keyUtils.js');
var metadataUtils = require('./helpers/metadataUtils.js');
var nodeUtils = require('./helpers/nodeUtils.js');
require('loglevel');
var loglevel = require('./loglevel.js');

@@ -138,45 +139,7 @@ // Implement threshold logic wrappers around public APIs

}
async retrieveSharesWithLinkedPasskey(params) {
const {
passkeyPublicKey,
idToken,
nodePubkeys,
indexes,
endpoints,
extraParams = {},
passkeyVerifierID
} = params;
if (nodePubkeys.length === 0) {
throw new Error("nodePubkeys param is required");
}
if (nodePubkeys.length !== indexes.length) {
throw new Error("nodePubkeys length must be same as indexes length");
}
if (nodePubkeys.length !== endpoints.length) {
throw new Error("nodePubkeys length must be same as endpoints length");
}
if (constants.LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`retrieveSharesWithLinkedPasskey is not supported by legacy network; ${this.network}`);
}
return passkeyConnectorUtils._linkedPasskeyRetrieveShares({
serverTimeOffset: this.serverTimeOffset,
ecCurve: this.ec,
keyType: this.keyType,
allowHost: this.allowHost,
network: this.network,
clientId: this.clientId,
endpoints,
indexes,
nodePubkeys,
idToken,
passkeyPublicKey,
passkeyVerifierID,
extraParams,
sessionExpSecond: Torus.sessionTime
});
}
async getPublicAddress(endpoints, torusNodePubs, {
verifier,
verifierId,
extendedVerifierId
extendedVerifierId,
keyType
}) {

@@ -188,6 +151,8 @@ loglevel.info(torusNodePubs, {

});
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
return this.getNewPublicAddress(endpoints, {
verifier,
verifierId,
extendedVerifierId
extendedVerifierId,
keyType: localKeyType
}, this.enableOneKey);

@@ -262,3 +227,2 @@ }

}
/**

@@ -282,4 +246,10 @@ * Note: use this function only for openlogin tkey account lookups.

verifierId,
extendedVerifierId
extendedVerifierId,
keyType
}, enableOneKey) {
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
const localEc = common.getKeyCurve(localKeyType);
if (localKeyType === constants.KEY_TYPE.ED25519 && constants.LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`keyType: ${keyType} is not supported by ${this.network} network`);
}
const keyAssignResult = await nodeUtils.GetPubKeyOrKeyAssign({

@@ -290,3 +260,3 @@ endpoints,

verifierId,
keyType: this.keyType,
keyType: localKeyType,
extendedVerifierId

@@ -316,3 +286,2 @@ });

}
// no need of nonce for extendedVerifierId (tss verifier id)

@@ -332,3 +301,3 @@ if (!nonceResult && !extendedVerifierId && !constants.LEGACY_NETWORKS_ROUTE_MAP[this.network]) {

// for tss key no need to add pub nonce
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,

@@ -349,10 +318,10 @@ y: Y

const v2NonceResult = nonceResult;
oAuthPubKey = this.ec.keyFromPublic({
oAuthPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic();
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPublic({
}).getPublic().add(localEc.keyFromPublic({
x: v2NonceResult.pubNonce.x,

@@ -371,3 +340,3 @@ y: v2NonceResult.pubNonce.y

const oAuthY = oAuthPubKey.getY().toString(16, 64);
const oAuthAddress = keyUtils.generateAddressFromPubKey(this.keyType, oAuthPubKey.getX(), oAuthPubKey.getY());
const oAuthAddress = keyUtils.generateAddressFromPubKey(localKeyType, oAuthPubKey.getX(), oAuthPubKey.getY());
if (!finalPubKey) {

@@ -378,3 +347,3 @@ throw new Error("Unable to derive finalPubKey");

const finalY = finalPubKey ? finalPubKey.getY().toString(16, 64) : "";
const finalAddress = finalPubKey ? keyUtils.generateAddressFromPubKey(this.keyType, finalPubKey.getX(), finalPubKey.getY()) : "";
const finalAddress = finalPubKey ? keyUtils.generateAddressFromPubKey(localKeyType, finalPubKey.getX(), finalPubKey.getY()) : "";
return {

@@ -409,4 +378,7 @@ oAuthKeyData: {

isNewKey,
serverTimeOffset
serverTimeOffset,
keyType
} = params;
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
const localEc = common.getKeyCurve(localKeyType);
const {

@@ -421,3 +393,3 @@ pub_key_X: X,

let pubNonce;
const oAuthPubKey = this.ec.keyFromPublic({
const oAuthPubKey = localEc.keyFromPublic({
x: X,

@@ -429,3 +401,3 @@ y: Y

try {
nonceResult = await metadataUtils.getOrSetNonce(this.legacyMetadataHost, this.ec, finalServerTimeOffset, X, Y, undefined, !isNewKey);
nonceResult = await metadataUtils.getOrSetNonce(this.legacyMetadataHost, localEc, finalServerTimeOffset, X, Y, undefined, !isNewKey);
nonce = new BN(nonceResult.nonce || "0", 16);

@@ -441,11 +413,11 @@ typeOfUser = nonceResult.typeOfUser;

});
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}).getPublic().add(localEc.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
} else if (nonceResult.typeOfUser === "v2") {
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPublic({
}).getPublic().add(localEc.keyFromPublic({
x: nonceResult.pubNonce.x,

@@ -467,6 +439,6 @@ y: nonceResult.pubNonce.y

});
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}).getPublic().add(localEc.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}

@@ -478,3 +450,3 @@ if (!oAuthPubKey) {

const oAuthY = oAuthPubKey.getY().toString(16, 64);
const oAuthAddress = keyUtils.generateAddressFromPubKey(this.keyType, oAuthPubKey.getX(), oAuthPubKey.getY());
const oAuthAddress = keyUtils.generateAddressFromPubKey(localKeyType, oAuthPubKey.getX(), oAuthPubKey.getY());
if (typeOfUser === "v2" && !finalPubKey) {

@@ -485,3 +457,3 @@ throw new Error("Unable to derive finalPubKey");

const finalY = finalPubKey ? finalPubKey.getY().toString(16, 64) : "";
const finalAddress = finalPubKey ? keyUtils.generateAddressFromPubKey(this.keyType, finalPubKey.getX(), finalPubKey.getY()) : "";
const finalAddress = finalPubKey ? keyUtils.generateAddressFromPubKey(localKeyType, finalPubKey.getX(), finalPubKey.getY()) : "";
return {

@@ -488,0 +460,0 @@ oAuthKeyData: {

@@ -6,8 +6,3 @@ const JRPC_METHODS = {

IMPORT_SHARES: "ImportShares",
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign",
RETRIEVE_SHARES_WITH_LINKED_PASSKEY: "RetrieveSharesWithLinkedPasskey",
GENERATE_AUTH_MESSAGE: "GenerateAuthMessage",
LINK_PASSKEY: "LinkPasskey",
UNLINK_PASSKEY: "UnlinkPasskey",
GET_LINKED_PASSKEYS: "GetLinkedPasskeys"
GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign"
};

@@ -14,0 +9,0 @@ const SAPPHIRE_METADATA_URL = "https://node-1.node.web3auth.io/metadata";

@@ -14,7 +14,11 @@ import { KEY_TYPE } from '@toruslabs/constants';

};
let secp256k1EC;
let ed25519EC;
const getKeyCurve = keyType => {
if (keyType === KEY_TYPE.ED25519) {
return new ec(KEY_TYPE.ED25519);
} else if (keyType === KEY_TYPE.SECP256K1) {
return new ec(KEY_TYPE.SECP256K1);
if (keyType === KEY_TYPE.SECP256K1) {
if (!secp256k1EC) secp256k1EC = new ec("secp256k1");
return secp256k1EC;
} else if (keyType === KEY_TYPE.ED25519) {
if (!ed25519EC) ed25519EC = new ec("ed25519");
return ed25519EC;
}

@@ -21,0 +25,0 @@ throw new Error(`Invalid keyType: ${keyType}`);

import _objectSpread from '@babel/runtime/helpers/objectSpread2';
import { LEGACY_NETWORKS_ROUTE_MAP, KEY_TYPE } from '@toruslabs/constants';
import { KEY_TYPE, LEGACY_NETWORKS_ROUTE_MAP } from '@toruslabs/constants';
import { generatePrivate, getPublic } from '@toruslabs/eccrypto';

@@ -12,3 +12,3 @@ import { post, generateJsonRPCObject, get } from '@toruslabs/http-helpers';

import { generatePrivateKey, thresholdSame, getProxyCoordinatorEndpointIndex, normalizeKeysResult, calculateMedian, normalizeLookUpResult, keccak256, retryCommitment, kCombinations } from './common.js';
import { derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey, generateShares } from './keyUtils.js';
import { generateShares, derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey } from './keyUtils.js';
import { lagrangeInterpolation } from './langrangeInterpolatePoly.js';

@@ -187,3 +187,3 @@ import { getOrSetSapphireMetadataNonce, getOrSetNonce, getMetadata, decryptSeedData, getSecpKeyFromEd25519, decryptNodeData, decryptNodeDataWithPadding } from './metadataUtils.js';

extended_verifier_id: verifierParams.extended_verifier_id,
is_import_key_flow: finalImportedShares.length > 0
is_import_key_flow: true
}), {}, {

@@ -269,17 +269,165 @@ logTracingHeader: config.logRequestTracing

};
async function processShareResponse(params, promiseArrRequest) {
async function retrieveOrImportShare(params) {
const {
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
verifier,
endpoints,
isImportedShares
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
} = params;
await get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to secure receive shares
const sessionAuthKey = generatePrivate();
const pubKey = getPublic(sessionAuthKey).toString("hex");
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
const bufferKey = keyType === KEY_TYPE.SECP256K1 ? generatePrivateKey(ecCurve, Buffer) : await getRandomBytes(32);
const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// a key already exists for this user if a threshold of nodes agree on the same pub_key_x
isExistingKey = !!thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case not allowed to override existing key for import request
// check if key exists
if (!overrideExistingKey) {
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = post(endpoints[proxyEndpointNum], generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
}).catch(err => log.error("share req", err));
promiseArrRequest.push(p);
} else {
for (let i = 0; i < endpoints.length; i += 1) {
const p = post(endpoints[i], generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
return Some(promiseArrRequest, async (shareResponseResult, sharedState) => {

@@ -334,3 +482,3 @@ let thresholdNonceData;

});
const thresholdReqCount = isImportedShares ? endpoints.length : halfThreshold;
const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold;
// optimistically run lagrange interpolation once threshold number of shares have been received

@@ -660,180 +808,3 @@ // this is matched against the user public key to ensure that shares are consistent

}
/**
 * Retrieves a user's key shares from the Torus nodes, or imports/assigns new shares
 * when an imported key (or a locally generated non-DKG key) is being used.
 *
 * High-level flow:
 *  1. Preflight "gating" request to `allowHost` identifying the verifier/client.
 *  2. Generate an ephemeral session keypair used to securely receive shares.
 *  3. Decide the share material: caller-supplied `newImportedShares`, or (when
 *     `useDkg` is false) freshly generated shares.
 *  4. Either run the commitment round against all nodes, or (non-commitment
 *     import flow) look up whether a key already exists for this verifier id.
 *  5. Fan out either an ImportShares request (via the proxy coordinator node)
 *     or GetShareOrKeyAssign requests to every node.
 *  6. Delegate response aggregation to `processShareResponse`.
 *
 * @param {object} params - bag of options; see destructuring below. Notable:
 *   `useDkg` (default true) and `checkCommitment` (default true) select the flow.
 * @returns result of `processShareResponse` over the collected node promises.
 * @throws on gating failure, mismatched imported-share counts, or inconsistent
 *   first-lookup results.
 */
async function retrieveOrImportShare(params) {
const {
legacyMetadataHost,
enableOneKey,
ecCurve,
keyType,
allowHost,
network,
clientId,
endpoints,
nodePubkeys,
indexes,
verifier,
verifierParams,
idToken,
overrideExistingKey,
newImportedShares,
extraParams,
useDkg = true,
serverTimeOffset,
checkCommitment = true
} = params;
// Gating preflight: identifies this verifier/client to the allow host; a
// rejected request aborts share retrieval before any node is contacted.
await get(allowHost, {
headers: {
verifier,
verifierid: verifierParams.verifier_id,
network,
clientid: clientId,
enablegating: "true"
}
}, {
useAPIKey: true
});
// generate temporary private and public key that is used to secure receive shares
const sessionAuthKey = generatePrivate();
// Uncompressed hex pubkey: skip the "04" prefix (2 chars), then 64 chars X, 64 chars Y.
const pubKey = getPublic(sessionAuthKey).toString("hex");
const sessionPubX = pubKey.slice(2, 66);
const sessionPubY = pubKey.slice(66);
let finalImportedShares = [];
// Simple majority threshold: floor(n / 2) + 1 (~~ truncates toward zero).
const halfThreshold = ~~(endpoints.length / 2) + 1;
if ((newImportedShares === null || newImportedShares === void 0 ? void 0 : newImportedShares.length) > 0) {
// Caller supplied pre-built shares: must provide exactly one per node.
if (newImportedShares.length !== endpoints.length) {
throw new Error("Invalid imported shares length");
}
finalImportedShares = newImportedShares;
} else if (!useDkg) {
// No DKG: generate a key locally and split it into per-node shares.
// For ed25519 a raw 32-byte seed is used instead of a secp256k1 scalar.
const bufferKey = keyType === KEY_TYPE.SECP256K1 ? generatePrivateKey(ecCurve, Buffer) : await getRandomBytes(32);
const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey));
finalImportedShares = [...finalImportedShares, ...generatedShares];
}
let commitmentRequestResult = [];
let isExistingKey;
const nodeSigs = [];
if (checkCommitment) {
// Commitment round: nodes sign over the session pubkey before releasing shares.
commitmentRequestResult = await commitmentRequest({
idToken,
endpoints,
indexes,
keyType,
verifier,
verifierParams,
pubKeyX: sessionPubX,
pubKeyY: sessionPubY,
finalImportedShares,
overrideExistingKey
});
// Collect only well-formed, non-error node signatures.
for (let i = 0; i < commitmentRequestResult.length; i += 1) {
const x = commitmentRequestResult[i];
if (!x || typeof x !== "object" || x.error) {
continue;
}
if (x) nodeSigs.push(x.result);
}
// A key already exists for this user if a threshold of nodes agree on the same pub_key_x.
isExistingKey = !!thresholdSame(nodeSigs.map(x => x && x.pub_key_x), halfThreshold);
} else if (!checkCommitment && finalImportedShares.length > 0) {
// in case not allowed to override existing key for import request
// check if key exists
if (!overrideExistingKey) {
// Babel-generated temporaries for the compiled optional-chaining below.
var _keyLookupResult$erro, _keyLookupResult$keyR;
const keyLookupResult = await VerifierLookupRequest({
endpoints,
verifier,
verifierId: verifierParams.verifier_id,
keyType
});
// Any error other than "not yet assigned" means the nodes disagree — abort.
if (keyLookupResult.errorResult && !((_keyLookupResult$erro = keyLookupResult.errorResult) !== null && _keyLookupResult$erro !== void 0 && (_keyLookupResult$erro = _keyLookupResult$erro.data) !== null && _keyLookupResult$erro !== void 0 && _keyLookupResult$erro.includes("Verifier + VerifierID has not yet been assigned"))) {
throw new Error(`node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}`);
}
if (((_keyLookupResult$keyR = keyLookupResult.keyResult) === null || _keyLookupResult$keyR === void 0 || (_keyLookupResult$keyR = _keyLookupResult$keyR.keys) === null || _keyLookupResult$keyR === void 0 ? void 0 : _keyLookupResult$keyR.length) > 0) {
isExistingKey = !!keyLookupResult.keyResult.keys[0];
}
}
}
const promiseArrRequest = [];
// Import path is taken when explicitly overriding, or when DKG is off and no key exists yet.
const canImportedShares = overrideExistingKey || !useDkg && !isExistingKey;
if (canImportedShares) {
// All imported shares are sent through a single deterministic coordinator node.
const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id);
const items = [];
for (let i = 0; i < endpoints.length; i += 1) {
const importedShare = finalImportedShares[i];
if (!importedShare) {
throw new Error(`invalid imported share at index ${i}`);
}
items.push(_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
nodesignatures: nodeSigs,
verifieridentifier: verifier,
pub_key_x: importedShare.oauth_pub_key_x,
pub_key_y: importedShare.oauth_pub_key_y,
signing_pub_key_x: importedShare.signing_pub_key_x,
signing_pub_key_y: importedShare.signing_pub_key_y,
encrypted_share: importedShare.encrypted_share,
encrypted_share_metadata: importedShare.encrypted_share_metadata,
node_index: importedShare.node_index,
key_type: importedShare.key_type,
nonce_data: importedShare.nonce_data,
nonce_signature: importedShare.nonce_signature,
sss_endpoint: endpoints[i]
}, extraParams));
}
const p = post(endpoints[proxyEndpointNum], generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, {
encrypted: "yes",
use_temp: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: items,
key_type: keyType,
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
}).catch(err => log.error("share req", err));
promiseArrRequest.push(p);
} else {
// Normal path: ask every node for the user's share (or to assign a new key).
for (let i = 0; i < endpoints.length; i += 1) {
const p = post(endpoints[i], generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, {
encrypted: "yes",
use_temp: true,
key_type: keyType,
distributed_metadata: true,
verifieridentifier: verifier,
temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "",
// send session pub key x only if node signatures are not available (Ie. in non commitment flow)
temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "",
// send session pub key y only if node signatures are not available (Ie. in non commitment flow)
item: [_objectSpread(_objectSpread({}, verifierParams), {}, {
idtoken: idToken,
key_type: keyType,
nodesignatures: nodeSigs,
verifieridentifier: verifier
}, extraParams)],
client_time: Math.floor(Date.now() / 1000).toString(),
one_key_flow: true
}), {}, {
logTracingHeader: config.logRequestTracing
});
promiseArrRequest.push(p);
}
}
// Aggregate node responses (threshold checks, decryption, interpolation) downstream.
return processShareResponse({
legacyMetadataHost,
serverTimeOffset,
sessionAuthKey,
enableOneKey,
ecCurve,
keyType,
network,
verifier,
verifierParams,
endpoints,
isImportedShares: canImportedShares
}, promiseArrRequest);
}
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, processShareResponse, retrieveOrImportShare };
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, retrieveOrImportShare };

@@ -11,4 +11,3 @@ export { JRPC_METHODS, SAPPHIRE_DEVNET_METADATA_URL, SAPPHIRE_METADATA_URL } from './constants.js';

export { convertMetadataToNonce, decryptNodeData, decryptNodeDataWithPadding, decryptSeedData, generateMetadataParams, generateNonceMetadataParams, getMetadata, getNonce, getOrSetNonce, getOrSetSapphireMetadataNonce, getSecpKeyFromEd25519 } from './helpers/metadataUtils.js';
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, processShareResponse, retrieveOrImportShare } from './helpers/nodeUtils.js';
export { _linkedPasskeyRetrieveShares, getAuthMessageFromNodes, linkPasskey, listLinkedPasskey, unlinkPasskey } from './helpers/passkeyConnectorUtils.js';
export { GetPubKeyOrKeyAssign, VerifierLookupRequest, retrieveOrImportShare } from './helpers/nodeUtils.js';
export { GetOrSetTssDKGPubKey } from './helpers/tssPubKeyUtils.js';

@@ -46,2 +46,3 @@ import _defineProperty from '@babel/runtime/helpers/defineProperty';

promises.forEach((x, index) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
x.then(resp => {

@@ -56,3 +57,5 @@ resultArr[index] = resp;

if (sharedState.resolved) return;
return predicate(resultArr.slice(0), sharedState).then(data => {
return predicate(resultArr.slice(0), sharedState)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
.then(data => {
sharedState.resolved = true;

@@ -59,0 +62,0 @@ resolve(data);

@@ -10,4 +10,4 @@ import _defineProperty from '@babel/runtime/helpers/defineProperty';

import { retrieveOrImportShare, GetPubKeyOrKeyAssign } from './helpers/nodeUtils.js';
import { _linkedPasskeyRetrieveShares } from './helpers/passkeyConnectorUtils.js';
import { generateShares, getEd25519ExtendedPublicKey, encodeEd25519Point, generateAddressFromPubKey } from './helpers/keyUtils.js';
import { getKeyCurve } from './helpers/common.js';
import { getOrSetNonce, getMetadata } from './helpers/metadataUtils.js';

@@ -136,45 +136,7 @@

}
async retrieveSharesWithLinkedPasskey(params) {
const {
passkeyPublicKey,
idToken,
nodePubkeys,
indexes,
endpoints,
extraParams = {},
passkeyVerifierID
} = params;
if (nodePubkeys.length === 0) {
throw new Error("nodePubkeys param is required");
}
if (nodePubkeys.length !== indexes.length) {
throw new Error("nodePubkeys length must be same as indexes length");
}
if (nodePubkeys.length !== endpoints.length) {
throw new Error("nodePubkeys length must be same as endpoints length");
}
if (LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`retrieveSharesWithLinkedPasskey is not supported by legacy network; ${this.network}`);
}
return _linkedPasskeyRetrieveShares({
serverTimeOffset: this.serverTimeOffset,
ecCurve: this.ec,
keyType: this.keyType,
allowHost: this.allowHost,
network: this.network,
clientId: this.clientId,
endpoints,
indexes,
nodePubkeys,
idToken,
passkeyPublicKey,
passkeyVerifierID,
extraParams,
sessionExpSecond: Torus.sessionTime
});
}
async getPublicAddress(endpoints, torusNodePubs, {
verifier,
verifierId,
extendedVerifierId
extendedVerifierId,
keyType
}) {

@@ -186,6 +148,8 @@ log.info(torusNodePubs, {

});
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
return this.getNewPublicAddress(endpoints, {
verifier,
verifierId,
extendedVerifierId
extendedVerifierId,
keyType: localKeyType
}, this.enableOneKey);

@@ -279,4 +243,10 @@ }

verifierId,
extendedVerifierId
extendedVerifierId,
keyType
}, enableOneKey) {
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
const localEc = getKeyCurve(localKeyType);
if (localKeyType === KEY_TYPE.ED25519 && LEGACY_NETWORKS_ROUTE_MAP[this.network]) {
throw new Error(`keyType: ${keyType} is not supported by ${this.network} network`);
}
const keyAssignResult = await GetPubKeyOrKeyAssign({

@@ -287,3 +257,3 @@ endpoints,

verifierId,
keyType: this.keyType,
keyType: localKeyType,
extendedVerifierId

@@ -328,3 +298,3 @@ });

// for tss key no need to add pub nonce
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,

@@ -345,10 +315,10 @@ y: Y

const v2NonceResult = nonceResult;
oAuthPubKey = this.ec.keyFromPublic({
oAuthPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic();
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPublic({
}).getPublic().add(localEc.keyFromPublic({
x: v2NonceResult.pubNonce.x,

@@ -367,3 +337,3 @@ y: v2NonceResult.pubNonce.y

const oAuthY = oAuthPubKey.getY().toString(16, 64);
const oAuthAddress = generateAddressFromPubKey(this.keyType, oAuthPubKey.getX(), oAuthPubKey.getY());
const oAuthAddress = generateAddressFromPubKey(localKeyType, oAuthPubKey.getX(), oAuthPubKey.getY());
if (!finalPubKey) {

@@ -374,3 +344,3 @@ throw new Error("Unable to derive finalPubKey");

const finalY = finalPubKey ? finalPubKey.getY().toString(16, 64) : "";
const finalAddress = finalPubKey ? generateAddressFromPubKey(this.keyType, finalPubKey.getX(), finalPubKey.getY()) : "";
const finalAddress = finalPubKey ? generateAddressFromPubKey(localKeyType, finalPubKey.getX(), finalPubKey.getY()) : "";
return {

@@ -405,4 +375,7 @@ oAuthKeyData: {

isNewKey,
serverTimeOffset
serverTimeOffset,
keyType
} = params;
const localKeyType = keyType !== null && keyType !== void 0 ? keyType : this.keyType;
const localEc = getKeyCurve(localKeyType);
const {

@@ -417,3 +390,3 @@ pub_key_X: X,

let pubNonce;
const oAuthPubKey = this.ec.keyFromPublic({
const oAuthPubKey = localEc.keyFromPublic({
x: X,

@@ -425,3 +398,3 @@ y: Y

try {
nonceResult = await getOrSetNonce(this.legacyMetadataHost, this.ec, finalServerTimeOffset, X, Y, undefined, !isNewKey);
nonceResult = await getOrSetNonce(this.legacyMetadataHost, localEc, finalServerTimeOffset, X, Y, undefined, !isNewKey);
nonce = new BN(nonceResult.nonce || "0", 16);

@@ -437,11 +410,11 @@ typeOfUser = nonceResult.typeOfUser;

});
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}).getPublic().add(localEc.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
} else if (nonceResult.typeOfUser === "v2") {
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPublic({
}).getPublic().add(localEc.keyFromPublic({
x: nonceResult.pubNonce.x,

@@ -463,6 +436,6 @@ y: nonceResult.pubNonce.y

});
finalPubKey = this.ec.keyFromPublic({
finalPubKey = localEc.keyFromPublic({
x: X,
y: Y
}).getPublic().add(this.ec.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}).getPublic().add(localEc.keyFromPrivate(nonce.toString(16, 64), "hex").getPublic());
}

@@ -474,3 +447,3 @@ if (!oAuthPubKey) {

const oAuthY = oAuthPubKey.getY().toString(16, 64);
const oAuthAddress = generateAddressFromPubKey(this.keyType, oAuthPubKey.getX(), oAuthPubKey.getY());
const oAuthAddress = generateAddressFromPubKey(localKeyType, oAuthPubKey.getX(), oAuthPubKey.getY());
if (typeOfUser === "v2" && !finalPubKey) {

@@ -481,3 +454,3 @@ throw new Error("Unable to derive finalPubKey");

const finalY = finalPubKey ? finalPubKey.getY().toString(16, 64) : "";
const finalAddress = finalPubKey ? generateAddressFromPubKey(this.keyType, finalPubKey.getX(), finalPubKey.getY()) : "";
const finalAddress = finalPubKey ? generateAddressFromPubKey(localKeyType, finalPubKey.getX(), finalPubKey.getY()) : "";
return {

@@ -484,0 +457,0 @@ oAuthKeyData: {

@@ -9,3 +9,1 @@ /*!

/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */
/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */
{
"name": "@toruslabs/torus.js",
"version": "15.2.0-alpha.0",
"version": "16.0.0",
"description": "Handle communication with torus nodes",

@@ -9,3 +9,3 @@ "main": "dist/lib.cjs/index.js",

"jsdelivr": "dist/torusUtils.umd.min.js",
"types": "dist/types/index.d.ts",
"types": "dist/lib.cjs/types/index.d.ts",
"sideEffects": false,

@@ -18,4 +18,4 @@ "scripts": {

"lint": "eslint --fix 'src/**/*.ts'",
"test": "cross-env NODE_ENV=test mocha",
"prepare": "husky"
"prepare": "husky",
"test": "vitest run --config test/configs/node.config.mts --coverage"
},

@@ -29,39 +29,40 @@ "files": [

"dependencies": {
"@toruslabs/constants": "^14.0.0",
"@toruslabs/bs58": "^1.0.0",
"@toruslabs/eccrypto": "^5.0.4",
"@toruslabs/http-helpers": "^7.0.0",
"@toruslabs/constants": "^15.0.0",
"@toruslabs/eccrypto": "^6.0.2",
"@toruslabs/http-helpers": "^8.1.1",
"bn.js": "^5.2.1",
"elliptic": "^6.5.7",
"ethereum-cryptography": "^2.2.1",
"json-stable-stringify": "^1.1.1",
"elliptic": "^6.6.1",
"ethereum-cryptography": "^3.1.0",
"json-stable-stringify": "^1.2.1",
"loglevel": "^1.9.2"
},
"devDependencies": {
"@babel/register": "^7.25.7",
"@babel/runtime": "^7.25.7",
"@toruslabs/config": "^2.2.0",
"@toruslabs/eslint-config-typescript": "^3.3.4",
"@toruslabs/fetch-node-details": "^14.0.1",
"@toruslabs/torus-scripts": "^6.1.5",
"@types/chai": "^5.0.0",
"@babel/register": "^7.25.9",
"@babel/runtime": "^7.26.9",
"@faker-js/faker": "^9.6.0",
"@toruslabs/config": "^3.1.0",
"@toruslabs/eslint-config-typescript": "^4.1.0",
"@toruslabs/fetch-node-details": "^15.0.0",
"@toruslabs/torus-scripts": "^7.1.2",
"@types/elliptic": "^6.4.18",
"@types/json-stable-stringify": "^1.0.36",
"@types/jsonwebtoken": "^9.0.7",
"@types/mocha": "^10.0.9",
"@types/sinon": "^17.0.3",
"chai": "^5.1.1",
"@types/json-stable-stringify": "^1.2.0",
"@types/jsonwebtoken": "^9.0.9",
"@types/sinon": "^17.0.4",
"@vitest/browser": "^3.0.8",
"@vitest/coverage-istanbul": "^3.0.8",
"cross-env": "^7.0.3",
"dotenv": "^16.4.5",
"eslint": "^8.57.0",
"@faker-js/faker": "^9.0.3",
"husky": "^9.1.6",
"dotenv": "^16.4.7",
"eslint": "^9.22.0",
"husky": "^9.1.7",
"jsdom": "^26.0.0",
"jsonwebtoken": "^9.0.2",
"lint-staged": "^15.2.10",
"mocha": "^10.7.3",
"prettier": "^3.3.3",
"lint-staged": "^15.4.3",
"playwright": "^1.51.0",
"prettier": "^3.5.3",
"rimraf": "^6.0.1",
"sinon": "^19.0.2",
"tsx": "^4.19.1",
"typescript": "^5.6.3"
"tsx": "^4.19.3",
"typescript": "^5.8.2",
"vitest": "^3.0.8"
},

@@ -98,5 +99,5 @@ "repository": {

"engines": {
"node": ">=18.x",
"node": ">=20.x",
"npm": ">=9.x"
}
}

@@ -33,4 +33,4 @@ # Torus.js

- `esm` build `dist/torusUtils.esm.js` in es6 format
- `commonjs` build `dist/torusUtils.cjs.js` in es5 format
- `lib.esm` build `dist/lib.esm/index.js` in es6 format
- `lib.cjs` build `dist/lib.cjs/index.js` in es5 format
- `umd` build `dist/torusUtils.umd.min.js` in es5 format without polyfilling corejs minified

@@ -110,2 +110,2 @@

- This package requires a peer dependency of `@babel/runtime`
- Node 16+
- Node 20+

Sorry, the diff of this file is too big to display