@alephium/ledger-app - npm Package Compare versions

Comparing version 0.4.0 to 0.5.0


dist/src/index.d.ts
/// <reference types="node" />
import { Account, KeyType } from '@alephium/web3';
import Transport from '@ledgerhq/hw-transport';
import * as serde from './serde';
export declare const CLA = 128;

@@ -20,3 +21,3 @@ export declare enum INS {

signHash(path: string, hash: Buffer): Promise<string>;
- signUnsignedTx(path: string, unsignedTx: Buffer): Promise<string>;
+ signUnsignedTx(path: string, unsignedTx: Buffer, tokenMetadata?: serde.TokenMetadata[]): Promise<string>;
}
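For context, a minimal usage sketch of the widened signature (the HID transport choice and the token values below are assumptions for illustration; only signUnsignedTx and close mirror the declarations above):

import TransportNodeHid from '@ledgerhq/hw-transport-node-hid'
import AlephiumApp from '@alephium/ledger-app'

async function signWithTokenMetadata(path: string, unsignedTxHex: string, tokenId: string) {
  const transport = await TransportNodeHid.create()
  const app = new AlephiumApp(transport)
  // tokenMetadata is optional; omitting it keeps the 0.4.0 behaviour
  const signature = await app.signUnsignedTx(path, Buffer.from(unsignedTxHex, 'hex'), [
    { version: 0, tokenId, symbol: 'USDT', decimals: 6 } // tokenId must be a 64-char hex contract id
  ])
  await app.close()
  return signature
}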

dist/src/index.js
@@ -42,2 +42,4 @@ "use strict";

exports.HASH_LEN = 32;
// The maximum payload size is 255: https://github.com/LedgerHQ/ledger-live/blob/develop/libs/ledgerjs/packages/hw-transport/src/Transport.ts#L261
const MAX_PAYLOAD_SIZE = 255;
class AlephiumApp {

@@ -82,8 +84,9 @@ constructor(transport) {

}
- async signUnsignedTx(path, unsignedTx) {
+ async signUnsignedTx(path, unsignedTx, tokenMetadata = []) {
console.log(`unsigned tx size: ${unsignedTx.length}`);
const encodedPath = serde.serializePath(path);
- const firstFrameTxLength = 256 - 25;
+ const encodedTokenMetadata = serde.serializeTokenMetadata(tokenMetadata);
+ const firstFrameTxLength = MAX_PAYLOAD_SIZE - 20 - encodedTokenMetadata.length;
const txData = unsignedTx.slice(0, unsignedTx.length > firstFrameTxLength ? firstFrameTxLength : unsignedTx.length);
- const data = Buffer.concat([encodedPath, txData]);
+ const data = Buffer.concat([encodedPath, encodedTokenMetadata, txData]);
let response = await this.transport.send(exports.CLA, INS.SIGN_TX, 0x00, 0x00, data, [hw_transport_1.StatusCodes.OK]);

@@ -93,3 +96,3 @@ if (unsignedTx.length <= firstFrameTxLength) {

}
- const frameLength = 256 - 5;
+ const frameLength = MAX_PAYLOAD_SIZE;
let fromIndex = firstFrameTxLength;

@@ -96,0 +99,0 @@ while (fromIndex < unsignedTx.length) {
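Rough arithmetic behind the new frame sizes, as a sketch; the 20 bytes are assumed to be the serialized 5-component derivation path, and the tx length is illustrative:

const MAX_PAYLOAD_SIZE = 255                               // hw-transport APDU payload limit
const tokenCount = 2
const encodedTokenMetadataLength = 1 + tokenCount * 46     // count byte + 46 bytes per token = 93
const firstFrameTxLength = MAX_PAYLOAD_SIZE - 20 - encodedTokenMetadataLength // 142 tx bytes in frame 1
const frameLength = MAX_PAYLOAD_SIZE                       // later frames carry only tx bytes
// a 1000-byte unsigned tx would therefore be sent in 1 + Math.ceil((1000 - 142) / 255) = 5 APDUs
console.log(firstFrameTxLength, frameLength)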

dist/src/serde.d.ts
@@ -6,1 +6,11 @@ /// <reference types="node" />

export declare function serializePath(path: string): Buffer;
export declare const MAX_TOKEN_SIZE = 5;
export declare const MAX_TOKEN_SYMBOL_LENGTH = 12;
export declare const TOKEN_METADATA_SIZE = 46;
export interface TokenMetadata {
version: number;
tokenId: string;
symbol: string;
decimals: number;
}
export declare function serializeTokenMetadata(tokens: TokenMetadata[]): Buffer;

dist/src/serde.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
- exports.serializePath = exports.splitPath = exports.FALSE = exports.TRUE = void 0;
+ exports.serializeTokenMetadata = exports.TOKEN_METADATA_SIZE = exports.MAX_TOKEN_SYMBOL_LENGTH = exports.MAX_TOKEN_SIZE = exports.serializePath = exports.splitPath = exports.FALSE = exports.TRUE = void 0;
const web3_1 = require("@alephium/web3");
exports.TRUE = 0x10;

@@ -33,1 +34,47 @@ exports.FALSE = 0x00;

exports.serializePath = serializePath;
exports.MAX_TOKEN_SIZE = 5;
exports.MAX_TOKEN_SYMBOL_LENGTH = 12;
exports.TOKEN_METADATA_SIZE = 46;
function symbolToBytes(symbol) {
const buffer = Buffer.alloc(exports.MAX_TOKEN_SYMBOL_LENGTH, 0);
for (let i = 0; i < symbol.length; i++) {
buffer[i] = symbol.charCodeAt(i) & 0xFF;
}
return buffer;
}
function check(tokens) {
const hasDuplicate = tokens.some((token, index) => index !== tokens.findIndex((t) => t.tokenId === token.tokenId));
if (hasDuplicate) {
throw new Error(`There are duplicate tokens`);
}
tokens.forEach((token) => {
if (!((0, web3_1.isHexString)(token.tokenId) && token.tokenId.length === 64)) {
throw new Error(`Invalid token id: ${token.tokenId}`);
}
if (token.symbol.length > exports.MAX_TOKEN_SYMBOL_LENGTH) {
throw new Error(`The token symbol is too long: ${token.symbol}`);
}
});
if (tokens.length > exports.MAX_TOKEN_SIZE) {
throw new Error(`The token size exceeds maximum size`);
}
}
function serializeTokenMetadata(tokens) {
check(tokens);
const array = tokens
.map((metadata) => {
const symbolBytes = symbolToBytes(metadata.symbol);
const buffer = Buffer.concat([
Buffer.from([metadata.version]),
Buffer.from(metadata.tokenId, 'hex'),
symbolBytes,
Buffer.from([metadata.decimals])
]);
if (buffer.length !== exports.TOKEN_METADATA_SIZE) {
throw new Error(`Invalid token metadata: ${metadata}`);
}
return buffer;
});
return Buffer.concat([Buffer.from([array.length]), ...array]);
}
exports.serializeTokenMetadata = serializeTokenMetadata;
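A worked sketch of the record layout produced above (values are illustrative): each entry is exactly TOKEN_METADATA_SIZE = 46 bytes, and serializeTokenMetadata prefixes the list with a one-byte count:

const record = Buffer.concat([
  Buffer.from([0]),                                  //  1 byte  version
  Buffer.alloc(32, 0xab),                            // 32 bytes token id (the contract id)
  Buffer.from('USDT'.padEnd(12, '\0'), 'ascii'),     // 12 bytes symbol, zero-padded
  Buffer.from([6])                                   //  1 byte  decimals
])
console.log(record.length)                           // 46
// serializeTokenMetadata([t0, t1]) therefore yields 1 + 2 * 46 = 93 bytes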
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const web3_1 = require("@alephium/web3");
const serde_1 = require("./serde");
const crypto_1 = require("crypto");
describe('serde', () => {

@@ -13,2 +15,66 @@ it('should split path', () => {

});
it('should encode token metadata', () => {
const token0 = {
version: 0,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token0',
decimals: 8
};
const token1 = {
version: 1,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token1',
decimals: 18
};
const token2 = {
version: 2,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token2',
decimals: 6
};
const token3 = {
version: 3,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token3',
decimals: 0
};
const token4 = {
version: 4,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token4',
decimals: 12
};
const encodeSymbol = (symbol) => {
return (0, web3_1.binToHex)(Buffer.from(symbol, 'ascii')).padEnd(serde_1.MAX_TOKEN_SYMBOL_LENGTH * 2, '0');
};
expect((0, web3_1.binToHex)((0, serde_1.serializeTokenMetadata)([]))).toEqual('00');
expect((0, web3_1.binToHex)((0, serde_1.serializeTokenMetadata)([token0]))).toEqual('01' + '00' + token0.tokenId + encodeSymbol(token0.symbol) + '08');
expect((0, web3_1.binToHex)((0, serde_1.serializeTokenMetadata)([token1]))).toEqual('01' + '01' + token1.tokenId + encodeSymbol(token1.symbol) + '12');
expect((0, web3_1.binToHex)((0, serde_1.serializeTokenMetadata)([token0, token1]))).toEqual('02' + '00' + token0.tokenId + encodeSymbol(token0.symbol) + '08' +
'01' + token1.tokenId + encodeSymbol(token1.symbol) + '12');
expect((0, web3_1.binToHex)((0, serde_1.serializeTokenMetadata)([token0, token1, token2, token3, token4]))).toEqual('05' + '00' + token0.tokenId + encodeSymbol(token0.symbol) + '08' +
'01' + token1.tokenId + encodeSymbol(token1.symbol) + '12' +
'02' + token2.tokenId + encodeSymbol(token2.symbol) + '06' +
'03' + token3.tokenId + encodeSymbol(token3.symbol) + '00' +
'04' + token4.tokenId + encodeSymbol(token4.symbol) + '0c');
expect(() => (0, serde_1.serializeTokenMetadata)([token0, token1, token0])).toThrow('There are duplicate tokens');
const token5 = {
version: 5,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'Token5',
decimals: 18
};
expect(() => (0, serde_1.serializeTokenMetadata)([token0, token1, token2, token3, token4, token5])).toThrow('The token size exceeds maximum size');
const invalidToken = {
...token0,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(33))
};
expect(() => (0, serde_1.serializeTokenMetadata)([token0, invalidToken])).toThrow('Invalid token id');
const longSymbolToken = {
...token0,
tokenId: (0, web3_1.binToHex)((0, crypto_1.randomBytes)(32)),
symbol: 'LongSymbolToken'
};
expect(() => (0, serde_1.serializeTokenMetadata)([token0, longSymbolToken, token1])).toThrow('The token symbol is too long');
});
});
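The encodeSymbol helper in these tests mirrors symbolToBytes: the ASCII bytes of the symbol, zero-padded to MAX_TOKEN_SYMBOL_LENGTH (12) bytes, i.e. 24 hex characters. A quick check of the expected hex:

const hex = Buffer.from('Token0', 'ascii').toString('hex').padEnd(12 * 2, '0')
console.log(hex) // '546f6b656e30000000000000'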

dist/test/utils.d.ts
@@ -6,3 +6,4 @@ import Transport from '@ledgerhq/hw-transport';

Token = 2,
- MultisigAndToken = 3
+ BaseAndToken = 3,
+ MultisigAndToken = 4
}

@@ -9,0 +10,0 @@ export declare function staxFlexApproveOnce(): Promise<void>;

dist/test/utils.js
@@ -24,2 +24,6 @@ "use strict";

}
function getModel() {
const model = process.env.MODEL;
return model ? model : 'nanos';
}
var OutputType;

@@ -30,3 +34,4 @@ (function (OutputType) {

OutputType[OutputType["Token"] = 2] = "Token";
- OutputType[OutputType["MultisigAndToken"] = 3] = "MultisigAndToken";
+ OutputType[OutputType["BaseAndToken"] = 3] = "BaseAndToken";
+ OutputType[OutputType["MultisigAndToken"] = 4] = "MultisigAndToken";
})(OutputType = exports.OutputType || (exports.OutputType = {}));

@@ -37,2 +42,3 @@ const NanosClickTable = new Map([

[OutputType.Token, 11],
[OutputType.BaseAndToken, 12],
[OutputType.MultisigAndToken, 16],

@@ -44,2 +50,3 @@ ]);

[OutputType.Token, 6],
[OutputType.BaseAndToken, 6],
[OutputType.MultisigAndToken, 8],

@@ -51,6 +58,7 @@ ]);

[OutputType.Token, 3],
[OutputType.BaseAndToken, 3],
[OutputType.MultisigAndToken, 4],
]);
function getOutputClickSize(outputType) {
- const model = process.env.MODEL;
+ const model = getModel();
switch (model) {

@@ -93,11 +101,12 @@ case 'nanos': return NanosClickTable.get(outputType);

async function _touch(times) {
- let continuePos = process.env.MODEL === 'stax' ? STAX_CONTINUE_POSITION : FLEX_CONTINUE_POSITION;
+ const model = getModel();
+ const continuePos = model === 'stax' ? STAX_CONTINUE_POSITION : FLEX_CONTINUE_POSITION;
for (let i = 0; i < times; i += 1) {
await touchPosition(continuePos);
}
- let approvePos = process.env.MODEL === 'stax' ? STAX_APPROVE_POSITION : FLEX_APPROVE_POSITION;
+ const approvePos = model === 'stax' ? STAX_APPROVE_POSITION : FLEX_APPROVE_POSITION;
await touchPosition(approvePos);
}
async function staxFlexApproveOnce() {
- if (process.env.MODEL === 'stax') {
+ if (getModel() === 'stax') {
await touchPosition(STAX_APPROVE_POSITION);

@@ -147,3 +156,3 @@ }

}
- if (process.env.MODEL === 'nanos') {
+ if (getModel() === 'nanos') {
await clickAndApprove(5);

@@ -162,3 +171,3 @@ }

}
- if (process.env.MODEL === 'nanos') {
+ if (getModel() === 'nanos') {
await clickAndApprove(4);

@@ -172,3 +181,3 @@ }

function isStaxOrFlex() {
- return !process.env.MODEL.startsWith('nano');
+ return !getModel().startsWith('nano');
}

@@ -179,3 +188,3 @@ function skipBlindSigningWarning() {

if (isStaxOrFlex()) {
- const rejectPos = process.env.MODEL === 'stax' ? STAX_REJECT_POSITION : FLEX_REJECT_POSITION;
+ const rejectPos = getModel() === 'stax' ? STAX_REJECT_POSITION : FLEX_REJECT_POSITION;
touchPosition(rejectPos);

@@ -192,4 +201,5 @@ }

if (isStaxOrFlex()) {
- const settingsPos = process.env.MODEL === 'stax' ? STAX_SETTINGS_POSITION : FLEX_SETTINGS_POSITION;
- const blindSettingPos = process.env.MODEL === 'stax' ? STAX_BLIND_SETTING_POSITION : FLEX_BLIND_SETTING_POSITION;
+ const model = getModel();
+ const settingsPos = model === 'stax' ? STAX_SETTINGS_POSITION : FLEX_SETTINGS_POSITION;
+ const blindSettingPos = model === 'stax' ? STAX_BLIND_SETTING_POSITION : FLEX_BLIND_SETTING_POSITION;
await touchPosition(settingsPos);

@@ -196,0 +206,0 @@ await touchPosition(blindSettingPos);

dist/test/ledger-app.test.js
@@ -35,2 +35,3 @@ "use strict";

const utils_1 = require("./utils");
const crypto_1 = require("crypto");
describe('ledger wallet', () => {

@@ -112,3 +113,3 @@ const nodeProvider = new web3_1.NodeProvider("http://127.0.0.1:22973");

}, 10000);
- it('shoudl transfer alph to one address', async () => {
+ it('should transfer alph to one address', async () => {
const transport = await (0, utils_1.createTransport)();

@@ -235,2 +236,75 @@ const app = new src_1.default(transport);

}, 120000);
async function genTokensAndDestinations(fromAddress, toAddress, mintAmount, transferAmount) {
const tokens = [];
const tokenSymbol = 'TestTokenABC';
const destinations = [];
for (let i = 0; i < 5; i += 1) {
const tokenInfo = await (0, web3_test_1.mintToken)(fromAddress, mintAmount);
const tokenMetadata = {
version: 0,
tokenId: tokenInfo.contractId,
symbol: tokenSymbol.slice(0, tokenSymbol.length - i),
decimals: 18 - i
};
tokens.push(tokenMetadata);
destinations.push({
address: toAddress,
attoAlphAmount: web3_1.DUST_AMOUNT.toString(),
tokens: [
{
id: tokenMetadata.tokenId,
amount: transferAmount.toString()
}
]
});
}
return { tokens, destinations };
}
it('should transfer token with metadata', async () => {
const transport = await (0, utils_1.createTransport)();
const app = new src_1.default(transport);
const [testAccount] = await app.getAccount(path);
await transferToAddress(testAccount.address);
const toAddress = '1BmVCLrjttchZMW7i6df7mTdCKzHpy38bgDbVL1GqV6P7';
const transferAmount = 1234567890123456789012345n;
const mintAmount = 2222222222222222222222222n;
const { tokens, destinations } = await genTokensAndDestinations(testAccount.address, toAddress, mintAmount, transferAmount);
const randomOrderTokens = tokens.sort((a, b) => b.tokenId.localeCompare(a.tokenId));
const buildTxResult = await nodeProvider.transactions.postTransactionsBuild({
fromPublicKey: testAccount.publicKey,
destinations: destinations
});
(0, utils_1.approveTx)(Array(5).fill(utils_1.OutputType.BaseAndToken));
const signature = await app.signUnsignedTx(path, Buffer.from(buildTxResult.unsignedTx, 'hex'), randomOrderTokens);
expect((0, web3_1.transactionVerifySignature)(buildTxResult.txId, testAccount.publicKey, signature)).toBe(true);
const submitResult = await nodeProvider.transactions.postTransactionsSubmit({
unsignedTx: buildTxResult.unsignedTx,
signature: signature
});
await (0, web3_1.waitForTxConfirmation)(submitResult.txId, 1, 1000);
const balances = await nodeProvider.addresses.getAddressesAddressBalance(toAddress);
tokens.forEach((metadata) => {
const tokenBalance = balances.tokenBalances.find((t) => t.id === metadata.tokenId);
expect(BigInt(tokenBalance.amount)).toEqual(transferAmount);
});
await app.close();
}, 120000);
it('should reject tx if the metadata version is invalid', async () => {
const transport = await (0, utils_1.createTransport)();
const app = new src_1.default(transport);
const [testAccount] = await app.getAccount(path);
await transferToAddress(testAccount.address);
const toAddress = '1BmVCLrjttchZMW7i6df7mTdCKzHpy38bgDbVL1GqV6P7';
const transferAmount = 1234567890123456789012345n;
const mintAmount = 2222222222222222222222222n;
const { tokens, destinations } = await genTokensAndDestinations(testAccount.address, toAddress, mintAmount, transferAmount);
const invalidTokenIndex = (0, crypto_1.randomInt)(5);
tokens[invalidTokenIndex] = { ...tokens[invalidTokenIndex], version: 1 };
const buildTxResult = await nodeProvider.transactions.postTransactionsBuild({
fromPublicKey: testAccount.publicKey,
destinations: destinations
});
await expect(app.signUnsignedTx(path, Buffer.from(buildTxResult.unsignedTx, 'hex'), tokens)).rejects.toThrow();
await app.close();
}, 120000);
it('should transfer from multiple inputs', async () => {

@@ -237,0 +311,0 @@ const transport = await (0, utils_1.createTransport)();

package.json
{
"name": "@alephium/ledger-app",
- "version": "0.4.0",
+ "version": "0.5.0",
"license": "GPL",

@@ -5,0 +5,0 @@ "types": "dist/src/index.d.ts",

src/index.ts
@@ -19,2 +19,5 @@ import { Account, KeyType, addressFromPublicKey, encodeHexSignature, groupOfAddress } from '@alephium/web3'

// The maximum payload size is 255: https://github.com/LedgerHQ/ledger-live/blob/develop/libs/ledgerjs/packages/hw-transport/src/Transport.ts#L261
const MAX_PAYLOAD_SIZE = 255
export default class AlephiumApp {

@@ -71,8 +74,13 @@ readonly transport: Transport

- async signUnsignedTx(path: string, unsignedTx: Buffer): Promise<string> {
+ async signUnsignedTx(
+ path: string,
+ unsignedTx: Buffer,
+ tokenMetadata: serde.TokenMetadata[] = []
+ ): Promise<string> {
console.log(`unsigned tx size: ${unsignedTx.length}`)
const encodedPath = serde.serializePath(path)
- const firstFrameTxLength = 256 - 25;
+ const encodedTokenMetadata = serde.serializeTokenMetadata(tokenMetadata)
+ const firstFrameTxLength = MAX_PAYLOAD_SIZE - 20 - encodedTokenMetadata.length;
const txData = unsignedTx.slice(0, unsignedTx.length > firstFrameTxLength ? firstFrameTxLength : unsignedTx.length)
- const data = Buffer.concat([encodedPath, txData])
+ const data = Buffer.concat([encodedPath, encodedTokenMetadata, txData])
let response = await this.transport.send(CLA, INS.SIGN_TX, 0x00, 0x00, data, [StatusCodes.OK])

@@ -83,3 +91,3 @@ if (unsignedTx.length <= firstFrameTxLength) {

- const frameLength = 256 - 5
+ const frameLength = MAX_PAYLOAD_SIZE
let fromIndex = firstFrameTxLength

@@ -86,0 +94,0 @@ while (fromIndex < unsignedTx.length) {

src/serde.ts
@@ -0,1 +1,3 @@

import { isHexString } from "@alephium/web3"
export const TRUE = 0x10

@@ -31,1 +33,58 @@ export const FALSE = 0x00

}
export const MAX_TOKEN_SIZE = 5
export const MAX_TOKEN_SYMBOL_LENGTH = 12
export const TOKEN_METADATA_SIZE = 46
export interface TokenMetadata {
version: number,
tokenId: string,
symbol: string,
decimals: number
}
function symbolToBytes(symbol: string): Buffer {
const buffer = Buffer.alloc(MAX_TOKEN_SYMBOL_LENGTH, 0)
for (let i = 0; i < symbol.length; i++) {
buffer[i] = symbol.charCodeAt(i) & 0xFF
}
return buffer
}
function check(tokens: TokenMetadata[]) {
const hasDuplicate = tokens.some((token, index) => index !== tokens.findIndex((t) => t.tokenId === token.tokenId))
if (hasDuplicate) {
throw new Error(`There are duplicate tokens`)
}
tokens.forEach((token) => {
if (!(isHexString(token.tokenId) && token.tokenId.length === 64)) {
throw new Error(`Invalid token id: ${token.tokenId}`)
}
if (token.symbol.length > MAX_TOKEN_SYMBOL_LENGTH) {
throw new Error(`The token symbol is too long: ${token.symbol}`)
}
})
if (tokens.length > MAX_TOKEN_SIZE) {
throw new Error(`The token size exceeds maximum size`)
}
}
export function serializeTokenMetadata(tokens: TokenMetadata[]): Buffer {
check(tokens)
const array = tokens
.map((metadata) => {
const symbolBytes = symbolToBytes(metadata.symbol)
const buffer = Buffer.concat([
Buffer.from([metadata.version]),
Buffer.from(metadata.tokenId, 'hex'),
symbolBytes,
Buffer.from([metadata.decimals])
])
if (buffer.length !== TOKEN_METADATA_SIZE) {
throw new Error(`Invalid token metadata: ${metadata}`)
}
return buffer
})
return Buffer.concat([Buffer.from([array.length]), ...array])
}

test/utils.ts
@@ -22,2 +22,7 @@ import SpeculosTransport from '@ledgerhq/hw-transport-node-speculos'

function getModel(): string {
const model = process.env.MODEL
return model ? model as string : 'nanos'
}
export enum OutputType {

@@ -27,2 +32,3 @@ Base,

Token,
BaseAndToken,
MultisigAndToken

@@ -35,2 +41,3 @@ }

[OutputType.Token, 11],
[OutputType.BaseAndToken, 12],
[OutputType.MultisigAndToken, 16],

@@ -43,2 +50,3 @@ ])

[OutputType.Token, 6],
[OutputType.BaseAndToken, 6],
[OutputType.MultisigAndToken, 8],

@@ -51,2 +59,3 @@ ])

[OutputType.Token, 3],
[OutputType.BaseAndToken, 3],
[OutputType.MultisigAndToken, 4],

@@ -56,3 +65,3 @@ ])

function getOutputClickSize(outputType: OutputType) {
- const model = process.env.MODEL
+ const model = getModel()
switch (model) {

@@ -107,7 +116,8 @@ case 'nanos': return NanosClickTable.get(outputType)!

async function _touch(times: number) {
- let continuePos = process.env.MODEL === 'stax' ? STAX_CONTINUE_POSITION : FLEX_CONTINUE_POSITION
+ const model = getModel()
+ const continuePos = model === 'stax' ? STAX_CONTINUE_POSITION : FLEX_CONTINUE_POSITION
for (let i = 0; i < times; i += 1) {
await touchPosition(continuePos)
}
- let approvePos = process.env.MODEL === 'stax' ? STAX_APPROVE_POSITION : FLEX_APPROVE_POSITION
+ const approvePos = model === 'stax' ? STAX_APPROVE_POSITION : FLEX_APPROVE_POSITION
await touchPosition(approvePos)

@@ -117,3 +127,3 @@ }

export async function staxFlexApproveOnce() {
- if (process.env.MODEL === 'stax') {
+ if (getModel() === 'stax') {
await touchPosition(STAX_APPROVE_POSITION)

@@ -162,3 +172,3 @@ } else {

}
- if (process.env.MODEL === 'nanos') {
+ if (getModel() === 'nanos') {
await clickAndApprove(5)

@@ -175,3 +185,3 @@ } else {

}
- if (process.env.MODEL === 'nanos') {
+ if (getModel() === 'nanos') {
await clickAndApprove(4)

@@ -184,3 +194,3 @@ } else {

function isStaxOrFlex(): boolean {
- return !process.env.MODEL!.startsWith('nano')
+ return !getModel().startsWith('nano')
}

@@ -191,3 +201,3 @@

if (isStaxOrFlex()) {
- const rejectPos = process.env.MODEL === 'stax' ? STAX_REJECT_POSITION : FLEX_REJECT_POSITION
+ const rejectPos = getModel() === 'stax' ? STAX_REJECT_POSITION : FLEX_REJECT_POSITION
touchPosition(rejectPos)

@@ -202,4 +212,5 @@ } else {

if (isStaxOrFlex()) {
- const settingsPos = process.env.MODEL === 'stax' ? STAX_SETTINGS_POSITION : FLEX_SETTINGS_POSITION
- const blindSettingPos = process.env.MODEL === 'stax' ? STAX_BLIND_SETTING_POSITION : FLEX_BLIND_SETTING_POSITION
+ const model = getModel()
+ const settingsPos = model === 'stax' ? STAX_SETTINGS_POSITION : FLEX_SETTINGS_POSITION
+ const blindSettingPos = model === 'stax' ? STAX_BLIND_SETTING_POSITION : FLEX_BLIND_SETTING_POSITION
await touchPosition(settingsPos)

@@ -206,0 +217,0 @@ await touchPosition(blindSettingPos)

test/ledger-app.test.ts
@@ -1,4 +0,3 @@

import SpeculosTransport from '@ledgerhq/hw-transport-node-speculos'
import AlephiumApp, { GROUP_NUM } from '../src'
- import { ALPH_TOKEN_ID, Address, NodeProvider, ONE_ALPH, binToHex, codec, groupOfAddress, node, sleep, transactionVerifySignature, waitForTxConfirmation, web3 } from '@alephium/web3'
+ import { ALPH_TOKEN_ID, Address, DUST_AMOUNT, NodeProvider, ONE_ALPH, binToHex, codec, groupOfAddress, node, sleep, transactionVerifySignature, waitForTxConfirmation, web3 } from '@alephium/web3'
import { getSigner, mintToken, transfer } from '@alephium/web3-test'

@@ -8,2 +7,4 @@ import { PrivateKeyWallet } from '@alephium/web3-wallet'

import { approveAddress, approveHash, approveTx, createTransport, enableBlindSigning, getRandomInt, needToAutoApprove, OutputType, skipBlindSigningWarning, staxFlexApproveOnce } from './utils'
import { TokenMetadata } from '../src/serde'
import { randomInt } from 'crypto'

@@ -99,3 +100,3 @@ describe('ledger wallet', () => {

- it('shoudl transfer alph to one address', async () => {
+ it('should transfer alph to one address', async () => {
const transport = await createTransport()

@@ -244,2 +245,92 @@ const app = new AlephiumApp(transport)

async function genTokensAndDestinations(
fromAddress: string,
toAddress: string,
mintAmount: bigint,
transferAmount: bigint
) {
const tokens: TokenMetadata[] = []
const tokenSymbol = 'TestTokenABC'
const destinations: node.Destination[] = []
for (let i = 0; i < 5; i += 1) {
const tokenInfo = await mintToken(fromAddress, mintAmount);
const tokenMetadata: TokenMetadata = {
version: 0,
tokenId: tokenInfo.contractId,
symbol: tokenSymbol.slice(0, tokenSymbol.length - i),
decimals: 18 - i
}
tokens.push(tokenMetadata)
destinations.push({
address: toAddress,
attoAlphAmount: DUST_AMOUNT.toString(),
tokens: [
{
id: tokenMetadata.tokenId,
amount: transferAmount.toString()
}
]
})
}
return { tokens, destinations }
}
it('should transfer token with metadata', async () => {
const transport = await createTransport()
const app = new AlephiumApp(transport)
const [testAccount] = await app.getAccount(path)
await transferToAddress(testAccount.address)
const toAddress = '1BmVCLrjttchZMW7i6df7mTdCKzHpy38bgDbVL1GqV6P7';
const transferAmount = 1234567890123456789012345n
const mintAmount = 2222222222222222222222222n
const { tokens, destinations } = await genTokensAndDestinations(testAccount.address, toAddress, mintAmount, transferAmount)
const randomOrderTokens = tokens.sort((a, b) => b.tokenId.localeCompare(a.tokenId))
const buildTxResult = await nodeProvider.transactions.postTransactionsBuild({
fromPublicKey: testAccount.publicKey,
destinations: destinations
})
approveTx(Array(5).fill(OutputType.BaseAndToken))
const signature = await app.signUnsignedTx(path, Buffer.from(buildTxResult.unsignedTx, 'hex'), randomOrderTokens)
expect(transactionVerifySignature(buildTxResult.txId, testAccount.publicKey, signature)).toBe(true)
const submitResult = await nodeProvider.transactions.postTransactionsSubmit({
unsignedTx: buildTxResult.unsignedTx,
signature: signature
})
await waitForTxConfirmation(submitResult.txId, 1, 1000)
const balances = await nodeProvider.addresses.getAddressesAddressBalance(toAddress)
tokens.forEach((metadata) => {
const tokenBalance = balances.tokenBalances!.find((t) => t.id === metadata.tokenId)!
expect(BigInt(tokenBalance.amount)).toEqual(transferAmount)
})
await app.close()
}, 120000)
it('should reject tx if the metadata version is invalid', async () => {
const transport = await createTransport()
const app = new AlephiumApp(transport)
const [testAccount] = await app.getAccount(path)
await transferToAddress(testAccount.address)
const toAddress = '1BmVCLrjttchZMW7i6df7mTdCKzHpy38bgDbVL1GqV6P7';
const transferAmount = 1234567890123456789012345n
const mintAmount = 2222222222222222222222222n
const { tokens, destinations } = await genTokensAndDestinations(testAccount.address, toAddress, mintAmount, transferAmount)
const invalidTokenIndex = randomInt(5)
tokens[invalidTokenIndex] = { ...tokens[invalidTokenIndex], version: 1 }
const buildTxResult = await nodeProvider.transactions.postTransactionsBuild({
fromPublicKey: testAccount.publicKey,
destinations: destinations
})
await expect(app.signUnsignedTx(path, Buffer.from(buildTxResult.unsignedTx, 'hex'), tokens)).rejects.toThrow()
await app.close()
}, 120000)
it('should transfer from multiple inputs', async () => {

@@ -246,0 +337,0 @@ const transport = await createTransport()
