# Travis CI pipeline: test on every push, then publish to npm and GitHub
# releases when the package version moves ahead of the published dist-tags.
language: node_js
node_js:
  - "stable"
install:
  - npm install -g truffle
  - npm install -g ganache-cli
  - npm install -g coveralls
  - npm install
script:
  - npm run test
after_success:
  # Resolve the local version and the currently published dist-tags so the
  # deploy steps can decide whether a release is actually needed.
  - export CURRENT_VERSION=$(node -p "require('./package.json').version")
  - export LATEST=$(npm view $(node -p "require('./package.json').name") dist-tags.latest)
  - export NEXT=$(npm view $(node -p "require('./package.json').name") dist-tags.next)
  - npm run coverage && cat coverage/lcov.info | coveralls
before_deploy:
  # Tag the build so the GitHub releases provider can pick it up.
  - if [ "$LATEST" != "$CURRENT_VERSION" ] && [ "$TRAVIS_BRANCH" = "master" ]; then
      echo "on master branch";
      export TRAVIS_TAG=v$CURRENT_VERSION;
    fi;
  - if [ "$NEXT" != "$CURRENT_VERSION" ] && [ "$TRAVIS_BRANCH" = "develop" ]; then
      echo "on develop branch";
      export TRAVIS_TAG=v$CURRENT_VERSION-next;
    fi;
deploy:
  # npm prerelease ("next" dist-tag) from develop
  - provider: npm
    email: admin.npm@commitground.org
    api_key:
      secure: jVAfybwTNOK9iRELrHpfZhxYf5QvVBw7iP6wwkxlg2CnCz/BZj8k1HZ+lQAcZeoIzxF0x/0xQMh8hsWcIK4FXKbGvuPQ6EL7UqyX652V8mSJ6UqrHQGqkYNVB+bS+1563RnTIWitbFSDm6+artDjW4Yv5ftWGr8mMzoR1+MmyByblDcO7pCU2avqSOQLmxCCo0/hTb114RhS/5AeAL2cE3hhdKGx9sfyuyLeSGhpAZTL8Fq2soXY7LzAq0smOhs50SKeg7P/daT5ZM8UzCmoGL8wXuK8PMeZ/34Iqjt2p9dOtYHGZD4L28VOcTePuB/W+7C4dJjDV8mqLb37+d2paqQw3bqZ+Br3nlRmighlFfVxDBjy7S0wt8Wu6zL4Ih1h+33Qbrmc629MNDbE6xqFlAUpJzQg5t5n9b5vkqQoxy+63vmCFW2YmwM9Zm9TIZC1h7X7whvTB6e9N770kH7R2lunv16vGrVM73dBijeYvQsFj+heB6zr74vuULRmfXhB2Ezo48ArcxoyX/HyT5G9IKdauI1MLXNVqwvBp6cHn8SeQ+odGBo5PeJY3mJ55kNuoZsiavYikZn3Px0r+2ukNhn5cMGAeyDa1q++HKL8OY27LCf0V2v5ODP5n+9J/FN5yRz8YIggZZMEZhwFP8HA2IaXf+i1u/IXtyLfS5uhyNM=
    tag: next
    on:
      condition: $NEXT != $CURRENT_VERSION
      branch: develop
      repo: commitground/merklux
  # npm stable release ("latest" dist-tag) from master
  - provider: npm
    email: admin.npm@commitground.org
    api_key:
      secure: jVAfybwTNOK9iRELrHpfZhxYf5QvVBw7iP6wwkxlg2CnCz/BZj8k1HZ+lQAcZeoIzxF0x/0xQMh8hsWcIK4FXKbGvuPQ6EL7UqyX652V8mSJ6UqrHQGqkYNVB+bS+1563RnTIWitbFSDm6+artDjW4Yv5ftWGr8mMzoR1+MmyByblDcO7pCU2avqSOQLmxCCo0/hTb114RhS/5AeAL2cE3hhdKGx9sfyuyLeSGhpAZTL8Fq2soXY7LzAq0smOhs50SKeg7P/daT5ZM8UzCmoGL8wXuK8PMeZ/34Iqjt2p9dOtYHGZD4L28VOcTePuB/W+7C4dJjDV8mqLb37+d2paqQw3bqZ+Br3nlRmighlFfVxDBjy7S0wt8Wu6zL4Ih1h+33Qbrmc629MNDbE6xqFlAUpJzQg5t5n9b5vkqQoxy+63vmCFW2YmwM9Zm9TIZC1h7X7whvTB6e9N770kH7R2lunv16vGrVM73dBijeYvQsFj+heB6zr74vuULRmfXhB2Ezo48ArcxoyX/HyT5G9IKdauI1MLXNVqwvBp6cHn8SeQ+odGBo5PeJY3mJ55kNuoZsiavYikZn3Px0r+2ukNhn5cMGAeyDa1q++HKL8OY27LCf0V2v5ODP5n+9J/FN5yRz8YIggZZMEZhwFP8HA2IaXf+i1u/IXtyLfS5uhyNM=
    tag: latest
    on:
      condition: $LATEST != $CURRENT_VERSION
      branch: master
      repo: commitground/merklux
  # GitHub release (stable) for tagged master builds
  - provider: releases
    api_key:
      secure: 26toS7KvokzQ+gDib9xoDIHtBzZOFjdCjFiHqla19I8IXxuiLhszgCTIzBXIvBPaajYyfG7u4tdhPwqUebBrQzoob/QA+/sBRuP+/CAQWJ3n+XmRb767ut4QhurnyfONmxqz8UYkQGIzr3gdEkrI3yXsxKuFOQI7k9MDCWagqT0mlqZyMyhjnEpWFEdHERv7Jz4L5vS3A2EtMZwGs6dwEhKS3nZzvZPusEOV6rmMXL83IjYg0XzhTjwkysaxq7owE66pEFayjAreydZ8uWtg96IaD2bbddQD5PcNRPuXRssGKI/2wDV0MPNtx1C9DOqnRVko1lL248d/em1cI7dGIlCzxU7Qt6T7N2qZOeUCsfpkThUuj6RpzLw8FGJHSdySf0YRJ6tMy1xBW5Nl+W40JhBO1xdFQ5oq9h3o8Q2Tn5okaAX9WsdiyAVrMXifE+UchE2F9X7K4lh8RlD6dCcFAvsObytZxx7rxmYOBxb0M+KpFXSP4FDm7VBzxLw+tMeg7gq8dB21NEYt95oAr4AasstZcyqluwDqz/cYK53Y7Q4PKfx8SwDVRA4hONYFfT4dMvYaGalcbu1FsI67ATkdqlyLhdgiBrKpjitCP0zT0lBOeBQF16W7TQTXyEqjhUV8EUJwd2nKjxIX0NOoVcCSJWwcAWcqMI6Eeon1/eh3FNc=
    on:
      tags: true
      repo: commitground/merklux
      branch: master
  # GitHub prerelease for tagged develop builds
  - provider: releases
    prerelease: true
    api_key:
      secure: 26toS7KvokzQ+gDib9xoDIHtBzZOFjdCjFiHqla19I8IXxuiLhszgCTIzBXIvBPaajYyfG7u4tdhPwqUebBrQzoob/QA+/sBRuP+/CAQWJ3n+XmRb767ut4QhurnyfONmxqz8UYkQGIzr3gdEkrI3yXsxKuFOQI7k9MDCWagqT0mlqZyMyhjnEpWFEdHERv7Jz4L5vS3A2EtMZwGs6dwEhKS3nZzvZPusEOV6rmMXL83IjYg0XzhTjwkysaxq7owE66pEFayjAreydZ8uWtg96IaD2bbddQD5PcNRPuXRssGKI/2wDV0MPNtx1C9DOqnRVko1lL248d/em1cI7dGIlCzxU7Qt6T7N2qZOeUCsfpkThUuj6RpzLw8FGJHSdySf0YRJ6tMy1xBW5Nl+W40JhBO1xdFQ5oq9h3o8Q2Tn5okaAX9WsdiyAVrMXifE+UchE2F9X7K4lh8RlD6dCcFAvsObytZxx7rxmYOBxb0M+KpFXSP4FDm7VBzxLw+tMeg7gq8dB21NEYt95oAr4AasstZcyqluwDqz/cYK53Y7Q4PKfx8SwDVRA4hONYFfT4dMvYaGalcbu1FsI67ATkdqlyLhdgiBrKpjitCP0zT0lBOeBQF16W7TQTXyEqjhUV8EUJwd2nKjxIX0NOoVcCSJWwcAWcqMI6Eeon1/eh3FNc=
    on:
      tags: true
      repo: commitground/merklux
      branch: develop
Sorry, the diff of this file is not supported yet
#!/bin/bash
# Test script should be run in the base directory.
# Boots a throwaway ganache-cli chain, runs the truffle test suite against it,
# and guarantees the chain process is cleaned up on every exit path.

# Abort unless the working directory is the truffle project root.
check_truffle_project() {
  cd `dirname "$0"` && cd ../
  if [ -f "truffle.js" ]
  then
    echo "Start testing"
  else
    echo "You should run this script in the base directory of this project"
    exit 1
  fi
}

# Terminate the ganache instance started by run_ganache (if any).
kill_ganache() {
  echo "Terminate ganache"
  if !([ -z ${pid+x} ]);then
    kill $pid > /dev/null 2>&1
  fi
}

# Compile contracts
compile() {
  truffle compile --all
  [ $? -ne 0 ] && exit 1
}

# Run a private block-chain for the test cases and remember its pid.
run_ganache() {
  ganache-cli > /dev/null & pid=$!
  # Give the chain a moment to boot before checking it is still alive;
  # checking immediately races the process startup.
  sleep 1
  if ps -p $pid > /dev/null
  then
    echo "Running ganache..."
  else
    echo "Failed to run a chain"
    exit 1
  fi
}

# Deploy contracts on the block-chain for testing
migrate() {
  truffle migrate --network development
  [ $? -ne 0 ] && exit 1
}

# Run test cases with truffle
run_test() {
  truffle test --network development
  [ $? -ne 0 ] && exit 1
}

# Trap EXIT as well as the signals: a failing run_test exits 1, and without
# the EXIT trap that path used to leave the background ganache orphaned.
trap kill_ganache EXIT SIGINT SIGTERM SIGTSTP
check_truffle_project
run_ganache
run_test
exit 0
const chai = require('chai')
const assert = chai.assert
const BigNumber = web3.BigNumber
const should = chai.use(require('chai-bignumber')(BigNumber)).should()
const MerkluxTree = artifacts.require('MerkluxTree')
const MerkluxCaseTree = artifacts.require('MerkluxCaseTree')
const { progress } = require('./utils')

// Mirrors the Status enum declared in the MerkluxCase contract.
const Status = {
  OPENED: 0,
  ONGOING: 1,
  SUCCESS: 2,
  FAILURE: 3
}

contract('MerkluxCaseTree', async ([_, primary, nonPrimary]) => {
  let originalRootEdge
  let targetRootEdge
  let snapshotTree
  let testNode1
  let testNode2
  before('Plasma manages its state with MerkluxTree', async () => {
    // Get the original root edge
    let plasmaTree = await MerkluxTree.new({ from: primary })
    await plasmaTree.insert('key1', 'val1', { from: primary })
    await plasmaTree.insert('key2', 'val2', { from: primary })
    originalRootEdge = await plasmaTree.getRootEdge()
    // Get the target root edge
    await plasmaTree.insert('key3', 'val3', { from: primary })
    await plasmaTree.insert('key4', 'val4', { from: primary })
    targetRootEdge = await plasmaTree.getRootEdge()
    // Build two valid standalone nodes to commit in the commitNode() tests.
    await plasmaTree.insert('key5', 'val5', { from: primary })
    let testRootHash = (await plasmaTree.getRootEdge())[2]
    testNode1 = [testRootHash, ...(await plasmaTree.getNode(testRootHash))]
    await plasmaTree.insert('key6', 'val6', { from: primary })
    testRootHash = (await plasmaTree.getRootEdge())[2]
    testNode2 = [testRootHash, ...(await plasmaTree.getNode(testRootHash))]
    // Init a test tree holding only the original state (key1/key2).
    snapshotTree = await MerkluxTree.new({ from: primary })
    await snapshotTree.insert('key1', 'val1', { from: primary })
    await snapshotTree.insert('key2', 'val2', { from: primary })
  })
  describe('constructor()', async () => {
    let merkluxCase
    it('should assign the original root edge and the target root edge', async () => {
      merkluxCase = await MerkluxCaseTree.new(...originalRootEdge, ...targetRootEdge, { from: primary })
      assert.ok('deployed successfully')
    })
    it('should set its initial status as OPENED', async () => {
      merkluxCase = await MerkluxCaseTree.new(...originalRootEdge, ...targetRootEdge, { from: primary })
      assert.equal((await merkluxCase.status()).toNumber(), Status.OPENED)
    })
  })
  context('Once deployed successfully', async () => {
    let merkluxCase
    let dataToCommit
    // Replays every branch node of the snapshot onto the case contract.
    const commitNodes = async (nodes) => {
      for (const node of nodes) {
        await merkluxCase.commitNode(...node, { from: primary })
      }
    }
    // Replays every leaf value of the snapshot onto the case contract.
    const commitValues = async (values) => {
      for (const value of values) {
        await merkluxCase.commitValue(value, { from: primary })
      }
    }
    before('prepare', async () => {
      let rootValueOfOriginalState = originalRootEdge[2]
      dataToCommit = await getNodeRecursively(snapshotTree, rootValueOfOriginalState)
    })
    beforeEach('Use a newly deployed MerkluxCaseTree for every test', async () => {
      merkluxCase = await MerkluxCaseTree.new(...originalRootEdge, ...targetRootEdge, { from: primary })
    })
    describe('commitNode()', async () => {
      it('should be called only when the case is in OPENED status', async () => {
        await commitNodes(dataToCommit.nodes)
        assert.ok('successfully committed nodes')
      })
      it('should revert when an invalid node is added', async () => {
        await merkluxCase.commitNode(...testNode1, { from: primary })
        assert.ok('Successfully passed because the passed node is valid')
        try {
          await merkluxCase.commitNode(0, ...originalRootEdge, ...originalRootEdge, { from: primary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('reverted successfully')
        }
      })
      it('should revert if same hash already exists', async () => {
        await merkluxCase.commitNode(...testNode1, { from: primary })
        assert.ok('Successfully passed because same hash does not exist')
        try {
          await merkluxCase.commitNode(...testNode1, { from: primary })
          assert.fail('should revert because the same hash already exists')
        } catch (e) {
          assert.ok('reverted successfully')
        }
      })
      it('should revert when it is not in OPENED status', async () => {
        await commitNodes(dataToCommit.nodes)
        await commitValues(dataToCommit.values)
        await merkluxCase.seal({ from: primary })
        try {
          // FIX: testNode1 must be spread into individual arguments; passing
          // the array itself made this call fail on argument encoding rather
          // than on the contract's status guard.
          await merkluxCase.commitNode(...testNode1, { from: primary })
          assert.fail('it should revert')
        } catch (e) {
          assert.ok('successfully reverted')
        }
      })
    })
    describe('seal()', async () => {
      // FIX: these cases were nested inside another it() callback, which
      // mocha never registers — they silently did not run as tests.
      it('should have enough committed values which it refers', async () => {
        await commitNodes(dataToCommit.nodes)
        try {
          await merkluxCase.seal({ from: primary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('reverted successfully')
        }
      })
      it('should have enough committed nodes for its merkle proof', async () => {
        await commitValues(dataToCommit.values)
        try {
          await merkluxCase.seal({ from: primary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('reverted successfully')
        }
      })
      it('should change its status as ONGOING and emit an event for it', async () => {
        // make the case have ONGOING status first
        await commitNodes(dataToCommit.nodes)
        await commitValues(dataToCommit.values)
        let response = await merkluxCase.seal({ from: primary })
        // check it emits an event
        assert.equal(
          web3.toDecimal(response.receipt.logs[0].data),
          Status.ONGOING,
          'event will be logged in the receipt'
        )
        // check it returns its status as ONGOING
        assert.equal(
          (await merkluxCase.status()).toNumber(),
          Status.ONGOING,
          'it should return its status as ONGOING'
        )
      })
    })
    describe('insert()', async () => {
      it('should be called only when the case is in ONGOING status', async () => {
        try {
          await merkluxCase.insert('somekey', 'someval', { from: primary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('reverted successfully')
        }
      })
      it('should be called by only the primary account', async () => {
        // make the case have ONGOING status first
        await commitNodes(dataToCommit.nodes)
        await commitValues(dataToCommit.values)
        await merkluxCase.seal({ from: primary })
        // insert item with primary account
        await merkluxCase.insert('somekey', 'someval', { from: primary })
        assert.ok('primary account can insert item')
        try {
          // insert item with non primary account
          await merkluxCase.insert('somekey', 'someval', { from: nonPrimary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('non primary account can not insert item')
        }
      })
    })
    describe('proof()', async () => {
      it('should revert when the calculated root hash is not equal to the target hash', async () => {
        // make the case have ONGOING status first
        await commitNodes(dataToCommit.nodes)
        await commitValues(dataToCommit.values)
        await merkluxCase.seal({ from: primary })
        // insert manipulated items
        await merkluxCase.insert('key3', 'manipulatedval3', { from: primary }) // original value is 'val3'
        await merkluxCase.insert('key4', 'manipulatedval4', { from: primary }) // original value is 'val4'
        // try to proof
        try {
          await merkluxCase.proof({ from: nonPrimary })
          assert.fail('should revert')
        } catch (e) {
          assert.ok('reverted because it was manipulated')
        }
      })
      it('should change its state as SUCCESS when the calculated root hash is equal to the target hash', async () => {
        // make the case have ONGOING status first
        await commitNodes(dataToCommit.nodes)
        await commitValues(dataToCommit.values)
        await merkluxCase.seal({ from: primary })
        // insert correct items
        await merkluxCase.insert('key3', 'val3', { from: primary })
        await merkluxCase.insert('key4', 'val4', { from: primary })
        // try to proof
        await merkluxCase.proof({ from: primary })
        // check it changes its status as SUCCESS
        assert.equal(
          (await merkluxCase.status()).toNumber(),
          Status.SUCCESS,
          'it should return its status as SUCCESS'
        )
      })
    })
  })
})
/**
 * Recursively walks the subtree below `hash`, collecting everything needed to
 * replay that state on a MerkluxCaseTree: every branch node (as the argument
 * list for commitNode) and every leaf value (for commitValue).
 * @param {Object} tree - contract instance exposing getNode() / getValue()
 * @param {string} hash - node hash to start the traversal from
 * @returns {Promise<{values: string[], nodes: Array[]}>}
 */
const getNodeRecursively = async function (tree, hash) {
  const result = {
    values: [],
    nodes: []
  }
  const response = await tree.getNode(hash)
  // response[2] holds the first child's node value,
  // response[5] holds the second child's node value.
  if (web3.toDecimal(response[2]) === 0 && web3.toDecimal(response[5]) === 0) {
    // Leaf node: collect the stored value to commit.
    const value = await tree.getValue(hash)
    result.values.push(web3.toUtf8(value))
  } else {
    // Branch node: collect it, then recurse into the children.
    result.nodes.push([hash, ...response])
    const resultFromFirstChild = await getNodeRecursively(tree, response[2])
    result.values = [...result.values, ...resultFromFirstChild.values]
    result.nodes = [...result.nodes, ...resultFromFirstChild.nodes]
    // An edge duplicates its only child when just one exists, so skip the
    // second child when it equals the first (strict comparison; the old
    // loose `!=` relied on implicit coercion).
    if (response[5] !== response[2]) {
      const resultFromSecondChild = await getNodeRecursively(tree, response[5])
      result.values = [...result.values, ...resultFromSecondChild.values]
      result.nodes = [...result.nodes, ...resultFromSecondChild.nodes]
    }
  }
  return result
}
@@ -1,5 +0,5 @@ | ||
| var Migrations = artifacts.require("./Migrations.sol"); | ||
| var Migrations = artifacts.require('./Migrations.sol') | ||
| module.exports = function(deployer) { | ||
| deployer.deploy(Migrations); | ||
| }; | ||
| module.exports = function (deployer) { | ||
| deployer.deploy(Migrations) | ||
| } |
@@ -1,5 +0,5 @@ | ||
| const PatriciaTree = artifacts.require("PatriciaTree"); | ||
| const PatriciaTree = artifacts.require('PatriciaTree') | ||
| module.exports = function(deployer) { | ||
| deployer.deploy(PatriciaTree); | ||
| }; | ||
| module.exports = function (deployer) { | ||
| deployer.deploy(PatriciaTree) | ||
| } |
+12
-7
| { | ||
| "name": "merklux", | ||
| "version": "0.0.0", | ||
| "description": "A merkleized unidirectional data flow for state verification across multiple chains", | ||
| "version": "0.0.1", | ||
| "description": "A state machine for child chain to manage the state transition with a merkleized unidirectional data flow", | ||
| "directories": { | ||
@@ -11,12 +11,17 @@ "test": "test" | ||
| "chai-bignumber": "^2.0.2", | ||
| "ganache-cli": "^6.1.6", | ||
| "openzeppelin-solidity": "^2.0.0-rc.2", | ||
| "truffle": "^4.1.13", | ||
| "ganache-cli": "^6.1.8", | ||
| "solidity-coverage": "^0.5.11", | ||
| "standard": "^12.0.1", | ||
| "truffle": "^4.1.14", | ||
| "truffle-hdwallet-provider": "^0.0.5" | ||
| }, | ||
| "dependencies": {}, | ||
| "dependencies": { | ||
| "openzeppelin-solidity": "^2.0.0-rc.2" | ||
| }, | ||
| "scripts": { | ||
| "sequenceTest": "scripts/sequenceTest.sh", | ||
| "coverage": "./node_modules/.bin/solidity-coverage", | ||
| "build": "truffle compile", | ||
| "migrate": "truffle migrate" | ||
| "migrate": "truffle migrate", | ||
| "test": "scripts/test.sh" | ||
| }, | ||
@@ -23,0 +28,0 @@ "repository": { |
+17
-1
@@ -1,3 +0,19 @@ | ||
| # merklux | ||
| # Merklux | ||
| [](https://gitter.im/commitground/merklux?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) | ||
| ##### latest released version | ||
| [](https://www.npmjs.com/package/merklux) | ||
| [](https://travis-ci.org/commitground/merklux) | ||
| [](https://coveralls.io/github/commitground/merklux?branch=master) | ||
| ##### in progress | ||
| [](https://www.npmjs.com/package/merklux) | ||
| [](https://travis-ci.org/commitground/merklux) | ||
| [](https://coveralls.io/github/commitground/merklux?branch=develop) | ||
| [](https://github.com/standard/standard) | ||
| ## What is Merklux | ||
@@ -4,0 +20,0 @@ |
+196
-198
@@ -1,217 +0,215 @@ | ||
| const chai = require('chai'); | ||
| const assert = chai.assert; | ||
| const BigNumber = web3.BigNumber; | ||
| const should = chai.use(require('chai-bignumber')(BigNumber)).should(); | ||
| const chai = require('chai') | ||
| const assert = chai.assert | ||
| const BigNumber = web3.BigNumber | ||
| const should = chai.use(require('chai-bignumber')(BigNumber)).should() | ||
| const MerkluxTree = artifacts.require('MerkluxTree'); | ||
| const {toNodeObject, progress} = require('./utils'); | ||
| const MerkluxTree = artifacts.require('MerkluxTree') | ||
| const { toNodeObject, progress } = require('./utils') | ||
| const ZERO = '0x0000000000000000000000000000000000000000000000000000000000000000'; | ||
| const ZERO = '0x0000000000000000000000000000000000000000000000000000000000000000' | ||
| contract('MerkluxTree', async ([_, primary, nonPrimary]) => { | ||
| context('inherits the patricia tree smart contract', async () => { | ||
| let tree; | ||
| beforeEach('deploy MerkluxTree', async () => { | ||
| tree = await MerkluxTree.new({from: primary}); | ||
| }); | ||
| describe('insert()', async () => { | ||
| it('should not use gas more than 1 million', async () => { | ||
| let itemCount = 10; | ||
| let items = {}; | ||
| for (let i = 0; i < itemCount; i++) { | ||
| items[web3.sha3('key' + Math.random())] = web3.sha3('val' + Math.random()); | ||
| } | ||
| let count = 1; | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], {from: primary}); | ||
| let estimatedGasToAddNewValue = await tree.insert.estimateGas(web3.sha3('key' + Math.random()), web3.sha3('val' + Math.random()), {from: primary}); | ||
| progress.log(`(${count++}/${itemCount}) Required gas for a transaction: ${estimatedGasToAddNewValue}`); | ||
| assert.isTrue(estimatedGasToAddNewValue < 1000000); | ||
| } | ||
| progress.close(); | ||
| }); | ||
| it('should allow only primary address to put items', async () => { | ||
| await tree.insert('foo', 'bar', {from: primary}); | ||
| }); | ||
| it('should allow overwriting', async () => { | ||
| await tree.insert('foo', 'bar', {from: primary}); | ||
| await tree.insert('foo', 'baz', {from: primary}); | ||
| assert.equal(web3.toUtf8(await tree.get('foo')), 'baz'); | ||
| }); | ||
| it('should revert when a non-primary address tries to insert a new item', async () => { | ||
| try { | ||
| await tree.insert('foo', 'bar', {from: nonPrimary}); | ||
| assert.fail('it should throw an error') | ||
| } catch (e) { | ||
| assert.ok('it is successfully reverted'); | ||
| } | ||
| }); | ||
| }); | ||
| context('inherits the patricia tree smart contract', async () => { | ||
| let tree | ||
| beforeEach('deploy MerkluxTree', async () => { | ||
| tree = await MerkluxTree.new({ from: primary }) | ||
| }) | ||
| describe('insert()', async () => { | ||
| it('should not use gas more than 1 million', async () => { | ||
| let itemCount = 10 | ||
| let items = {} | ||
| for (let i = 0; i < itemCount; i++) { | ||
| items[web3.sha3('key' + Math.random())] = web3.sha3('val' + Math.random()) | ||
| } | ||
| let count = 1 | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], { from: primary }) | ||
| let estimatedGasToAddNewValue = await tree.insert.estimateGas(web3.sha3('key' + Math.random()), web3.sha3('val' + Math.random()), { from: primary }) | ||
| progress.log(`(${count++}/${itemCount}) Required gas for a transaction: ${estimatedGasToAddNewValue}`) | ||
| assert.isTrue(estimatedGasToAddNewValue < 1000000) | ||
| } | ||
| progress.close() | ||
| }) | ||
| it('should allow only primary address to put items', async () => { | ||
| await tree.insert('foo', 'bar', { from: primary }) | ||
| }) | ||
| it('should allow overwriting', async () => { | ||
| await tree.insert('foo', 'bar', { from: primary }) | ||
| await tree.insert('foo', 'baz', { from: primary }) | ||
| assert.equal(web3.toUtf8(await tree.get('foo')), 'baz') | ||
| }) | ||
| it('should revert when a non-primary address tries to insert a new item', async () => { | ||
| try { | ||
| await tree.insert('foo', 'bar', { from: nonPrimary }) | ||
| assert.fail('it should throw an error') | ||
| } catch (e) { | ||
| assert.ok('it is successfully reverted') | ||
| } | ||
| }) | ||
| }) | ||
| describe('getRootHash()', async () => { | ||
| it('should return its root hash value as zero when nothing is stored', async () => { | ||
| assert.equal(await tree.getRootHash(), ZERO) | ||
| }); | ||
| it('should update its root hash when every new items are put into', async () => { | ||
| // insert an item | ||
| await tree.insert('foo', 'bar', {from: primary}); | ||
| let firstRootHash = await tree.getRootHash(); | ||
| // insert an item again | ||
| await tree.insert('baz', 'qux', {from: primary}); | ||
| let secondRootHash = await tree.getRootHash(); | ||
| assert.notEqual(firstRootHash, secondRootHash); | ||
| // insert an item again | ||
| await tree.insert('foo', 'baz', {from: primary}); | ||
| let thirdRootHash = await tree.getRootHash(); | ||
| assert.notEqual(secondRootHash, thirdRootHash); | ||
| }); | ||
| describe('getRootHash()', async () => { | ||
| it('should return its root hash value as zero when nothing is stored', async () => { | ||
| assert.equal(await tree.getRootHash(), ZERO) | ||
| }) | ||
| it('should update its root hash when every new items are put into', async () => { | ||
| // insert an item | ||
| await tree.insert('foo', 'bar', { from: primary }) | ||
| let firstRootHash = await tree.getRootHash() | ||
| // insert an item again | ||
| await tree.insert('baz', 'qux', { from: primary }) | ||
| let secondRootHash = await tree.getRootHash() | ||
| assert.notEqual(firstRootHash, secondRootHash) | ||
| // insert an item again | ||
| await tree.insert('foo', 'baz', { from: primary }) | ||
| let thirdRootHash = await tree.getRootHash() | ||
| assert.notEqual(secondRootHash, thirdRootHash) | ||
| }) | ||
| it('should return same root hash for same write history', async () => { | ||
| // define items to put | ||
| let items = { | ||
| key1: 'val1', | ||
| key2: 'val2', | ||
| key3: 'val3', | ||
| }; | ||
| it('should return same root hash for same write history', async () => { | ||
| // define items to put | ||
| let items = { | ||
| key1: 'val1', | ||
| key2: 'val2', | ||
| key3: 'val3' | ||
| } | ||
| // insert items into the first tree | ||
| for (const key of Object.keys(items)) { | ||
| progress.log(`Insert items (${key}, ${items[key]})`); | ||
| await tree.insert(key, items[key], {from: primary}); | ||
| } | ||
| progress.close(); | ||
| // get root hash of the first tree | ||
| let rootEdgeOfTree = await tree.getRootEdge(); | ||
| let rootHashOfTree = rootEdgeOfTree[2]; | ||
| // insert items into the first tree | ||
| for (const key of Object.keys(items)) { | ||
| progress.log(`Insert items (${key}, ${items[key]})`) | ||
| await tree.insert(key, items[key], { from: primary }) | ||
| } | ||
| progress.close() | ||
| // get root hash of the first tree | ||
| let rootEdgeOfTree = await tree.getRootEdge() | ||
| let rootHashOfTree = rootEdgeOfTree[2] | ||
| // deploy a second tree | ||
| let secondTree = await MerkluxTree.new({from: primary}); | ||
| // insert same items into the second tree | ||
| for (const key of Object.keys(items)) { | ||
| await progress.log(`Insert items into the second tree (${key}, ${items[key]})`, 500); | ||
| await secondTree.insert(key, items[key], {from: primary}); | ||
| } | ||
| progress.close(); | ||
| // get root hash of the second tree | ||
| let rootEdgeOfSecondTree = await secondTree.getRootEdge(); | ||
| let rootHashOfSecondTree = rootEdgeOfSecondTree[2]; | ||
| // deploy a second tree | ||
| let secondTree = await MerkluxTree.new({ from: primary }) | ||
| // insert same items into the second tree | ||
| for (const key of Object.keys(items)) { | ||
| await progress.log(`Insert items into the second tree (${key}, ${items[key]})`, 500) | ||
| await secondTree.insert(key, items[key], { from: primary }) | ||
| } | ||
| progress.close() | ||
| // get root hash of the second tree | ||
| let rootEdgeOfSecondTree = await secondTree.getRootEdge() | ||
| let rootHashOfSecondTree = rootEdgeOfSecondTree[2] | ||
| // compare the two root hashes | ||
| assert.equal(rootHashOfTree, rootHashOfSecondTree); | ||
| }); | ||
| }); | ||
| // compare the two root hashes | ||
| assert.equal(rootHashOfTree, rootHashOfSecondTree) | ||
| }) | ||
| }) | ||
| describe('getNode()', async () => { | ||
| it('should able to find all nodes', async () => { | ||
| let items = { | ||
| 'key1': 'value1', | ||
| 'key2': 'value2', | ||
| 'key3': 'value3', | ||
| 'key4': 'value4', | ||
| 'key5': 'value5', | ||
| }; | ||
| describe('getNode()', async () => { | ||
| it('should able to find all nodes', async () => { | ||
| let items = { | ||
| 'key1': 'value1', | ||
| 'key2': 'value2', | ||
| 'key3': 'value3', | ||
| 'key4': 'value4', | ||
| 'key5': 'value5' | ||
| } | ||
| // insert items | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], {from: primary}); | ||
| } | ||
| // insert items | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], { from: primary }) | ||
| } | ||
| // find all nodes and check stored value hash | ||
| let leafNodes = []; | ||
| let nodeObjs = []; | ||
| // find all nodes and check stored value hash | ||
| let leafNodes = [] | ||
| let nodeObjs = [] | ||
| const getNodeRecursively = (depth, parent, hash) => new Promise(async res => { | ||
| let result = await tree.getNode(hash); | ||
| let nodes = [ | ||
| [result[0], result[1], result[2]], | ||
| [result[3], result[4], result[5]]]; | ||
| for (let i = 0; i < nodes.length; i++) { | ||
| let nodeObj = toNodeObject(depth, hash, nodes[i]); | ||
| nodeObjs.push(nodeObj); | ||
| let nodeHashValue = nodeObj.node; | ||
| if (nodeHashValue == ZERO) { | ||
| // Because an edge should always have two nodes, it duplicates a leaf node when only one exist. | ||
| // Therefore, if there already exists a same node, do not push it into the leaf node array. | ||
| let leafNode = { | ||
| parent, | ||
| hash | ||
| }; | ||
| let leafNodeAlreadyExist = leafNodes.reduce((val, item) => JSON.stringify(item) === JSON.stringify(leafNode), 0); | ||
| if (!leafNodeAlreadyExist) { | ||
| leafNodes.push(leafNode); | ||
| await progress.log(`Found leaf node (${leafNode.hash})`, 500); | ||
| } | ||
| } else { | ||
| await getNodeRecursively(depth + 1, hash, nodeHashValue); | ||
| } | ||
| } | ||
| progress.close(); | ||
| res(); | ||
| }); | ||
| const getNodeRecursively = (depth, parent, hash) => new Promise(async res => { | ||
| let result = await tree.getNode(hash) | ||
| let nodes = [ | ||
| [result[0], result[1], result[2]], | ||
| [result[3], result[4], result[5]]] | ||
| for (let i = 0; i < nodes.length; i++) { | ||
| let nodeObj = toNodeObject(depth, hash, nodes[i]) | ||
| nodeObjs.push(nodeObj) | ||
| let nodeHashValue = nodeObj.node | ||
| if (nodeHashValue == ZERO) { | ||
| // Because an edge should always have two nodes, it duplicates a leaf node when only one exist. | ||
| // Therefore, if there already exists a same node, do not push it into the leaf node array. | ||
| let leafNode = { | ||
| parent, | ||
| hash | ||
| } | ||
| let leafNodeAlreadyExist = leafNodes.reduce((val, item) => JSON.stringify(item) === JSON.stringify(leafNode), 0) | ||
| if (!leafNodeAlreadyExist) { | ||
| leafNodes.push(leafNode) | ||
| } | ||
| } else { | ||
| await getNodeRecursively(depth + 1, hash, nodeHashValue) | ||
| } | ||
| } | ||
| progress.close() | ||
| res() | ||
| }) | ||
| // Get root hash to start to find nodes recursively | ||
| let rootNode = toNodeObject(0, 'root', await tree.getRootEdge()); | ||
| let rootHash = rootNode.node; | ||
| // Find nodes recursively and add leaf nodes to the array | ||
| await getNodeRecursively(1, 'root', rootHash); | ||
| // Get root hash to start to find nodes recursively | ||
| let rootNode = toNodeObject(0, 'root', await tree.getRootEdge()) | ||
| let rootValue = rootNode.node | ||
| // Find nodes recursively and add leaf nodes to the array | ||
| await getNodeRecursively(1, 'root', rootValue) | ||
| // Compare the found leaf nodes and initial items | ||
| let hashValuesFromLeafNodes = leafNodes.map(leafNode => leafNode.hash); | ||
| let hashValuesFromInitialItems = Object.values(items).map(item => web3.sha3(item)); | ||
| assert.equal( | ||
| JSON.stringify(hashValuesFromLeafNodes.sort()), | ||
| JSON.stringify(hashValuesFromInitialItems.sort()) | ||
| ); | ||
| // Compare the found leaf nodes and initial items | ||
| let hashValuesFromLeafNodes = leafNodes.map(leafNode => leafNode.hash) | ||
| let hashValuesFromInitialItems = Object.values(items).map(item => web3.sha3(item)) | ||
| assert.equal( | ||
| JSON.stringify(hashValuesFromLeafNodes.sort()), | ||
| JSON.stringify(hashValuesFromInitialItems.sort()) | ||
| ) | ||
| // if you want to see more in detail, you can print the leafNodes and nodeObj arrays. | ||
| // console.log(nodeObjs); | ||
| // console.log(leafNodes); | ||
| }); | ||
| }); | ||
| // if you want to see more in detail, you can print the leafNodes and nodeObj arrays. | ||
| // console.log(nodeObjs); | ||
| // console.log(leafNodes); | ||
| }) | ||
| }) | ||
| describe('getProof() & verifyProof()', async () => { | ||
| it('should be able to verify merkle proof for a given key', async () => { | ||
| let items = {key1: 'value1', key2: 'value2', key3: 'value3'}; | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], {from: primary}) | ||
| } | ||
| let count = 0; | ||
| for (const key of Object.keys(items)) { | ||
| let [branchMask, siblings] = await tree.getProof(key); | ||
| let rootHash = await tree.getRootHash(); | ||
| await tree.verifyProof(rootHash, key, items[key], branchMask, siblings); | ||
| progress.log(`(${count++}/${Object.keys(items).length}) Merkle proof for ${key}:${items[key]}`); | ||
| assert.ok('is not reverted'); | ||
| } | ||
| progress.close(); | ||
| }); | ||
| describe('getProof() & verifyProof()', async () => { | ||
| it('should be able to verify merkle proof for a given key', async () => { | ||
| let items = { key1: 'value1', key2: 'value2', key3: 'value3' } | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], { from: primary }) | ||
| } | ||
| let count = 0 | ||
| for (const key of Object.keys(items)) { | ||
| let [branchMask, siblings] = await tree.getProof(key) | ||
| let rootHash = await tree.getRootHash() | ||
| await tree.verifyProof(rootHash, key, items[key], branchMask, siblings) | ||
| progress.log(`(${count++}/${Object.keys(items).length}) Merkle proof for ${key}:${items[key]}`) | ||
| assert.ok('is not reverted') | ||
| } | ||
| progress.close() | ||
| }) | ||
| it('should throw an error for an invalid merkle proof', async () => { | ||
| let items = {key1: 'value1', key2: 'value2', key3: 'value3'}; | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], {from: primary}) | ||
| } | ||
| let count = 0; | ||
| for (const key of Object.keys(items)) { | ||
| let [branchMask, siblings] = await tree.getProof(key); | ||
| let rootHash = await tree.getRootHash(); | ||
| try { | ||
| await tree.verifyProof(rootHash, key, `manipulate${items[key]}`, branchMask, siblings); | ||
| } catch (e) { | ||
| progress.log(`(${count++}/${Object.keys(items).length}) fraud proof for ${key}:${items[key]}`); | ||
| assert.ok('reverted'); | ||
| } | ||
| } | ||
| progress.close(); | ||
| }); | ||
| }); | ||
| it('should throw an error for an invalid merkle proof', async () => { | ||
| let items = { key1: 'value1', key2: 'value2', key3: 'value3' } | ||
| for (const key of Object.keys(items)) { | ||
| await tree.insert(key, items[key], { from: primary }) | ||
| } | ||
| let count = 0 | ||
| for (const key of Object.keys(items)) { | ||
| let [branchMask, siblings] = await tree.getProof(key) | ||
| let rootHash = await tree.getRootHash() | ||
| try { | ||
| await tree.verifyProof(rootHash, key, `manipulate${items[key]}`, branchMask, siblings) | ||
| } catch (e) { | ||
| progress.log(`(${count++}/${Object.keys(items).length}) fraud proof for ${key}:${items[key]}`) | ||
| assert.ok('reverted') | ||
| } | ||
| } | ||
| progress.close() | ||
| }) | ||
| }) | ||
| describe('get()', async () => { | ||
| it('should return stored value for the given key', async () => { | ||
| await tree.insert('foo', 'bar', {from: primary}); | ||
| assert.equal(web3.toUtf8(await tree.get('foo')), 'bar'); | ||
| }); | ||
| }); | ||
| }); | ||
| }); | ||
| describe('get()', async () => { | ||
| it('should return stored value for the given key', async () => { | ||
| await tree.insert('foo', 'bar', { from: primary }) | ||
| assert.equal(web3.toUtf8(await tree.get('foo')), 'bar') | ||
| }) | ||
| }) | ||
| }) | ||
| }) |
+34
-39
| const hexToString = hex => { | ||
| const hexCodes = hex.startsWith("0x") ? hex.substr(2) : hex; | ||
| let str = ''; | ||
| let i; | ||
| for (i = 0; (i < hexCodes.length && hexCodes.substr(i, 2) !== '00'); i += 2) | ||
| str += String.fromCharCode(parseInt(hexCodes.substr(i, 2), 16)); | ||
| return str; | ||
| }; | ||
| const hexCodes = hex.startsWith('0x') ? hex.substr(2) : hex | ||
| let str = '' | ||
| let i | ||
| for (i = 0; (i < hexCodes.length && hexCodes.substr(i, 2) !== '00'); i += 2) { | ||
| str += String.fromCharCode(parseInt(hexCodes.substr(i, 2), 16)) | ||
| } | ||
| return str | ||
| } | ||
| const toNodeObject = (depth, label, node) => { | ||
| return { | ||
| parent: label, | ||
| depth, | ||
| labelLength: node[0].toNumber(), | ||
| labelData: node[1], | ||
| node: node[2], | ||
| }; | ||
| }; | ||
| return { | ||
| parent: label, | ||
| depth, | ||
| labelLength: node[0].toNumber(), | ||
| labelData: node[1], | ||
| node: node[2] | ||
| } | ||
| } | ||
| const logger4 = (on, ...args) => { | ||
| if (on) { | ||
| console.log(args); | ||
| } | ||
| }; | ||
| const progress = { | ||
| log: async (output, ms) => { | ||
| process.stdout.clearLine(); | ||
| process.stdout.cursorTo(0); | ||
| process.stdout.write(`Progress >>\t${output}`); | ||
| if (ms) { | ||
| let sleep = () => new Promise(resolve => setTimeout(resolve, ms)); | ||
| // await sleep(); | ||
| } | ||
| }, | ||
| close: () => { | ||
| process.stdout.clearLine(); | ||
| process.stdout.cursorTo(0); | ||
| process.stdout.write(''); | ||
| log: async (output, ms) => { | ||
| process.stdout.clearLine() | ||
| process.stdout.cursorTo(0) | ||
| process.stdout.write(`Progress >>\t${output}`) | ||
| if (ms) { | ||
| let sleep = () => new Promise(resolve => setTimeout(resolve, ms)) | ||
| await sleep() | ||
| } | ||
| }; | ||
| }, | ||
| close: () => { | ||
| process.stdout.clearLine() | ||
| process.stdout.cursorTo(0) | ||
| process.stdout.write('') | ||
| } | ||
| } | ||
| module.exports = { | ||
| hexToString, | ||
| toNodeObject, | ||
| progress | ||
| }; | ||
| hexToString, | ||
| toNodeObject, | ||
| progress | ||
| } |
+5
-25
@@ -16,3 +16,3 @@ /* | ||
| module.exports = { | ||
| migrations_directory: "./migrations", | ||
| migrations_directory: './migrations', | ||
| // See <http://truffleframework.com/docs/advanced/configuration> | ||
@@ -22,25 +22,5 @@ // to customize your Truffle configuration! | ||
| development: { | ||
| host: "localhost", | ||
| host: 'localhost', | ||
| port: 8545, | ||
| network_id: "*" | ||
| }, | ||
| devRoot: { | ||
| host: "localhost", | ||
| port: 8546, | ||
| network_id: "*" | ||
| }, | ||
| devSide: { | ||
| host: "localhost", | ||
| port: 8547, | ||
| network_id: "*" | ||
| }, | ||
| testRoot: { | ||
| host: "localhost", | ||
| port: 8548, | ||
| network_id: "*" | ||
| }, | ||
| testSide: { | ||
| host: "localhost", | ||
| port: 8549, | ||
| network_id: "*" | ||
| network_id: '*' | ||
| } | ||
@@ -53,3 +33,3 @@ }, | ||
| } | ||
| } | ||
| }; | ||
| } | ||
| } |
+5
-25
@@ -16,3 +16,3 @@ /* | ||
| module.exports = { | ||
| migrations_directory: "./migrations", | ||
| migrations_directory: './migrations', | ||
| // See <http://truffleframework.com/docs/advanced/configuration> | ||
@@ -22,25 +22,5 @@ // to customize your Truffle configuration! | ||
| development: { | ||
| host: "localhost", | ||
| host: 'localhost', | ||
| port: 8545, | ||
| network_id: "180902" | ||
| }, | ||
| devRoot: { | ||
| host: "localhost", | ||
| port: 8546, | ||
| network_id: "180903" | ||
| }, | ||
| devSide: { | ||
| host: "localhost", | ||
| port: 8547, | ||
| network_id: "180904" | ||
| }, | ||
| testRoot: { | ||
| host: "localhost", | ||
| port: 8548, | ||
| network_id: "180905" | ||
| }, | ||
| testSide: { | ||
| host: "localhost", | ||
| port: 8549, | ||
| network_id: "180906" | ||
| network_id: '*' | ||
| } | ||
@@ -53,3 +33,3 @@ }, | ||
| } | ||
| } | ||
| }; | ||
| } | ||
| } |
| #!/bin/bash | ||
| # Test script should be run in the base directory | ||
| cd `dirname "$0"` && cd ../ | ||
| if [ -f "truffle.js" ] | ||
| then | ||
| echo "Start testing" | ||
| else | ||
| echo "You should run this script in the base directory of this project" | ||
| exit 1 | ||
| fi | ||
| # Exit when the test directory is empty | ||
| if !([ "$(ls -A ./test/sequence)" ]); then | ||
| echo "There does not exist any test case" | ||
| exit 1 | ||
| fi | ||
| # Terminate running ganaches for testing | ||
| kill_ganaches() { | ||
| echo "Terminate ganaches" | ||
| if !([ -z ${rootpid+x} ]);then | ||
| kill $rootpid | ||
| fi | ||
| if !([ -z ${sidepid+x} ]);then | ||
| kill $sidepid | ||
| fi | ||
| } | ||
| # Compile contracts | ||
| truffle compile --all | ||
| if !([ $? -eq 0 ]) exit $? | ||
| # Run root chain for testing | ||
| ganache-cli --port 8547 --networkId 180905 --blocktime 1 > /dev/null & rootpid=$! | ||
| if ps -p $rootpid > /dev/null | ||
| then | ||
| echo "Running Root Chain..." | ||
| else | ||
| echo "Failed to run root chain on 8547 port." | ||
| exit 1 | ||
| fi | ||
| # Run side chain for testing | ||
| ganache-cli --port 8548 --networkId 180906 --blocktime 1 > /dev/null & sidepid=$! | ||
| if ps -p $sidepid > /dev/null | ||
| then | ||
| echo "Running Side Chain..." | ||
| else | ||
| echo "Failed to run side chain on 8548 port." | ||
| kill_ganaches | ||
| exit 1 | ||
| fi | ||
| # Deploy contracts on the root chain for testing | ||
| truffle migrate --network testRoot | ||
| [ $? -ne 0 ] && exit $? | ||
| # Deploy contracts on the side chain for testing | ||
| truffle migrate --network testSide | ||
| [ $? -ne 0 ] && exit $? | ||
| sleep 5 | ||
| # Trap interrupts | ||
| # Run test files by orders. | ||
| # A test file should have name like {order}-{sort of chain}-{title}.{ext} | ||
| # eg. "1-root-firstcase.js", "2-side-secondcase.js" | ||
| for testfile in ./test/sequence/*; do | ||
| if [[ "$testfile" =~ ^\.\/test\/[0-9]*-root-.*$ ]]; then | ||
| truffle test $testfile --network testRoot | ||
| [ $? -ne 0 ] && exit $? | ||
| elif [[ "$testfile" =~ ^\.\/test\/[0-9]*-side-.*$ ]]; then | ||
| truffle test $testfile --network testSide | ||
| [ $? -ne 0 ] && exit $? | ||
| else | ||
| echo "Invalid filename: $testfile" | ||
| echo "A test script's file name should be like {order}-{sort of chain}-{title}.{ext}" | ||
| kill_ganaches | ||
| exit 1 | ||
| fi | ||
| done | ||
| kill_ganaches | ||
| exit 0 |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
61020
37.15%24
14.29%544
55.43%32
100%1
Infinity%7
16.67%1
Infinity%+ Added