From 89463fcf9504e1eae4048c41ad5bafd1f7c27947 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 18 Feb 2020 13:54:52 +0200 Subject: [PATCH 001/186] Added Ownable.sol and constructor test --- bin/prepare-test-contracts.sh | 2 + contracts/contracts/Ownable.sol | 52 +++++++++++++++++++++++ contracts/test/unit_tests/ownable_test.js | 18 ++++++++ 3 files changed, 72 insertions(+) create mode 100644 contracts/contracts/Ownable.sol create mode 100644 contracts/test/unit_tests/ownable_test.js diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index b4dc54383d..872c29b3cb 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -15,6 +15,7 @@ cp $IN_DIR/Verifier.sol $OUT_DIR/VerifierTest.sol cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol +cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol # Rename contracts ssed 's/Governance/GovernanceTest/' -i $OUT_DIR/*.sol @@ -23,6 +24,7 @@ ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/PriorityQueue/PriorityQueueTest/' -i $OUT_DIR/*.sol ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol +ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol # Workaround -> priority queue has FranklinTest in method names. ssed 's/FranklinTest/Franklin/' -i $OUT_DIR/PriorityQueueTest.sol diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol new file mode 100644 index 0000000000..0ac6117ecc --- /dev/null +++ b/contracts/contracts/Ownable.sol @@ -0,0 +1,52 @@ +pragma solidity 0.5.16; + +/// @title Ownable Contract +/// @author Matter Labs +contract Ownable { + /// @notice Storage position of the owner address + bytes32 private constant ownerPosition = keccak256("owner"); + + /// @notice Contract constructor + /// @dev Sets msg sender address as owner address + constructor() public { + setOwner(msg.sender); + } + + /// @notice Check if specified address is owner + /// @param _address Address to check + function requireOwner(address _address) internal view { + require( + _address == getOwner(), + "oro11" + ); // oro11 - only by owner + } + + /// @notice Returns contract owner address + /// @return Owner address + function getOwner() public view returns (address owner) { + bytes32 position = ownerPosition; + assembly { + owner := sload(position) + } + } + + /// @notice Sets new owner address + /// @param _newOwner New owner address + function setOwner(address _newOwner) internal { + bytes32 position = ownerPosition; + assembly { + sstore(position, _newOwner) + } + } + + /// @notice Transfer ownership of the contract to new owner + /// @param _newOwner New owner address + function transferOwnership(address _newOwner) external { + requireOwner(msg.sender); + require( + _newOwner != address(0), + "otp11" + ); // otp11 - new owner can't be zero address + setOwner(_newOwner); + } +} diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js new file mode 100644 index 0000000000..9a9f8549d8 --- /dev/null +++ b/contracts/test/unit_tests/ownable_test.js @@ -0,0 +1,18 @@ +const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); +const { wallet, deployTestContract, getCallRevertReason } = require("./common") + + +describe("Ownable unit test", function () { + this.timeout(50000); + + let testContract + before(async () => { + testContract = await deployTestContract('../../build/OwnableTest') + }); + + 
it("checking correctness of setting ownership in constructor", async () => { + expect(await testContract.getOwner()).to.equal(wallet.address) + }); + +}); From 69c470a040d0c342f00d56e2deabe17f077a4bc7 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 18 Feb 2020 15:19:59 +0200 Subject: [PATCH 002/186] Change naming from 'owner' to 'master' --- contracts/contracts/Ownable.sol | 50 +++++++++++------------ contracts/test/unit_tests/ownable_test.js | 4 +- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol index 0ac6117ecc..10bbd450d4 100644 --- a/contracts/contracts/Ownable.sol +++ b/contracts/contracts/Ownable.sol @@ -3,50 +3,50 @@ pragma solidity 0.5.16; /// @title Ownable Contract /// @author Matter Labs contract Ownable { - /// @notice Storage position of the owner address - bytes32 private constant ownerPosition = keccak256("owner"); + /// @notice Storage position of the master address + bytes32 private constant masterPosition = keccak256("master"); /// @notice Contract constructor - /// @dev Sets msg sender address as owner address + /// @dev Sets msg sender address as master address constructor() public { - setOwner(msg.sender); + setMaster(msg.sender); } - /// @notice Check if specified address is owner + /// @notice Check if specified address is master /// @param _address Address to check - function requireOwner(address _address) internal view { + function requireMaster(address _address) internal view { require( - _address == getOwner(), + _address == getMaster(), "oro11" - ); // oro11 - only by owner + ); // oro11 - only by master } - /// @notice Returns contract owner address - /// @return Owner address - function getOwner() public view returns (address owner) { - bytes32 position = ownerPosition; + /// @notice Returns contract master address + /// @return Master address + function getMaster() public view returns (address master) { + bytes32 position = masterPosition; assembly { - owner := sload(position) + master := sload(position) } } - /// @notice Sets new owner address - /// @param _newOwner New owner address - function setOwner(address _newOwner) internal { - bytes32 position = ownerPosition; + /// @notice Sets new master address + /// @param _newMaster New master address + function setMaster(address _newMaster) internal { + bytes32 position = masterPosition; assembly { - sstore(position, _newOwner) + sstore(position, _newMaster) } } - /// @notice Transfer ownership of the contract to new owner - /// @param _newOwner New owner address - function transferOwnership(address _newOwner) external { - requireOwner(msg.sender); + /// @notice Transfer mastership of the contract to new master + /// @param _newMaster New master address + function transferMastership(address _newMaster) external { + requireMaster(msg.sender); require( - _newOwner != address(0), + _newMaster != address(0), "otp11" - ); // otp11 - new owner can't be zero address - setOwner(_newOwner); + ); // otp11 - new master can't be zero address + setMaster(_newMaster); } } diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index 9a9f8549d8..b0b0f3a503 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ -11,8 +11,8 @@ describe("Ownable unit test", function () { testContract = await deployTestContract('../../build/OwnableTest') }); - it("checking correctness of setting ownership in constructor", async () => { - expect(await 
testContract.getOwner()).to.equal(wallet.address) + it("checking correctness of setting mastership in constructor", async () => { + expect(await testContract.getMaster()).to.equal(wallet.address) }); }); From 0023870b5abb98fe17cc80040742a53d4e239f32 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 18 Feb 2020 15:32:13 +0200 Subject: [PATCH 003/186] Comments fix --- contracts/contracts/Ownable.sol | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol index 10bbd450d4..fce2af6415 100644 --- a/contracts/contracts/Ownable.sol +++ b/contracts/contracts/Ownable.sol @@ -3,11 +3,11 @@ pragma solidity 0.5.16; /// @title Ownable Contract /// @author Matter Labs contract Ownable { - /// @notice Storage position of the master address + /// @notice Storage position of the masters address bytes32 private constant masterPosition = keccak256("master"); /// @notice Contract constructor - /// @dev Sets msg sender address as master address + /// @dev Sets msg sender address as masters address constructor() public { setMaster(msg.sender); } @@ -21,8 +21,8 @@ contract Ownable { ); // oro11 - only by master } - /// @notice Returns contract master address - /// @return Master address + /// @notice Returns contract masters address + /// @return Masters address function getMaster() public view returns (address master) { bytes32 position = masterPosition; assembly { @@ -30,8 +30,8 @@ contract Ownable { } } - /// @notice Sets new master address - /// @param _newMaster New master address + /// @notice Sets new masters address + /// @param _newMaster New masters address function setMaster(address _newMaster) internal { bytes32 position = masterPosition; assembly { @@ -40,13 +40,13 @@ contract Ownable { } /// @notice Transfer mastership of the contract to new master - /// @param _newMaster New master address + /// @param _newMaster New masters address function transferMastership(address _newMaster) external { requireMaster(msg.sender); require( _newMaster != address(0), "otp11" - ); // otp11 - new master can't be zero address + ); // otp11 - new masters address can't be zero address setMaster(_newMaster); } } From 491ba4f93fb05da0529c7e8ac337dad9b4d65ca7 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 18 Feb 2020 16:34:39 +0200 Subject: [PATCH 004/186] Added Ownable unit tests --- contracts/test/unit_tests/common.js | 4 +++- contracts/test/unit_tests/ownable_test.js | 28 ++++++++++++++++++++--- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index 25a960fa64..98271574fd 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -12,7 +12,7 @@ const { bigNumberify, parseEther, hexlify, formatEther } = require("ethers/utils // For: ganache const provider = createMockProvider() //{gasLimit: 7000000, gasPrice: 2000000000}); -const [wallet, exitWallet] = getWallets(provider); +const [wallet, wallet1, wallet2, exitWallet] = getWallets(provider); use(solidity); @@ -39,6 +39,8 @@ async function getCallRevertReason(f) { module.exports = { provider, wallet, + wallet1, + wallet2, exitWallet, deployTestContract, getCallRevertReason diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index b0b0f3a503..832054377f 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ -1,6 +1,6 @@ const { expect } = 
require("chai") const { deployContract } = require("ethereum-waffle"); -const { wallet, deployTestContract, getCallRevertReason } = require("./common") +const { wallet1, wallet2, deployTestContract, getCallRevertReason } = require("./common") describe("Ownable unit test", function () { @@ -8,11 +8,33 @@ describe("Ownable unit test", function () { let testContract before(async () => { - testContract = await deployTestContract('../../build/OwnableTest') + testContract = await deployContract(wallet1, require('../../build/OwnableTest'), [], { + gasLimit: 6000000, + }) }); it("checking correctness of setting mastership in constructor", async () => { - expect(await testContract.getMaster()).to.equal(wallet.address) + expect(await testContract.getMaster()).to.equal(wallet1.address) + }); + + it("checking correctness of transferring mastership to zero address", async () => { + let revertReason = await getCallRevertReason( () => testContract.transferMastership("0x0000000000000000000000000000000000000000") ); + expect(revertReason).equal("otp11") + }); + + it("checking correctness of transferring mastership", async () => { + /// transfer mastership to wallet2 + await testContract.transferMastership(wallet2.address); + expect(await testContract.getMaster()).to.equal(wallet2.address) + + /// try to transfer mastership to wallet1 by wallet1 call + let revertReason = await getCallRevertReason( () => testContract.transferMastership(wallet1.address) ); + expect(revertReason).equal("oro11") + + /// transfer mastership back to wallet1 + let testContract_with_wallet2_signer = await testContract.connect(wallet2); + await testContract_with_wallet2_signer.transferMastership(wallet1.address); + expect(await testContract.getMaster()).to.equal(wallet1.address) }); }); From 7c3bf31da439a75dba81ee2690d0ab25bb2654a7 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 20 Feb 2020 18:48:26 +0200 Subject: [PATCH 005/186] Added WaitUpgradeMode contract and UpgradeModeEvents --- bin/prepare-test-contracts.sh | 8 ++ contracts/contracts/Events.sol | 26 +++++ contracts/contracts/WaitUpgradeMode.sol | 96 ++++++++++++++++ contracts/test/unit_tests/common.js | 7 +- .../test/unit_tests/waitUpgradeMode_test.js | 105 ++++++++++++++++++ 5 files changed, 240 insertions(+), 2 deletions(-) create mode 100644 contracts/contracts/WaitUpgradeMode.sol create mode 100644 contracts/test/unit_tests/waitUpgradeMode_test.js diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 872c29b3cb..b9bb4511a2 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -16,6 +16,7 @@ cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol +cp $IN_DIR/WaitUpgradeMode.sol $OUT_DIR/WaitUpgradeModeTest.sol # Rename contracts ssed 's/Governance/GovernanceTest/' -i $OUT_DIR/*.sol @@ -25,6 +26,7 @@ ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/PriorityQueue/PriorityQueueTest/' -i $OUT_DIR/*.sol ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol +ssed 's/WaitUpgradeMode/WaitUpgradeModeTest/' -i $OUT_DIR/*.sol # Workaround -> priority queue has FranklinTest in method names. 
ssed 's/FranklinTest/Franklin/' -i $OUT_DIR/PriorityQueueTest.sol @@ -35,11 +37,17 @@ ssed 's/FranklinTest/Franklin/' -i $OUT_DIR/PriorityQueueTest.sol set_constant() { ssed -E "s/(.*constant $1)(.*)\;/\1 = $2\;/" -i $3 } +create_constant_getter() { + ssed -E "s/ (.*) (constant $1)(.*)\;(.*)/ \1 \2\3\;\4\n function get_$1() external view returns (\1) {\n return $1\;\n }/" -i $2 +} # Change constants set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/FranklinTest.sol set_constant MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/FranklinTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol +set_constant WAIT_UPGRADE_MODE_PERIOD 3 $OUT_DIR/WaitUpgradeModeTest.sol + +create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/WaitUpgradeModeTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index 3c7fc09544..7753453401 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -52,3 +52,29 @@ contract Events { uint256 fee ); } + +/// @title Upgrade mode events +/// @author Matter Labs +contract UpgradeModeEvents { + + /// @notice Upgrade mode enter event + event UpgradeModeActivated( + uint64 version + ); + + /// @notice Upgrade mode cancel event + event UpgradeCanceled( + uint64 version + ); + + /// @notice Upgrade mode closed status event + event UpgradeModeClosedStatusActivated( + uint64 version + ); + + /// @notice Upgrade mode complete event + event UpgradeCompleted( + uint64 version + ); + +} diff --git a/contracts/contracts/WaitUpgradeMode.sol b/contracts/contracts/WaitUpgradeMode.sol new file mode 100644 index 0000000000..5c124fc4ff --- /dev/null +++ b/contracts/contracts/WaitUpgradeMode.sol @@ -0,0 +1,96 @@ +pragma solidity 0.5.16; + +import "./Events.sol"; +import "./Ownable.sol"; + + +/// @title WaitUpgradeMode Contract +/// @author Matter Labs +contract WaitUpgradeMode is UpgradeModeEvents, Ownable { + + /// @notice Waiting period to activate closed status mode (in seconds) + uint256 constant WAIT_UPGRADE_MODE_PERIOD = 60 * 60 * 24 * 7 * 2; /// two weeks + + /// @notice Version of upgradeable field + uint64 public version; + + /// @notice Flag indicating that wait upgrade mode is active + bool public waitUpgradeModeActive; + + /// @notice Flag indicating that closed status is active + bool public closedStatusActive; + + /// @notice Time of activating waiting upgrade mode + /// @dev Will be equal to zero in case of not active mode + uint256 public activationTime; + + /// @notice Contract constructor + /// @dev Calls Ownable contract constructor + constructor() Ownable() public { + version = 0; + waitUpgradeModeActive = false; + closedStatusActive = false; + activationTime = 0; + } + + /// @notice Activates wait upgrade mode + function activate() external { + requireMaster(msg.sender); + require( + !waitUpgradeModeActive, + "uma11" + ); // uma11 - unable to activate active mode + + waitUpgradeModeActive = true; + closedStatusActive = false; + activationTime = now; + emit UpgradeModeActivated(version); + } + + /// @notice Cancels upgrade + function cancel() external { + requireMaster(msg.sender); + require( + waitUpgradeModeActive, + "umc11" + ); // umc11 - unable to cancel not active mode + + waitUpgradeModeActive = false; + closedStatusActive = false; + activationTime = 0; + emit UpgradeCanceled(version); + } + + /// @notice Checks that closed status is active and activates it if needed + /// @return Bool flag indicating that closed status is active + 
function isClosedStatusActive() public returns (bool) { + if (!waitUpgradeModeActive) { + return false; + } + if (closedStatusActive) { + return true; + } + if (now >= activationTime + WAIT_UPGRADE_MODE_PERIOD) { + closedStatusActive = true; + emit UpgradeModeClosedStatusActivated(version); + } + return closedStatusActive; + } + + /// @notice Finishes upgrade + function finish() external { + requireMaster(msg.sender); + require( + closedStatusActive, + "umf11" + ); // umf11 - unable to finish upgrade without closed status active + + waitUpgradeModeActive = false; + closedStatusActive = false; + activationTime = 0; + + emit UpgradeCompleted(version); + version++; + } + +} diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index 98271574fd..0b70750b27 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -3,6 +3,8 @@ const { expect, use } = require("chai") const { createMockProvider, getWallets, solidity, deployContract } = require("ethereum-waffle"); const { bigNumberify, parseEther, hexlify, formatEther } = require("ethers/utils"); +const SKIP_TEST = true; + // For: geth // const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); @@ -43,5 +45,6 @@ module.exports = { wallet2, exitWallet, deployTestContract, - getCallRevertReason -} \ No newline at end of file + getCallRevertReason, + SKIP_TEST +} diff --git a/contracts/test/unit_tests/waitUpgradeMode_test.js b/contracts/test/unit_tests/waitUpgradeMode_test.js new file mode 100644 index 0000000000..c9b898f87f --- /dev/null +++ b/contracts/test/unit_tests/waitUpgradeMode_test.js @@ -0,0 +1,105 @@ +const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); +const { wallet1, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = require("./common") + +const { performance } = require('perf_hooks'); + + +describe("WaitUpgradeMode unit test", function () { + this.timeout(50000); + + let testContract + before(async () => { + testContract = await deployContract(wallet1, require('../../build/WaitUpgradeModeTest'), [], { + gasLimit: 6000000, + }) + }); + + it("checking that requireMaster calls present", async () => { + let testContract_with_wallet2_signer = await testContract.connect(wallet2); + expect(await getCallRevertReason( () => testContract_with_wallet2_signer.activate() )).equal("oro11") + expect(await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).equal("oro11") + expect(await getCallRevertReason( () => testContract_with_wallet2_signer.isClosedStatusActive() )).equal("VM did not revert") + expect(await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).equal("oro11") + }); + + it("test activate, test cancel, test finish without closed status active", async () => { + // activate + await expect(testContract.activate()) + .to.emit(testContract, 'UpgradeModeActivated') + .withArgs(0); + + expect(await testContract.waitUpgradeModeActive()).to.equal(true) + await testContract.isClosedStatusActive(); + expect(await testContract.closedStatusActive()).to.equal(false) + + expect(await getCallRevertReason( () => testContract.activate() )).equal("uma11") + + // cancel + await expect(testContract.cancel()) + .to.emit(testContract, 'UpgradeCanceled') + .withArgs(0); + + expect(await testContract.waitUpgradeModeActive()).to.equal(false) + + expect(await getCallRevertReason( () => testContract.cancel() )).equal("umc11") + + // finish + expect(await getCallRevertReason( () => 
testContract.finish() )).equal("umf11") + }); + + if (SKIP_TEST) { + it.skip("checking that the upgrade is done correctly", async () => {}); + } + else { + it("checking that the upgrade is done correctly", async () => { + let start_time = performance.now(); + + // activate + await expect(testContract.activate()) + .to.emit(testContract, 'UpgradeModeActivated') + .withArgs(0); + + let activated_time = performance.now(); + + // wait and activate closed status + let all_time_in_sec = parseInt(await testContract.get_WAIT_UPGRADE_MODE_PERIOD()); + for (let step = 1; step <= 3; step++) { + if (step != 3) { + while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + // wait + } + } else { + while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + // wait + } + } + + if (step != 3) { + await testContract.isClosedStatusActive(); + expect(await testContract.closedStatusActive()).to.equal(false) + } else { + await expect(testContract.isClosedStatusActive()) + .to.emit(testContract, 'UpgradeModeClosedStatusActivated') + .withArgs(0); + expect(await testContract.closedStatusActive()).to.equal(true) + } + } + + // finish + await expect(testContract.finish()) + .to.emit(testContract, 'UpgradeCompleted') + .withArgs(0); + + + // one more activate and cancel with version equal to 1 + await expect(testContract.activate()) + .to.emit(testContract, 'UpgradeModeActivated') + .withArgs(1); + await expect(testContract.cancel()) + .to.emit(testContract, 'UpgradeCanceled') + .withArgs(1); + }); + } + +}); From 76e761ad1164f10b563d0c4721bfbbc762ff526d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 24 Feb 2020 15:20:58 +0200 Subject: [PATCH 006/186] Added force cancellation of upgrade --- bin/prepare-test-contracts.sh | 15 ++++-- contracts/contracts/Events.sol | 5 ++ .../{WaitUpgradeMode.sol => UpgradeMode.sol} | 33 +++++++++++-- ...pgradeMode_test.js => upgradeMode_test.js} | 46 ++++++++++++++++++- 4 files changed, 88 insertions(+), 11 deletions(-) rename contracts/contracts/{WaitUpgradeMode.sol => UpgradeMode.sol} (72%) rename contracts/test/unit_tests/{waitUpgradeMode_test.js => upgradeMode_test.js} (69%) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index b9bb4511a2..0490d6064e 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -16,7 +16,7 @@ cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol -cp $IN_DIR/WaitUpgradeMode.sol $OUT_DIR/WaitUpgradeModeTest.sol +cp $IN_DIR/UpgradeMode.sol $OUT_DIR/UpgradeModeTest.sol # Rename contracts ssed 's/Governance/GovernanceTest/' -i $OUT_DIR/*.sol @@ -26,9 +26,14 @@ ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/PriorityQueue/PriorityQueueTest/' -i $OUT_DIR/*.sol ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol -ssed 's/WaitUpgradeMode/WaitUpgradeModeTest/' -i $OUT_DIR/*.sol +ssed 's/UpgradeMode/UpgradeModeTest/' -i $OUT_DIR/*.sol # Workaround -> priority queue has FranklinTest in method names. ssed 's/FranklinTest/Franklin/' -i $OUT_DIR/PriorityQueueTest.sol +# Workaround -> upgrade mode has UpgradeMode in event and variable names. 
+ssed 's/UpgradeModeTestActivated/UpgradeModeActivated/' -i $OUT_DIR/UpgradeModeTest.sol +ssed 's/UpgradeModeTestClosedStatusActivated/UpgradeModeClosedStatusActivated/' -i $OUT_DIR/UpgradeModeTest.sol +ssed 's/UpgradeModeTestForciblyCanceled/UpgradeModeForciblyCanceled/' -i $OUT_DIR/UpgradeModeTest.sol +ssed 's/waitUpgradeModeTestActive/waitUpgradeModeActive/' -i $OUT_DIR/UpgradeModeTest.sol # Changes solidity constant to provided value @@ -45,9 +50,11 @@ create_constant_getter() { set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/FranklinTest.sol set_constant MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/FranklinTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol -set_constant WAIT_UPGRADE_MODE_PERIOD 3 $OUT_DIR/WaitUpgradeModeTest.sol +set_constant MAX_UPGRADE_PERIOD 5 $OUT_DIR/UpgradeModeTest.sol +set_constant WAIT_UPGRADE_MODE_PERIOD 3 $OUT_DIR/UpgradeModeTest.sol -create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/WaitUpgradeModeTest.sol +create_constant_getter MAX_UPGRADE_PERIOD $OUT_DIR/UpgradeModeTest.sol +create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeModeTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index 7753453401..d1987660b6 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -72,6 +72,11 @@ contract UpgradeModeEvents { uint64 version ); + /// @notice Upgrade mode force cancellation event + event UpgradeForciblyCanceled( + uint64 version + ); + /// @notice Upgrade mode complete event event UpgradeCompleted( uint64 version diff --git a/contracts/contracts/WaitUpgradeMode.sol b/contracts/contracts/UpgradeMode.sol similarity index 72% rename from contracts/contracts/WaitUpgradeMode.sol rename to contracts/contracts/UpgradeMode.sol index 5c124fc4ff..604fa1dadc 100644 --- a/contracts/contracts/WaitUpgradeMode.sol +++ b/contracts/contracts/UpgradeMode.sol @@ -4,12 +4,16 @@ import "./Events.sol"; import "./Ownable.sol"; -/// @title WaitUpgradeMode Contract +/// @title UpgradeMode Contract /// @author Matter Labs -contract WaitUpgradeMode is UpgradeModeEvents, Ownable { +contract UpgradeMode is UpgradeModeEvents, Ownable { + + /// @notice Maximal upgrade time (in seconds) + /// @dev After this period from the start of the upgrade anyone can cancel it forcibly + uint256 constant MAX_UPGRADE_PERIOD = 60 * 60 * 24 * 14; /// 14 days /// @notice Waiting period to activate closed status mode (in seconds) - uint256 constant WAIT_UPGRADE_MODE_PERIOD = 60 * 60 * 24 * 7 * 2; /// two weeks + uint256 constant WAIT_UPGRADE_MODE_PERIOD = 60 * 60 * 24 * 10; /// 10 days /// @notice Version of upgradeable field uint64 public version; @@ -77,18 +81,37 @@ contract WaitUpgradeMode is UpgradeModeEvents, Ownable { return closedStatusActive; } + /// @notice Force cancellation + function forceCancel() external { + requireMaster(msg.sender); + + require( + waitUpgradeModeActive, + "ucf11" + ); // ucf11 - unable to cancel not active mode + + require( + now >= activationTime + MAX_UPGRADE_PERIOD, + "ucf12" + ); // ucf12 - unable to force cancel upgrade until MAX_UPGRADE_PERIOD passes + + waitUpgradeModeActive = false; + closedStatusActive = false; + activationTime = 0; + emit UpgradeForciblyCanceled(version); + } + /// @notice Finishes upgrade function finish() external { requireMaster(msg.sender); require( - closedStatusActive, + isClosedStatusActive(), "umf11" ); // umf11 - unable to finish upgrade without closed status active 
waitUpgradeModeActive = false; closedStatusActive = false; activationTime = 0; - emit UpgradeCompleted(version); version++; } diff --git a/contracts/test/unit_tests/waitUpgradeMode_test.js b/contracts/test/unit_tests/upgradeMode_test.js similarity index 69% rename from contracts/test/unit_tests/waitUpgradeMode_test.js rename to contracts/test/unit_tests/upgradeMode_test.js index c9b898f87f..e3ba24df00 100644 --- a/contracts/test/unit_tests/waitUpgradeMode_test.js +++ b/contracts/test/unit_tests/upgradeMode_test.js @@ -5,12 +5,12 @@ const { wallet1, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = const { performance } = require('perf_hooks'); -describe("WaitUpgradeMode unit test", function () { +describe("UpgradeMode unit test", function () { this.timeout(50000); let testContract before(async () => { - testContract = await deployContract(wallet1, require('../../build/WaitUpgradeModeTest'), [], { + testContract = await deployContract(wallet1, require('../../build/UpgradeModeTest'), [], { gasLimit: 6000000, }) }); @@ -20,6 +20,7 @@ describe("WaitUpgradeMode unit test", function () { expect(await getCallRevertReason( () => testContract_with_wallet2_signer.activate() )).equal("oro11") expect(await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).equal("oro11") expect(await getCallRevertReason( () => testContract_with_wallet2_signer.isClosedStatusActive() )).equal("VM did not revert") + expect(await getCallRevertReason( () => testContract_with_wallet2_signer.forceCancel() )).equal("oro11") expect(await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).equal("oro11") }); @@ -102,4 +103,45 @@ describe("WaitUpgradeMode unit test", function () { }); } + if (SKIP_TEST) { + it.skip("checking that force cancellation works correctly", async () => {}); + } + else { + it("checking that force cancellation works correctly", async () => { + let start_time = performance.now(); + + // activate + await expect(testContract.activate()) + .to.emit(testContract, 'UpgradeModeActivated') + .withArgs(1); + + let activated_time = performance.now(); + + // wait and force cancel + let all_time_in_sec = parseInt(await testContract.get_MAX_UPGRADE_PERIOD()); + for (let step = 1; step <= 5; step++) { + if (step != 5) { + while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + // wait + } + } else { + while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + // wait + } + } + + if (step != 5) { + expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ucf12") + } else { + await expect(testContract.forceCancel()) + .to.emit(testContract, 'UpgradeForciblyCanceled') + .withArgs(1); + expect(await testContract.waitUpgradeModeActive()).to.equal(false) + } + } + + expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ucf11") + }); + } + }); From 9beab62be9f606c35e21dc9c4e810bdc5fdad033 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Wed, 26 Feb 2020 11:16:38 +0200 Subject: [PATCH 007/186] Added Upgradeable.sol, Proxy.sol and tests for them --- bin/prepare-test-contracts.sh | 10 +- contracts/contracts/Ownable.sol | 2 + contracts/contracts/Proxy.sol | 55 +++++ contracts/contracts/UpgradeMode.sol | 40 ++-- contracts/contracts/Upgradeable.sol | 202 ++++++++++++++++++ contracts/contracts/test/DummyTarget.sol | 59 +++++ contracts/test/unit_tests/common.js | 2 +- contracts/test/unit_tests/proxy_test.js | 165 ++++++++++++++ 
contracts/test/unit_tests/upgradeMode_test.js | 24 +-- 9 files changed, 524 insertions(+), 35 deletions(-) create mode 100644 contracts/contracts/Proxy.sol create mode 100644 contracts/contracts/Upgradeable.sol create mode 100644 contracts/contracts/test/DummyTarget.sol create mode 100644 contracts/test/unit_tests/proxy_test.js diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 0490d6064e..14114c9003 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -17,6 +17,8 @@ cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol cp $IN_DIR/UpgradeMode.sol $OUT_DIR/UpgradeModeTest.sol +cp $IN_DIR/Upgradeable.sol $OUT_DIR/UpgradeableTest.sol +cp $IN_DIR/Proxy.sol $OUT_DIR/ProxyTest.sol # Rename contracts ssed 's/Governance/GovernanceTest/' -i $OUT_DIR/*.sol @@ -26,10 +28,16 @@ ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/PriorityQueue/PriorityQueueTest/' -i $OUT_DIR/*.sol ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol -ssed 's/UpgradeMode/UpgradeModeTest/' -i $OUT_DIR/*.sol +ssed 's/UpgradeMode/UpgradeModeTest/g' -i $OUT_DIR/*.sol +ssed 's/Upgradeable/UpgradeableTest/' -i $OUT_DIR/*.sol +ssed 's/Proxy/ProxyTest/' -i $OUT_DIR/*.sol # Workaround -> priority queue has FranklinTest in method names. ssed 's/FranklinTest/Franklin/' -i $OUT_DIR/PriorityQueueTest.sol +# Workaround -> ownable and upgradeable has Storage in comments. +ssed 's/StorageTest/Storage/' -i $OUT_DIR/OwnableTest.sol +ssed 's/StorageTest/Storage/' -i $OUT_DIR/UpgradeableTest.sol # Workaround -> upgrade mode has UpgradeMode in event and variable names. +ssed 's/UpgradeModeTestEvents/UpgradeModeEvents/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/UpgradeModeTestActivated/UpgradeModeActivated/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/UpgradeModeTestClosedStatusActivated/UpgradeModeClosedStatusActivated/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/UpgradeModeTestForciblyCanceled/UpgradeModeForciblyCanceled/' -i $OUT_DIR/UpgradeModeTest.sol diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol index fce2af6415..1c4c17c2fd 100644 --- a/contracts/contracts/Ownable.sol +++ b/contracts/contracts/Ownable.sol @@ -3,6 +3,7 @@ pragma solidity 0.5.16; /// @title Ownable Contract /// @author Matter Labs contract Ownable { + /// @notice Storage position of the masters address bytes32 private constant masterPosition = keccak256("master"); @@ -49,4 +50,5 @@ contract Ownable { ); // otp11 - new masters address can't be zero address setMaster(_newMaster); } + } diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol new file mode 100644 index 0000000000..23ade838b4 --- /dev/null +++ b/contracts/contracts/Proxy.sol @@ -0,0 +1,55 @@ +pragma solidity 0.5.16; + +import "./Upgradeable.sol"; + + +/// @title Proxy Contract +/// @author Matter Labs +contract Proxy is Upgradeable { + + /// @notice Contract constructor + /// @dev Calls Upgradeable contract constructor + constructor() Upgradeable() public {} + + /// @notice Performs a delegatecall to the contract implementation + /// @dev Fallback function allowing to perform a delegatecall to the given implementation + /// This function will return whatever the implementation call returns + function() external payable { + require( + msg.data.length > 0, + "pfb11" + ); // pfb11 - calldata must not be empty + + address _target = getTarget(); + assembly { + // The pointer to the 
free memory slot + let ptr := mload(0x40) + // Copy function signature and arguments from calldata at zero position into memory at pointer position + calldatacopy(ptr, 0x0, calldatasize) + // Delegatecall method of the implementation contract, returns 0 on error + let result := delegatecall( + gas, + _target, + ptr, + calldatasize, + 0x0, + 0 + ) + // Get the size of the last return data + let size := returndatasize + // Copy the size length of bytes from return data at zero position to pointer position + returndatacopy(ptr, 0x0, size) + // Depending on result value + switch result + case 0 { + // End execution and revert state changes + revert(ptr, size) + } + default { + // Return data with length of size at pointers position + return(ptr, size) + } + } + } + +} diff --git a/contracts/contracts/UpgradeMode.sol b/contracts/contracts/UpgradeMode.sol index 604fa1dadc..c7976d7ab1 100644 --- a/contracts/contracts/UpgradeMode.sol +++ b/contracts/contracts/UpgradeMode.sol @@ -31,7 +31,7 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { /// @notice Contract constructor /// @dev Calls Ownable contract constructor constructor() Ownable() public { - version = 0; + version = 1; waitUpgradeModeActive = false; closedStatusActive = false; activationTime = 0; @@ -65,6 +65,24 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { emit UpgradeCanceled(version); } + /// @notice Force upgrade cancellation + function forceCancel() external { + requireMaster(msg.sender); + require( + waitUpgradeModeActive, + "ufc11" + ); // ufc11 - unable to cancel not active mode + require( + now >= activationTime + MAX_UPGRADE_PERIOD, + "ufc12" + ); // ufc12 - unable to force cancel upgrade until MAX_UPGRADE_PERIOD passes + + waitUpgradeModeActive = false; + closedStatusActive = false; + activationTime = 0; + emit UpgradeForciblyCanceled(version); + } + /// @notice Checks that closed status is active and activates it if needed /// @return Bool flag indicating that closed status is active function isClosedStatusActive() public returns (bool) { @@ -81,26 +99,6 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { return closedStatusActive; } - /// @notice Force cancellation - function forceCancel() external { - requireMaster(msg.sender); - - require( - waitUpgradeModeActive, - "ucf11" - ); // ucf11 - unable to cancel not active mode - - require( - now >= activationTime + MAX_UPGRADE_PERIOD, - "ucf12" - ); // ucf12 - unable to force cancel upgrade until MAX_UPGRADE_PERIOD passes - - waitUpgradeModeActive = false; - closedStatusActive = false; - activationTime = 0; - emit UpgradeForciblyCanceled(version); - } - /// @notice Finishes upgrade function finish() external { requireMaster(msg.sender); diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol new file mode 100644 index 0000000000..194601cabe --- /dev/null +++ b/contracts/contracts/Upgradeable.sol @@ -0,0 +1,202 @@ +pragma solidity 0.5.16; + +import "./Ownable.sol"; +import "./UpgradeMode.sol"; + + +/// @title Upgradeable contract +/// @author Matter Labs +contract Upgradeable is Ownable { + + /// @notice Storage position of contract version index + bytes32 private constant versionPosition = keccak256("version"); + + /// @notice Storage position of "target" (actual implementation address) + bytes32 private constant targetPosition = keccak256("target"); + + /// @notice Storage position of next "target" (in case the contract is in status of waiting to upgrade) + /// @dev Will store zero in case of not active upgrade mode + bytes32 
private constant nextTargetPosition = keccak256("nextTarget"); + + /// @notice Storage position of UpgradeMode contract address + bytes32 private constant upgradeModeAddressPosition = keccak256("UpgradeModeAddress"); + + /// @notice Contract constructor + /// @dev Calls Ownable contract constructor and creates UpgradeMode contract + constructor() Ownable() public { + setVersion(0); + setTarget(address(0)); + setNextTarget(address(0)); + setUpgradeModeAddress(address(new UpgradeMode())); + } + + /// @notice Upgradeable contract initialization + /// @param _target Initial implementation address + /// @param _targetInitializationParameters Target initialization parameters + function initialize(address _target, bytes calldata _targetInitializationParameters) external { + requireMaster(msg.sender); + require( + getVersion() == 0, + "uin11" + ); // uin11 - upgradeable contract already initialized + + setVersion(1); + + setTarget(_target); + (bool initializationSuccess, ) = getTarget().delegatecall( + abi.encodeWithSignature("initialize(address,bytes)", getUpgradeModeAddress(), _targetInitializationParameters) + ); + require( + initializationSuccess, + "uin12" + ); // uin12 - target initialization failed + } + + /// @notice Returns contract version index + /// @return Contract version index + function getVersion() public view returns (uint64 version) { + bytes32 position = versionPosition; + assembly { + version := sload(position) + } + } + + /// @notice Sets new contract version index + /// @param _newVersion New contract version index + function setVersion(uint64 _newVersion) internal { + bytes32 position = versionPosition; + assembly { + sstore(position, _newVersion) + } + } + + /// @notice Returns target of contract + /// @return Actual implementation address + function getTarget() public view returns (address target) { + bytes32 position = targetPosition; + assembly { + target := sload(position) + } + } + + /// @notice Sets new target of contract + /// @param _newTarget New actual implementation address + function setTarget(address _newTarget) internal { + bytes32 position = targetPosition; + assembly { + sstore(position, _newTarget) + } + } + + /// @notice Returns next target + /// @return Next target address + function getNextTarget() public view returns (address nextTarget) { + bytes32 position = nextTargetPosition; + assembly { + nextTarget := sload(position) + } + } + + /// @notice Sets new next target + /// @param _newNextTarget New next target value + function setNextTarget(address _newNextTarget) internal { + bytes32 position = nextTargetPosition; + assembly { + sstore(position, _newNextTarget) + } + } + + /// @notice Returns UpgradeMode contract address + /// @return UpgradeMode contract address + function getUpgradeModeAddress() public view returns (address upgradeModeAddress) { + bytes32 position = upgradeModeAddressPosition; + assembly { + upgradeModeAddress := sload(position) + } + } + + /// @notice Sets new UpgradeMode contract address + /// @param _newUpgradeModeAddress New UpgradeMode contract address + function setUpgradeModeAddress(address _newUpgradeModeAddress) internal { + bytes32 position = upgradeModeAddressPosition; + assembly { + sstore(position, _newUpgradeModeAddress) + } + } + + /// @notice Starts upgrade + /// @param _newTarget Next actual implementation address + function upgradeTarget(address _newTarget) external { + requireMaster(msg.sender); + require( + _newTarget != address(0), + "uut11" + ); // uut11 - new actual implementation address can't be zero 
address + require( + getTarget() != _newTarget, + "uut12" + ); // uut12 - new actual implementation address can't be equal to previous + + UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); + UpgradeMode.activate(); + + setNextTarget(_newTarget); + } + + /// @notice Cancels upgrade + function cancelUpgradeTarget() external { + requireMaster(msg.sender); + + UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); + UpgradeMode.cancel(); + + setNextTarget(address(0)); + } + + /// @notice Force upgrade cancellation + function forceCancelUpgradeTarget() external { + UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); + UpgradeMode.forceCancel(); + + setNextTarget(address(0)); + } + + /// @notice Checks that target is ready to be upgraded + /// @return Bool flag indicating that target is ready to be upgraded + function targetReadyToBeUpgraded() public returns (bool) { + (bool success, bytes memory result) = getTarget().delegatecall(abi.encodeWithSignature("readyToBeUpgraded()")); + require( + success, + "utr11" + ); // utr11 - target readyToBeUpgraded() call failed + + return abi.decode(result, (bool)); + } + + /// @notice Finishes upgrade + /// @param _newTargetInitializationParameters New target initialization parameters + function finishTargetUpgrade(bytes calldata _newTargetInitializationParameters) external { + requireMaster(msg.sender); + require( + targetReadyToBeUpgraded(), + "ufu11" + ); // ufu11 - target is not ready to be upgraded + + UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); + UpgradeMode.finish(); + + setVersion(getVersion() + 1); + + setTarget(getNextTarget()); + setNextTarget(address(0)); + + (bool initializationSuccess, ) = getTarget().delegatecall( + abi.encodeWithSignature("initialize(address,bytes)", getUpgradeModeAddress(), _newTargetInitializationParameters) + ); + require( + initializationSuccess, + "ufu12" + ); // ufu12 - target initialization failed + } + +} diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol new file mode 100644 index 0000000000..fe8ed743a5 --- /dev/null +++ b/contracts/contracts/test/DummyTarget.sol @@ -0,0 +1,59 @@ +pragma solidity 0.5.16; + +interface DummyTarget { + + function get_DUMMY_INDEX() external view returns (uint256); + + function initialize(address _address, bytes calldata _initializationParameters) external; + + function readyToBeUpgraded() external returns (bool); + +} + +contract DummyFirst is DummyTarget { + + uint256 private constant DUMMY_INDEX = 1; + function get_DUMMY_INDEX() external view returns (uint256) { + return DUMMY_INDEX; + } + + function initialize(address _address, bytes calldata _initializationParameters) external { + bytes memory _initializationParameters = _initializationParameters; + bytes32 byte_0 = bytes32(uint256(uint8(_initializationParameters[0]))); + bytes32 byte_1 = bytes32(uint256(uint8(_initializationParameters[1]))); + assembly { + sstore(0, _address) + sstore(1, byte_0) + sstore(2, byte_1) + } + } + + function readyToBeUpgraded() external returns (bool) { + return true; + } + +} + +contract DummySecond is DummyTarget { + + uint256 private constant DUMMY_INDEX = 2; + function get_DUMMY_INDEX() external view returns (uint256) { + return DUMMY_INDEX; + } + + function initialize(address _address, bytes calldata _initializationParameters) external { + bytes memory _initializationParameters = _initializationParameters; + bytes32 byte_0 = bytes32(uint256(uint8(_initializationParameters[0]))); + bytes32 byte_1 = 
bytes32(uint256(uint8(_initializationParameters[1]))); + assembly { + sstore(0, _address) + sstore(2, byte_0) + sstore(3, byte_1) + } + } + + function readyToBeUpgraded() external returns (bool) { + return false; + } + +} diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index 0b70750b27..8d330c53a8 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -3,7 +3,7 @@ const { expect, use } = require("chai") const { createMockProvider, getWallets, solidity, deployContract } = require("ethereum-waffle"); const { bigNumberify, parseEther, hexlify, formatEther } = require("ethers/utils"); -const SKIP_TEST = true; +const SKIP_TEST = false; // For: geth diff --git a/contracts/test/unit_tests/proxy_test.js b/contracts/test/unit_tests/proxy_test.js new file mode 100644 index 0000000000..fcfbaf6aa5 --- /dev/null +++ b/contracts/test/unit_tests/proxy_test.js @@ -0,0 +1,165 @@ +const ethers = require("ethers") +const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); +const { provider, wallet, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = require("./common") + +const { performance } = require('perf_hooks'); + +const proxyTestContractCode = require('../../build/ProxyTest'); + +// some random constants for checking write and read from storage +const bytes = [133, 174, 97, 255] + +describe("Proxy unit test", function () { + this.timeout(50000); + + let proxyTestContract + let proxyDummyInterface + let upgradeModeTestContract + let DummyFirst + let DummySecond + before(async () => { + proxyTestContract = await deployTestContract('../../build/ProxyTest') + proxyDummyInterface = new ethers.Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); + upgradeModeTestContract = new ethers.Contract(proxyTestContract.getUpgradeModeTestAddress(), require('../../build/UpgradeModeTest').interface, wallet); + DummyFirst = await deployTestContract('../../build/DummyFirst') + DummySecond = await deployTestContract('../../build/DummySecond') + await proxyTestContract.initialize(DummyFirst.address, [bytes[0], bytes[1]]); + }); + + it("checking Proxy creation", async () => { + // check version + expect(await proxyTestContract.getVersion()).to.equal(1) + + // check target storage + expect((await provider.getStorageAt(proxyTestContract.address, ethers.utils.id("target"))).toLowerCase()) + .equal(DummyFirst.address.toLowerCase()); + expect((await proxyTestContract.getTarget()).toLowerCase()) + .equal(DummyFirst.address.toLowerCase()); + + // check dummy index + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(1); + + // check initial storage + expect((await provider.getStorageAt(proxyTestContract.address, 0)).toLowerCase()) + .equal((await proxyTestContract.getUpgradeModeTestAddress()).toLowerCase()); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) + .to.equal(bytes[0]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) + .to.equal(bytes[1]); + }); + + it("checking that requireMaster calls present", async () => { + let proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); + expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.upgradeTarget(DummySecond.address) )).equal("oro11") + expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.cancelUpgradeTarget() )).equal("oro11") + expect(await getCallRevertReason( () => 
proxyTestContract_with_wallet2_signer.finishTargetUpgrade([]) )).equal("oro11") + + // bonus: check that force cancellation do not have requireMaster call + expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.forceCancelUpgradeTarget() )).to.not.equal("oro11") + }); + + it("check Proxy reverts", async () => { + expect(await getCallRevertReason( () => proxyTestContract.initialize(DummyFirst.address, []) )).equal("uin11"); + expect(await getCallRevertReason( () => proxyTestContract.upgradeTarget("0x0000000000000000000000000000000000000000") )).equal("uut11"); + expect(await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).equal("uut12"); + }); + + it("check upgrade canceling", async () => { + // activate and cancel + await proxyTestContract.upgradeTarget(DummySecond.address); + await proxyTestContract.cancelUpgradeTarget(); + }); + + if (SKIP_TEST) { + it.skip("checking that the upgrade is done correctly", async () => {}); + } + else { + it("checking that the upgrade is done correctly", async () => { + let start_time = performance.now(); + + // activate + await proxyTestContract.upgradeTarget(DummySecond.address); + + let activated_time = performance.now(); + + // wait and finish upgrade + let all_time_in_sec = parseInt(await upgradeModeTestContract.get_WAIT_UPGRADE_MODE_PERIOD()); + for (let step = 1; step <= 3; step++) { + if (step != 3) { + while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + // wait + } + } else { + while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + // wait + } + } + + if (step != 3) { + expect(await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).equal("umf11"); + } else { + await proxyTestContract.finishTargetUpgrade([bytes[2], bytes[3]]); + } + } + + // check dummy index + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(2); + + // check updated storage + expect((await provider.getStorageAt(proxyTestContract.address, 0)).toLowerCase()) + .equal((await proxyTestContract.getUpgradeModeTestAddress()).toLowerCase()); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) + .to.equal(bytes[0]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) + .to.equal(bytes[2]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 3))) + .to.equal(bytes[3]); + }); + } + + if (SKIP_TEST) { + it.skip("checking that force cancellation works correctly", async () => {}); + } + else { + it("checking that force cancellation works correctly", async () => { + expect(await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).equal("ufc11"); + + let start_time = performance.now(); + + // activate + await proxyTestContract.upgradeTarget(DummyFirst.address); + + let activated_time = performance.now(); + + // wait and finish upgrade + let all_time_in_sec = parseInt(await upgradeModeTestContract.get_MAX_UPGRADE_PERIOD()); + for (let step = 1; step <= 3; step++) { + if (step != 3) { + while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + // wait + } + } else { + while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + // wait + } + } + + if (step != 3) { + expect(await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).equal("ufc12"); + } else { + expect(await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).equal("ufu11"); + 
await proxyTestContract.forceCancelUpgradeTarget(); + } + } + + expect(await upgradeModeTestContract.waitUpgradeModeActive()).to.equal(false) + // check dummy index + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(2); + }); + } + +}); diff --git a/contracts/test/unit_tests/upgradeMode_test.js b/contracts/test/unit_tests/upgradeMode_test.js index e3ba24df00..a411844aba 100644 --- a/contracts/test/unit_tests/upgradeMode_test.js +++ b/contracts/test/unit_tests/upgradeMode_test.js @@ -28,7 +28,7 @@ describe("UpgradeMode unit test", function () { // activate await expect(testContract.activate()) .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(0); + .withArgs(1); expect(await testContract.waitUpgradeModeActive()).to.equal(true) await testContract.isClosedStatusActive(); @@ -39,7 +39,7 @@ describe("UpgradeMode unit test", function () { // cancel await expect(testContract.cancel()) .to.emit(testContract, 'UpgradeCanceled') - .withArgs(0); + .withArgs(1); expect(await testContract.waitUpgradeModeActive()).to.equal(false) @@ -59,7 +59,7 @@ describe("UpgradeMode unit test", function () { // activate await expect(testContract.activate()) .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(0); + .withArgs(1); let activated_time = performance.now(); @@ -82,7 +82,7 @@ describe("UpgradeMode unit test", function () { } else { await expect(testContract.isClosedStatusActive()) .to.emit(testContract, 'UpgradeModeClosedStatusActivated') - .withArgs(0); + .withArgs(1); expect(await testContract.closedStatusActive()).to.equal(true) } } @@ -90,16 +90,16 @@ describe("UpgradeMode unit test", function () { // finish await expect(testContract.finish()) .to.emit(testContract, 'UpgradeCompleted') - .withArgs(0); + .withArgs(1); - // one more activate and cancel with version equal to 1 + // one more activate and cancel with version equal to 2 await expect(testContract.activate()) .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(1); + .withArgs(2); await expect(testContract.cancel()) .to.emit(testContract, 'UpgradeCanceled') - .withArgs(1); + .withArgs(2); }); } @@ -113,7 +113,7 @@ describe("UpgradeMode unit test", function () { // activate await expect(testContract.activate()) .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(1); + .withArgs(2); let activated_time = performance.now(); @@ -131,16 +131,16 @@ describe("UpgradeMode unit test", function () { } if (step != 5) { - expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ucf12") + expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ufc12") } else { await expect(testContract.forceCancel()) .to.emit(testContract, 'UpgradeForciblyCanceled') - .withArgs(1); + .withArgs(2); expect(await testContract.waitUpgradeModeActive()).to.equal(false) } } - expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ucf11") + expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ufc11") }); } From 9fbeb7e90b68ae73d6768e75bade18613437d3af Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 28 Feb 2020 15:06:10 +0200 Subject: [PATCH 008/186] Added proxy with initialization into contracts and deploying in testnet-deploy.ts and deploy.ts --- Makefile | 1 + contracts/contracts/Franklin.sol | 28 ++++--- contracts/contracts/Governance.sol | 23 +++++- contracts/contracts/PriorityQueue.sol | 9 ++- contracts/contracts/Storage.sol | 5 ++ contracts/contracts/Verifier.sol | 19 +++++ contracts/package.json | 1 + contracts/scripts/testnet-deploy.ts | 
98 +++++++++++++++++------- contracts/src.ts/deploy.ts | 104 +++++++++++++++++++------- contracts/test/fails_tests.ts | 37 +++++++-- contracts/test/integration_new.ts | 39 +++++++++- 11 files changed, 285 insertions(+), 79 deletions(-) diff --git a/Makefile b/Makefile index e51505db2e..21e4a11e1d 100644 --- a/Makefile +++ b/Makefile @@ -152,6 +152,7 @@ endef # Flatten contract source flatten: prepare-contracts @mkdir -p contracts/flat + $(call flatten_file,Proxy.sol) $(call flatten_file,Franklin.sol) $(call flatten_file,Governance.sol) $(call flatten_file,PriorityQueue.sol) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index cb02b6d99c..ca13100f7f 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -29,18 +29,28 @@ contract Franklin is Storage, Config, Events { // mapping (uint32 => bool) tokenMigrated; - /// @notice Constructs Franklin contract - /// @param _governanceAddress The address of Governance contract - /// @param _verifierAddress The address of Verifier contract - /// _genesisAccAddress The address of single account, that exists in genesis block - /// @param _genesisRoot Genesis blocks (first block) root - constructor( + /// @notice Franklin contract initialization + /// @param upgradeModeAddress Address of UpgradeMode contract + /// @param initializationParameters Encoded representation of initialization parameters: + /// _governanceAddress The address of Governance contract + /// _verifierAddress The address of Verifier contract + /// _ // FIXME: remove _priorityQueueAddress in tests + /// _ // FIXME: remove _genesisAccAddress + /// _genesisRoot Genesis blocks (first block) root + function initialize( + address upgradeModeAddress, + bytes calldata initializationParameters + ) external { + upgradeMode = UpgradeMode(upgradeModeAddress); + + ( address _governanceAddress, address _verifierAddress, - address, // FIXME: remove _priorityQueueAddress in tests - address, // FIXME: remove _genesisAccAddress + , + , bytes32 _genesisRoot - ) public { + ) = abi.decode(initializationParameters, (address, address, address, address, bytes32)); + verifier = Verifier(_verifierAddress); governance = Governance(_governanceAddress); diff --git a/contracts/contracts/Governance.sol b/contracts/contracts/Governance.sol index b86f9b4700..20f13734d0 100644 --- a/contracts/contracts/Governance.sol +++ b/contracts/contracts/Governance.sol @@ -1,9 +1,15 @@ pragma solidity 0.5.16; +import "./UpgradeMode.sol"; + + /// @title Governance Contract /// @author Matter Labs contract Governance { + /// @notice UpgradeMode contract + UpgradeMode upgradeMode; + /// @notice Token added to Franklin net event TokenAdded( address token, @@ -25,9 +31,20 @@ contract Governance { /// @notice List of permitted validators mapping(address => bool) public validators; - /// @notice Construct Governance contract - /// @param _networkGovernor The address of network governor - constructor(address _networkGovernor) public { + /// @notice Governance contract initialization + /// @param upgradeModeAddress Address of UpgradeMode contract + /// @param initializationParameters Encoded representation of initialization parameters: + /// _networkGovernor The address of network governor + function initialize( + address upgradeModeAddress, + bytes calldata initializationParameters + ) external { + upgradeMode = UpgradeMode(upgradeModeAddress); + + ( + address _networkGovernor + ) = abi.decode(initializationParameters, (address)); + networkGovernor = _networkGovernor; 
validators[_networkGovernor] = true; } diff --git a/contracts/contracts/PriorityQueue.sol b/contracts/contracts/PriorityQueue.sol index 99fbe910b1..7b802712db 100644 --- a/contracts/contracts/PriorityQueue.sol +++ b/contracts/contracts/PriorityQueue.sol @@ -7,7 +7,14 @@ import "./Governance.sol"; // FIXME: remove this from tests and delete contract PriorityQueue { - constructor(address _governanceAddress) public { + constructor() public { + + } + + function initialize( + address upgradeModeAddress, + bytes calldata initializationParameters + ) external { } diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index 61d7e6dd3d..42486a04f0 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -6,11 +6,16 @@ import "./Governance.sol"; import "./Verifier.sol"; import "./PriorityQueue.sol"; +import "./UpgradeMode.sol"; + /// @title zkSync storage contract /// @author Matter Labs contract Storage { + /// @notice UpgradeMode contract + UpgradeMode upgradeMode; + /// @notice Verifier contract. Used to verify block proof and exit proof Verifier internal verifier; diff --git a/contracts/contracts/Verifier.sol b/contracts/contracts/Verifier.sol index 2d01b4f388..f7fec7c800 100644 --- a/contracts/contracts/Verifier.sol +++ b/contracts/contracts/Verifier.sol @@ -3,14 +3,33 @@ pragma solidity 0.5.16; import "./VerificationKey.sol"; import "./VerificationKeyExit.sol"; +import "./UpgradeMode.sol"; + + /// @title Verifier Contract /// @notice Based on https://github.com/HarryR/ethsnarks/blob/master/contracts/Verifier.sol /// @dev TODO: - remove DUMMY_VERIFIER variable for production /// @author Matter Labs contract Verifier is VerificationKey, VerificationKeyExit { + + /// @notice UpgradeMode contract + UpgradeMode upgradeMode; + /// @notice If this flag is true - dummy verification is used instead of full verifier bool constant DUMMY_VERIFIER = false; + /// @notice Verifier contract initialization + /// @param upgradeModeAddress Address of UpgradeMode contract + /// @param initializationParameters Encoded representation of initialization parameters + function initialize( + address upgradeModeAddress, + bytes calldata initializationParameters + ) external { + upgradeMode = UpgradeMode(upgradeModeAddress); + + // parameters are not used during initialization + } + /// @notice Rollup block proof verification /// @param _proof Block proof /// @param _commitment Block commitment diff --git a/contracts/package.json b/contracts/package.json index 013d879f28..7250959601 100644 --- a/contracts/package.json +++ b/contracts/package.json @@ -10,6 +10,7 @@ "axios": "^0.19.0", "chai": "^4.2.0", "ethereum-waffle": "2.0.12", + "ethereumjs-abi": "^0.6.8", "ethers": "4.0.33", "ethjs": "^0.4.0", "fs": "^0.0.1-security", diff --git a/contracts/scripts/testnet-deploy.ts b/contracts/scripts/testnet-deploy.ts index c588101621..0382c5f82d 100644 --- a/contracts/scripts/testnet-deploy.ts +++ b/contracts/scripts/testnet-deploy.ts @@ -20,7 +20,11 @@ import { governanceTestContractCode, priorityQueueTestContractCode, verifierTestContractCode, - franklinTestContractCode + franklinTestContractCode, + proxyContractCode, + proxyContractSourceCode, + proxyTestContractCode, + deployProxy, } from "../src.ts/deploy"; async function main() { @@ -55,48 +59,81 @@ async function main() { let verifierAddress = process.env.VERIFIER_ADDR; let franklinAddress = process.env.CONTRACT_ADDR; - let governanceConstructorArgs = [wallet.address]; - let priorityQueueConstructorArgs = 
[governanceAddress]; - let verifierConstructorArgs = []; - let franklinConstructorArgs = [ - governanceAddress, - verifierAddress, - priorityQueueAddress, - process.env.OPERATOR_FRANKLIN_ADDRESS, - process.env.GENESIS_ROOT || ethers.constants.HashZero, - ]; + let governanceInitArgs = ["address"]; + let governanceInitArgsValues = [wallet.address]; + let priorityQueueInitArgs = ["address"]; + let priorityQueueInitArgsValues = [governanceAddress]; + let verifierInitArgs = []; + let verifierInitArgsValues = []; if (args.deploy) { let timer = new Date().getTime(); + const proxyCode = args.test ? proxyTestContractCode : proxyContractCode; + const governanceCode = args.test ? governanceTestContractCode : governanceContractCode; - const governance = await deployGovernance(wallet, governanceCode, governanceConstructorArgs); + let governance, governanceAddressDeployed; + [governance, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyCode, + governanceCode, + governanceInitArgs, + governanceInitArgsValues, + ); console.log(`Governance contract deployed, time: ${(new Date().getTime() - timer) / 1000} secs`); - governanceAddress = governance.address; + governanceAddress = governanceAddressDeployed; timer = new Date().getTime(); const priorityQueueCode = args.test ? priorityQueueTestContractCode : priorityQueueContractCode; - const priorityQueue = await deployPriorityQueue(wallet, priorityQueueCode, priorityQueueConstructorArgs); + let priorityQueue, priorityQueueAddressDeployed; + [priorityQueue, priorityQueueAddressDeployed] = await deployPriorityQueue( + wallet, + proxyCode, + priorityQueueCode, + priorityQueueInitArgs, + priorityQueueInitArgsValues, + ); console.log(`Priority queue contract deployed, time: ${(new Date().getTime() - timer) / 1000} secs`); - priorityQueueAddress = priorityQueue.address; + priorityQueueAddress = priorityQueueAddressDeployed; timer = new Date().getTime(); const verifierCode = args.test ? verifierTestContractCode : verifierContractCode; - const verifier = await deployVerifier(wallet, verifierCode, verifierConstructorArgs); + let verifier, verifierAddressDeployed; + [verifier, verifierAddressDeployed] = await deployVerifier( + wallet, + proxyCode, + verifierCode, + verifierInitArgs, + verifierInitArgsValues, + ); console.log(`Verifier contract deployed, time: ${(new Date().getTime() - timer) / 1000} secs`); - verifierAddress = verifier.address; + verifierAddress = verifierAddressDeployed; - franklinConstructorArgs = [ - governanceAddress, - verifierAddress, - priorityQueueAddress, - process.env.OPERATOR_FRANKLIN_ADDRESS.replace('sync:', '0x'), + let franklinInitArgs = [ + "address", + "address", + "address", + "address", + "bytes32", + ]; + let franklinInitArgsValues = [ + governance.address, + verifier.address, + priorityQueue.address, + process.env.OPERATOR_FRANKLIN_ADDRESS.replace("sync:", "0x"), process.env.GENESIS_ROOT || ethers.constants.HashZero, ]; timer = new Date().getTime(); const franklinCode = args.test ? 
franklinTestContractCode : franklinContractCode; - const franklin = await deployFranklin(wallet, franklinCode, franklinConstructorArgs); + let franklin, franklinAddressDeployed; + [franklin, franklinAddressDeployed] = await deployFranklin( + wallet, + proxyCode, + franklinCode, + franklinInitArgs, + franklinInitArgsValues, + ); console.log(`Main contract deployed, time: ${(new Date().getTime() - timer) / 1000} secs`); - franklinAddress = franklin.address; + franklinAddress = franklinAddressDeployed; await governance.setValidator(process.env.OPERATOR_ETH_ADDRESS, true); @@ -114,10 +151,15 @@ async function main() { ]); } else { await Promise.all([ - publishSourceCodeToEtherscan('Governance', governanceAddress, governanceContractSourceCode, governanceContractCode, governanceConstructorArgs), - publishSourceCodeToEtherscan('PriorityQueue', priorityQueueAddress, priorityQueueContractSourceCode, priorityQueueContractCode, priorityQueueConstructorArgs), - publishSourceCodeToEtherscan('Verifier', verifierAddress, verifierContractSourceCode, verifierContractCode, verifierConstructorArgs), - publishSourceCodeToEtherscan('Franklin', franklinAddress, franklinContractSourceCode, franklinContractCode, franklinConstructorArgs), + publishSourceCodeToEtherscan('GovernanceProxy', governance.address, proxyContractSourceCode, proxyContractCode, []), + publishSourceCodeToEtherscan('PriorityQueueProxy', priorityQueue.address, proxyContractSourceCode, proxyContractCode, []), + publishSourceCodeToEtherscan('VerifierProxy', verifier.address, proxyContractSourceCode, proxyContractCode, []), + publishSourceCodeToEtherscan('FranklinProxy', franklin.address, proxyContractSourceCode, proxyContractCode, []), + + publishSourceCodeToEtherscan('Governance', governanceAddress, governanceContractSourceCode, governanceContractCode, []), + publishSourceCodeToEtherscan('PriorityQueue', priorityQueueAddress, priorityQueueContractSourceCode, priorityQueueContractCode, []), + publishSourceCodeToEtherscan('Verifier', verifierAddress, verifierContractSourceCode, verifierContractCode, []), + publishSourceCodeToEtherscan('Franklin', franklinAddress, franklinContractSourceCode, franklinContractCode, []), ]); } } catch (e) { diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index ddd59e0e4d..f9d370da92 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -7,7 +7,7 @@ import * as url from 'url'; import * as fs from 'fs'; import * as path from 'path'; - +const abi = require('ethereumjs-abi') const sleep = async ms => await new Promise(resolve => setTimeout(resolve, ms)); export const ERC20MintableContract = function () { @@ -16,16 +16,19 @@ export const ERC20MintableContract = function () { return contract }(); +export const proxyContractCode = require(`../flat_build/Proxy`); export const franklinContractCode = require(`../flat_build/Franklin`); export const verifierContractCode = require(`../flat_build/Verifier`); export const governanceContractCode = require(`../flat_build/Governance`); export const priorityQueueContractCode = require(`../flat_build/PriorityQueue`); +export const proxyContractSourceCode = fs.readFileSync('flat/Proxy.sol', 'utf8'); export const franklinContractSourceCode = fs.readFileSync('flat/Franklin.sol', 'utf8'); export const verifierContractSourceCode = fs.readFileSync('flat/Verifier.sol', 'utf8'); export const governanceContractSourceCode = fs.readFileSync('flat/Governance.sol', 'utf8'); export const priorityQueueContractSourceCode = 
fs.readFileSync('flat/PriorityQueue.sol', 'utf8'); +export const proxyTestContractCode = require('../build/ProxyTest'); export const franklinTestContractCode = require('../build/FranklinTest'); export const verifierTestContractCode = require('../build/VerifierTest'); export const governanceTestContractCode = require('../build/GovernanceTest'); @@ -94,19 +97,41 @@ export async function publishSourceCodeToEtherscan(contractname, contractaddress } } +export async function deployProxy( + wallet, + proxyCode, +) { + try { + const proxy = await deployContract(wallet, proxyCode, [], { + gasLimit: 3000000, + }); + + return proxy; + } catch (err) { + console.log("Proxy deploy error:" + err); + } +} + export async function deployGovernance( wallet, + proxyCode, governanceCode, - constructorArgs + initArgs, + initArgsValues, ) { try { - let governance = await deployContract(wallet, governanceCode, constructorArgs, { + const proxy = await deployProxy(wallet, proxyCode); + const governance = await deployContract(wallet, governanceCode, [], { gasLimit: 3000000, }); - console.log(`GOVERNANCE_GENESIS_TX_HASH=${governance.deployTransaction.hash}`); - console.log(`GOVERNANCE_ADDR=${governance.address}`); + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const tx = await proxy.initialize(governance.address, initArgsInBytes); + await tx.wait(); - return governance; + const returnContract = new ethers.Contract(proxy.address, governanceCode.interface, wallet); + console.log(`GOVERNANCE_GENESIS_TX_HASH=${tx.hash}`); + console.log(`GOVERNANCE_ADDR=${proxy.address}`); + return [returnContract, governance.address]; } catch (err) { console.log("Governance deploy error:" + err); } @@ -114,16 +139,23 @@ export async function deployGovernance( export async function deployPriorityQueue( wallet, + proxyCode, priorityQueueCode, - constructorArgs + initArgs, + initArgsValues, ) { try { - let priorityQueue = await deployContract(wallet, priorityQueueCode, constructorArgs, { - gasLimit: 5000000, + const proxy = await deployProxy(wallet, proxyCode); + const priorityQueue = await deployContract(wallet, priorityQueueCode, [], { + gasLimit: 3000000, }); - console.log(`PRIORITY_QUEUE_ADDR=${priorityQueue.address}`); + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const tx = await proxy.initialize(priorityQueue.address, initArgsInBytes); + await tx.wait(); - return priorityQueue; + const returnContract = new ethers.Contract(proxy.address, priorityQueueCode.interface, wallet); + console.log(`PRIORITY_QUEUE_ADDR=${proxy.address}`); + return [returnContract, priorityQueue.address]; } catch (err) { console.log("Priority queue deploy error:" + err); } @@ -131,16 +163,23 @@ export async function deployPriorityQueue( export async function deployVerifier( wallet, + proxyCode, verifierCode, - constructorArgs + initArgs, + initArgsValues, ) { try { - let verifier = await deployContract(wallet, verifierCode, constructorArgs, { - gasLimit: 2000000, + const proxy = await deployProxy(wallet, proxyCode); + const verifier = await deployContract(wallet, verifierCode, [], { + gasLimit: 3000000, }); - console.log(`VERIFIER_ADDR=${verifier.address}`); + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const tx = await proxy.initialize(verifier.address, initArgsInBytes); + await tx.wait(); - return verifier; + const returnContract = new ethers.Contract(proxy.address, verifierCode.interface, wallet); + console.log(`VERIFIER_ADDR=${proxy.address}`); + return [returnContract, 
verifier.address]; } catch (err) { console.log("Verifier deploy error:" + err); } @@ -148,33 +187,40 @@ export async function deployVerifier( export async function deployFranklin( wallet, + proxyCode, franklinCode, - constructorArgs + initArgs, + initArgsValues, ) { try { let [ - governanceAddress, - verifierAddress, - priorityQueueAddress, + governanceProxyAddress, + verifierProxyAddress, + priorityQueueProxyAddress, genesisAddress, genesisRoot - ] = constructorArgs; + ] = initArgsValues; - let contract = await deployContract( + const proxy = await deployProxy(wallet, proxyCode); + const contract = await deployContract( wallet, franklinCode, - constructorArgs, + [], { gasLimit: 6000000, }); - console.log(`CONTRACT_GENESIS_TX_HASH=${contract.deployTransaction.hash}`); - console.log(`CONTRACT_ADDR=${contract.address}`); + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const initTx = await proxy.initialize(contract.address, initArgsInBytes); + await initTx.wait(); - const priorityQueueContract = new ethers.Contract(priorityQueueAddress, priorityQueueContractCode.interface, wallet); - const setAddressTx = await priorityQueueContract.setFranklinAddress(contract.address, { gasLimit: 1000000 }) + const priorityQueueProxyContract = new ethers.Contract(priorityQueueProxyAddress, priorityQueueContractCode.interface, wallet); + const setAddressTx = await priorityQueueProxyContract.setFranklinAddress(proxy.address, { gasLimit: 1000000 }) await setAddressTx.wait(); - - return contract; + + const returnContract = new ethers.Contract(proxy.address, franklinCode.interface, wallet); + console.log(`CONTRACT_GENESIS_TX_HASH=${initTx.hash}`); + console.log(`CONTRACT_ADDR=${proxy.address}`); + return [returnContract, contract.address]; } catch (err) { console.log("Franklin deploy error:" + err); } diff --git a/contracts/test/fails_tests.ts b/contracts/test/fails_tests.ts index 797835edc4..741526f1cb 100644 --- a/contracts/test/fails_tests.ts +++ b/contracts/test/fails_tests.ts @@ -11,6 +11,7 @@ import { mintTestERC20Token, priorityQueueTestContractCode, verifierTestContractCode, + proxyTestContractCode, } from "../src.ts/deploy"; import {expect, use} from "chai"; const { createMockProvider, getWallets, solidity } = require("ethereum-waffle"); @@ -62,16 +63,42 @@ describe("PLANNED FAILS", function () { beforeEach(async () => { console.log("---\n"); - verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - priorityQueueDeployedContract = await deployPriorityQueue( + let verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, + proxyTestContractCode, + verifierTestContractCode, + [], + [], + ); + let governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + ["address"], + [wallet.address], + ); + let priorityQueueAddressDeployed; + [priorityQueueDeployedContract, priorityQueueAddressDeployed] = await deployPriorityQueue( + wallet, + proxyTestContractCode, priorityQueueTestContractCode, - [governanceDeployedContract.address] + ["address"], + [governanceDeployedContract.address], ); - franklinDeployedContract = await deployFranklin( + let franklinAddressDeployed; + [franklinDeployedContract, franklinAddressDeployed] = await deployFranklin( wallet, + proxyTestContractCode, 
franklinTestContractCode, + [ + "address", + "address", + "address", + "address", + "bytes32", + ], [ governanceDeployedContract.address, verifierDeployedContract.address, diff --git a/contracts/test/integration_new.ts b/contracts/test/integration_new.ts index 3b072e94b8..19cba51181 100644 --- a/contracts/test/integration_new.ts +++ b/contracts/test/integration_new.ts @@ -10,6 +10,7 @@ import { verifierTestContractCode, governanceTestContractCode, priorityQueueTestContractCode, + proxyTestContractCode, } from "../src.ts/deploy"; import { expect, use } from "chai"; @@ -54,12 +55,42 @@ describe("Integration test", async function () { before(async () => { //console.log("---\n"); - verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - priorityQueueDeployedContract = await deployPriorityQueue(wallet, priorityQueueTestContractCode, [governanceDeployedContract.address]); - franklinDeployedContract = await deployFranklin( + let verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, + proxyTestContractCode, + verifierTestContractCode, + [], + [], + ); + let governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + ["address"], + [wallet.address], + ); + let priorityQueueAddressDeployed; + [priorityQueueDeployedContract, priorityQueueAddressDeployed] = await deployPriorityQueue( + wallet, + proxyTestContractCode, + priorityQueueTestContractCode, + ["address"], + [governanceDeployedContract.address], + ); + let franklinAddressDeployed; + [franklinDeployedContract, franklinAddressDeployed] = await deployFranklin( + wallet, + proxyTestContractCode, franklinTestContractCode, + [ + "address", + "address", + "address", + "address", + "bytes32", + ], [ governanceDeployedContract.address, verifierDeployedContract.address, From 077922712a801cedfa4f4f7588eefa47c45cf77b Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 28 Feb 2020 15:42:53 +0200 Subject: [PATCH 009/186] Added test of access to the function of initialization of the target --- contracts/test/unit_tests/proxy_test.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/contracts/test/unit_tests/proxy_test.js b/contracts/test/unit_tests/proxy_test.js index fcfbaf6aa5..3843cab1cc 100644 --- a/contracts/test/unit_tests/proxy_test.js +++ b/contracts/test/unit_tests/proxy_test.js @@ -62,6 +62,14 @@ describe("Proxy unit test", function () { it("check Proxy reverts", async () => { expect(await getCallRevertReason( () => proxyTestContract.initialize(DummyFirst.address, []) )).equal("uin11"); + expect(await getCallRevertReason( () => proxyDummyInterface.initialize(DummyFirst.address, []) )).equal("uin11"); + + let proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); + expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.initialize(DummyFirst.address, []) )).equal("oro11"); + + let proxyDummyInterface_with_wallet2_signer = await proxyDummyInterface.connect(wallet2); + expect(await getCallRevertReason( () => proxyDummyInterface_with_wallet2_signer.initialize(DummyFirst.address, []) )).equal("oro11"); + expect(await getCallRevertReason( () => proxyTestContract.upgradeTarget("0x0000000000000000000000000000000000000000") )).equal("uut11"); expect(await getCallRevertReason( () => 
proxyTestContract.upgradeTarget(DummyFirst.address) )).equal("uut12"); }); From 2160d312b90e1c20d56aff98e73174fcdc8c1f7d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 2 Mar 2020 15:52:21 +0200 Subject: [PATCH 010/186] Changes of tests after merging with dvush/commit-block-contract-refactor --- contracts/contracts/Franklin.sol | 2 +- contracts/contracts/test/ZKSyncUnitTest.sol | 11 +- contracts/src.ts/deploy.ts | 6 +- contracts/test/unit_tests/ownable_test.js | 4 +- contracts/test/unit_tests/proxy_test.js | 28 ++-- contracts/test/unit_tests/upgradeMode_test.js | 20 +-- contracts/test/unit_tests/zksync_test.ts | 129 ++++++++++++------ contracts/yarn.lock | 8 ++ 8 files changed, 134 insertions(+), 74 deletions(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 3b725ccbf9..3c2283739c 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -47,7 +47,7 @@ contract Franklin is Storage, Config, Events { address _verifierAddress, , bytes32 _genesisRoot - ) = abi.decode(initializationParameters, (address, address, address, address, bytes32)); + ) = abi.decode(initializationParameters, (address, address, address, bytes32)); verifier = Verifier(_verifierAddress); governance = Governance(_governanceAddress); diff --git a/contracts/contracts/test/ZKSyncUnitTest.sol b/contracts/contracts/test/ZKSyncUnitTest.sol index 6bc9b3b5a4..47c984c27f 100644 --- a/contracts/contracts/test/ZKSyncUnitTest.sol +++ b/contracts/contracts/test/ZKSyncUnitTest.sol @@ -1,6 +1,7 @@ pragma solidity 0.5.16; import "../generated/FranklinTest.sol"; +import "../generated/UpgradeModeTest.sol"; contract ZKSyncUnitTest is FranklinTest { @@ -10,7 +11,15 @@ contract ZKSyncUnitTest is FranklinTest { address _verifierAddress, address _genesisAccAddress, bytes32 _genesisRoot - ) FranklinTest(_governanceAddress, _verifierAddress, _genesisAccAddress, _genesisRoot) public{} + ) FranklinTest() public{ + /// initialization + upgradeMode = new UpgradeModeTest(); + + verifier = VerifierTest(_verifierAddress); + governance = GovernanceTest(_governanceAddress); + + blocks[0].stateRoot = _genesisRoot; + } function changePubkeySignatureCheck(bytes calldata _signature, bytes calldata _newPkHash, uint32 _nonce, address _ethAddress) external pure returns (bool) { return verifyChangePubkeySignature(_signature, _newPkHash, _nonce, _ethAddress); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 40478851bc..553b810caa 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -126,7 +126,7 @@ export async function deployGovernance( await tx.wait(); const returnContract = new ethers.Contract(proxy.address, governanceCode.interface, wallet); - return [returnContract, governance.address]; + return [returnContract, governance.address, tx.hash]; } catch (err) { console.log("Governance deploy error:" + err); } @@ -149,7 +149,7 @@ export async function deployVerifier( await tx.wait(); const returnContract = new ethers.Contract(proxy.address, verifierCode.interface, wallet); - return [returnContract, verifier.address]; + return [returnContract, verifier.address, tx.hash]; } catch (err) { console.error("Verifier deploy error:" + err); } @@ -183,7 +183,7 @@ export async function deployFranklin( await initTx.wait(); const returnContract = new ethers.Contract(proxy.address, franklinCode.interface, wallet); - return [returnContract, contract.address]; + return [returnContract, contract.address, initTx.hash]; } catch (err) { console.log("Franklin 
deploy error:" + err); } diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index 832054377f..faa48cf182 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ -18,7 +18,7 @@ describe("Ownable unit test", function () { }); it("checking correctness of transferring mastership to zero address", async () => { - let revertReason = await getCallRevertReason( () => testContract.transferMastership("0x0000000000000000000000000000000000000000") ); + let {revertReason} = await getCallRevertReason( () => testContract.transferMastership("0x0000000000000000000000000000000000000000") ); expect(revertReason).equal("otp11") }); @@ -28,7 +28,7 @@ describe("Ownable unit test", function () { expect(await testContract.getMaster()).to.equal(wallet2.address) /// try to transfer mastership to wallet1 by wallet1 call - let revertReason = await getCallRevertReason( () => testContract.transferMastership(wallet1.address) ); + let {revertReason} = await getCallRevertReason( () => testContract.transferMastership(wallet1.address) ); expect(revertReason).equal("oro11") /// transfer mastership back to wallet1 diff --git a/contracts/test/unit_tests/proxy_test.js b/contracts/test/unit_tests/proxy_test.js index 3843cab1cc..5ea3b5879c 100644 --- a/contracts/test/unit_tests/proxy_test.js +++ b/contracts/test/unit_tests/proxy_test.js @@ -52,26 +52,26 @@ describe("Proxy unit test", function () { it("checking that requireMaster calls present", async () => { let proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); - expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.upgradeTarget(DummySecond.address) )).equal("oro11") - expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.cancelUpgradeTarget() )).equal("oro11") - expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.finishTargetUpgrade([]) )).equal("oro11") + expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.upgradeTarget(DummySecond.address) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.cancelUpgradeTarget() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.finishTargetUpgrade([]) )).revertReason).equal("oro11") // bonus: check that force cancellation do not have requireMaster call - expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.forceCancelUpgradeTarget() )).to.not.equal("oro11") + expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.forceCancelUpgradeTarget() )).revertReason).to.not.equal("oro11") }); it("check Proxy reverts", async () => { - expect(await getCallRevertReason( () => proxyTestContract.initialize(DummyFirst.address, []) )).equal("uin11"); - expect(await getCallRevertReason( () => proxyDummyInterface.initialize(DummyFirst.address, []) )).equal("uin11"); + expect((await getCallRevertReason( () => proxyTestContract.initialize(DummyFirst.address, []) )).revertReason).equal("uin11"); + expect((await getCallRevertReason( () => proxyDummyInterface.initialize(DummyFirst.address, []) )).revertReason).equal("uin11"); let proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); - expect(await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.initialize(DummyFirst.address, []) )).equal("oro11"); + expect((await 
getCallRevertReason( () => proxyTestContract_with_wallet2_signer.initialize(DummyFirst.address, []) )).revertReason).equal("oro11"); let proxyDummyInterface_with_wallet2_signer = await proxyDummyInterface.connect(wallet2); - expect(await getCallRevertReason( () => proxyDummyInterface_with_wallet2_signer.initialize(DummyFirst.address, []) )).equal("oro11"); + expect((await getCallRevertReason( () => proxyDummyInterface_with_wallet2_signer.initialize(DummyFirst.address, []) )).revertReason).equal("oro11"); - expect(await getCallRevertReason( () => proxyTestContract.upgradeTarget("0x0000000000000000000000000000000000000000") )).equal("uut11"); - expect(await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).equal("uut12"); + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget("0x0000000000000000000000000000000000000000") )).revertReason).equal("uut11"); + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).revertReason).equal("uut12"); }); it("check upgrade canceling", async () => { @@ -106,7 +106,7 @@ describe("Proxy unit test", function () { } if (step != 3) { - expect(await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).equal("umf11"); + expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).revertReason).equal("umf11"); } else { await proxyTestContract.finishTargetUpgrade([bytes[2], bytes[3]]); } @@ -133,7 +133,7 @@ describe("Proxy unit test", function () { } else { it("checking that force cancellation works correctly", async () => { - expect(await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).equal("ufc11"); + expect((await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).revertReason).equal("ufc11"); let start_time = performance.now(); @@ -156,9 +156,9 @@ describe("Proxy unit test", function () { } if (step != 3) { - expect(await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).equal("ufc12"); + expect((await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).revertReason).equal("ufc12"); } else { - expect(await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).equal("ufu11"); + expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).revertReason).equal("ufu11"); await proxyTestContract.forceCancelUpgradeTarget(); } } diff --git a/contracts/test/unit_tests/upgradeMode_test.js b/contracts/test/unit_tests/upgradeMode_test.js index a411844aba..f576c49559 100644 --- a/contracts/test/unit_tests/upgradeMode_test.js +++ b/contracts/test/unit_tests/upgradeMode_test.js @@ -17,11 +17,11 @@ describe("UpgradeMode unit test", function () { it("checking that requireMaster calls present", async () => { let testContract_with_wallet2_signer = await testContract.connect(wallet2); - expect(await getCallRevertReason( () => testContract_with_wallet2_signer.activate() )).equal("oro11") - expect(await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).equal("oro11") - expect(await getCallRevertReason( () => testContract_with_wallet2_signer.isClosedStatusActive() )).equal("VM did not revert") - expect(await getCallRevertReason( () => testContract_with_wallet2_signer.forceCancel() )).equal("oro11") - expect(await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.activate() 
)).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.isClosedStatusActive() )).revertReason).equal("VM did not revert") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.forceCancel() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).revertReason).equal("oro11") }); it("test activate, test cancel, test finish without closed status active", async () => { @@ -34,7 +34,7 @@ describe("UpgradeMode unit test", function () { await testContract.isClosedStatusActive(); expect(await testContract.closedStatusActive()).to.equal(false) - expect(await getCallRevertReason( () => testContract.activate() )).equal("uma11") + expect((await getCallRevertReason( () => testContract.activate() )).revertReason).equal("uma11") // cancel await expect(testContract.cancel()) @@ -43,10 +43,10 @@ describe("UpgradeMode unit test", function () { expect(await testContract.waitUpgradeModeActive()).to.equal(false) - expect(await getCallRevertReason( () => testContract.cancel() )).equal("umc11") + expect((await getCallRevertReason( () => testContract.cancel() )).revertReason).equal("umc11") // finish - expect(await getCallRevertReason( () => testContract.finish() )).equal("umf11") + expect((await getCallRevertReason( () => testContract.finish() )).revertReason).equal("umf11") }); if (SKIP_TEST) { @@ -131,7 +131,7 @@ describe("UpgradeMode unit test", function () { } if (step != 5) { - expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ufc12") + expect((await getCallRevertReason( () => testContract.forceCancel() )).revertReason).equal("ufc12") } else { await expect(testContract.forceCancel()) .to.emit(testContract, 'UpgradeForciblyCanceled') @@ -140,7 +140,7 @@ describe("UpgradeMode unit test", function () { } } - expect(await getCallRevertReason( () => testContract.forceCancel() )).equal("ufc11") + expect((await getCallRevertReason( () => testContract.forceCancel() )).revertReason).equal("ufc11") }); } diff --git a/contracts/test/unit_tests/zksync_test.ts b/contracts/test/unit_tests/zksync_test.ts index 3a33a64d3f..d4e69a9ef7 100644 --- a/contracts/test/unit_tests/zksync_test.ts +++ b/contracts/test/unit_tests/zksync_test.ts @@ -1,9 +1,8 @@ import { addTestERC20Token, addTestNotApprovedERC20Token, - deployFranklin, deployGovernance, deployVerifier, franklinTestContractCode, - governanceTestContractCode, mintTestERC20Token, + governanceTestContractCode, mintTestERC20Token, proxyTestContractCode, verifierTestContractCode } from "../../src.ts/deploy"; import {BigNumber, bigNumberify, BigNumberish, parseEther} from "ethers/utils"; @@ -27,7 +26,7 @@ describe("ZK Sync signature verification unit tests", function () { let randomWallet = ethers.Wallet.createRandom(); before(async () => { testContract = await deployContract(wallet, require('../../build/ZKSyncUnitTest'), [AddressZero, AddressZero, AddressZero, Buffer.alloc(32, 0)], { - gasLimit: 6000000, + gasLimit: 6500000, }); }); @@ -88,18 +87,29 @@ describe("ZK priority queue ops unit tests", function () { let ethProxy; let operationTestContract; before(async () => { - const verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - const governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - zksyncContract = 
await deployFranklin( + let verifierDeployedContract, verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, - franklinTestContractCode, - [ - governanceDeployedContract.address, - verifierDeployedContract.address, - wallet.address, - ethers.constants.HashZero, - ], + proxyTestContractCode, + verifierTestContractCode, + [], + [], ); + let governanceDeployedContract, governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + ["address"], + [wallet.address], + ); + zksyncContract = await deployContract( + wallet, + require('../../build/ZKSyncUnitTest'), + [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], + { + gasLimit: 6500000, + }); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); @@ -226,18 +236,29 @@ describe("ZK Sync withdraw unit tests", function () { let incorrectTokenContract; let ethProxy; before(async () => { - const verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - const governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - zksyncContract = await deployFranklin( + let verifierDeployedContract, verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, - require("../../build/ZKSyncUnitTest"), - [ - governanceDeployedContract.address, - verifierDeployedContract.address, - wallet.address, - ethers.constants.HashZero, - ], + proxyTestContractCode, + verifierTestContractCode, + [], + [], ); + let governanceDeployedContract, governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + ["address"], + [wallet.address], + ); + zksyncContract = await deployContract( + wallet, + require('../../build/ZKSyncUnitTest'), + [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], + { + gasLimit: 6500000, + }); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); incorrectTokenContract = await addTestNotApprovedERC20Token(wallet); @@ -366,18 +387,29 @@ describe("ZK Sync auth pubkey onchain unit tests", function () { let incorrectTokenContract; let ethProxy; before(async () => { - const verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - const governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - zksyncContract = await deployFranklin( + let verifierDeployedContract, verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, - require("../../build/ZKSyncUnitTest"), - [ - governanceDeployedContract.address, - verifierDeployedContract.address, - wallet.address, - ethers.constants.HashZero, - ], + proxyTestContractCode, + verifierTestContractCode, + [], + [], ); + let governanceDeployedContract, governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + 
["address"], + [wallet.address], + ); + zksyncContract = await deployContract( + wallet, + require('../../build/ZKSyncUnitTest'), + [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], + { + gasLimit: 6500000, + }); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); incorrectTokenContract = await addTestNotApprovedERC20Token(wallet); @@ -440,18 +472,29 @@ describe("ZK Sync test process next operation", function () { let incorrectTokenContract; let ethProxy; before(async () => { - const verifierDeployedContract = await deployVerifier(wallet, verifierTestContractCode, []); - const governanceDeployedContract = await deployGovernance(wallet, governanceTestContractCode, [wallet.address]); - zksyncContract = await deployFranklin( + let verifierDeployedContract, verifierAddressDeployed; + [verifierDeployedContract, verifierAddressDeployed] = await deployVerifier( wallet, - require("../../build/ZKSyncUnitTest"), - [ - governanceDeployedContract.address, - verifierDeployedContract.address, - wallet.address, - ethers.constants.HashZero, - ], + proxyTestContractCode, + verifierTestContractCode, + [], + [], ); + let governanceDeployedContract, governanceAddressDeployed; + [governanceDeployedContract, governanceAddressDeployed] = await deployGovernance( + wallet, + proxyTestContractCode, + governanceTestContractCode, + ["address"], + [wallet.address], + ); + zksyncContract = await deployContract( + wallet, + require('../../build/ZKSyncUnitTest'), + [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], + { + gasLimit: 6500000, + }); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); incorrectTokenContract = await addTestNotApprovedERC20Token(wallet); diff --git a/contracts/yarn.lock b/contracts/yarn.lock index 5e2f0aef7c..d0d9c010c1 100644 --- a/contracts/yarn.lock +++ b/contracts/yarn.lock @@ -2372,6 +2372,14 @@ ethereumjs-abi@0.6.5: bn.js "^4.10.0" ethereumjs-util "^4.3.0" +ethereumjs-abi@^0.6.8: + version "0.6.8" + resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz#71bc152db099f70e62f108b7cdfca1b362c6fcae" + integrity sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA== + dependencies: + bn.js "^4.11.8" + ethereumjs-util "^6.0.0" + "ethereumjs-abi@git+https://github.com/ethereumjs/ethereumjs-abi.git": version "0.6.7" resolved "git+https://github.com/ethereumjs/ethereumjs-abi.git#8431eab7b3384e65e8126a4602520b78031666fb" From 55ac81ad0ccdfb7052d66be8b96d6fd6020fc0fb Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 2 Mar 2020 16:04:50 +0200 Subject: [PATCH 011/186] Removed priorityQueue init params from testnet-deploy.ts --- contracts/scripts/testnet-deploy.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/contracts/scripts/testnet-deploy.ts b/contracts/scripts/testnet-deploy.ts index dad3679e4f..7c9f986d25 100644 --- a/contracts/scripts/testnet-deploy.ts +++ b/contracts/scripts/testnet-deploy.ts @@ -51,14 +51,11 @@ async function main() { const testWallet = ethers.Wallet.fromMnemonic(process.env.TEST_MNEMONIC, "m/44'/60'/0'/0/0").connect(provider); let governanceAddress = process.env.GOVERNANCE_ADDR; - let priorityQueueAddress = process.env.PRIORITY_QUEUE_ADDR; let verifierAddress = process.env.VERIFIER_ADDR; 
let franklinAddress = process.env.CONTRACT_ADDR; let governanceInitArgs = ["address"]; let governanceInitArgsValues = [wallet.address]; - let priorityQueueInitArgs = ["address"]; - let priorityQueueInitArgsValues = [governanceAddress]; let verifierInitArgs = []; let verifierInitArgsValues = []; From b50921b3cee384dfd61b2bd024c4d0407d576edc Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 3 Mar 2020 12:53:52 +0200 Subject: [PATCH 012/186] Added readyToBeUpgraded() to Franklin.sol + minor changes --- bin/prepare-test-contracts.sh | 4 +- contracts/contracts/Events.sol | 4 +- contracts/contracts/Franklin.sol | 15 +++++- contracts/contracts/Storage.sol | 3 ++ contracts/contracts/UpgradeMode.sol | 38 ++++++------- contracts/contracts/test/DummyTarget.sol | 18 +++---- contracts/src.ts/deploy.ts | 53 +++++++++++++++++++ contracts/test/unit_tests/bytes_test.js | 2 +- contracts/test/unit_tests/common.js | 1 + contracts/test/unit_tests/governance_test.js | 2 +- contracts/test/unit_tests/operations_test.js | 2 +- contracts/test/unit_tests/ownable_test.js | 2 +- contracts/test/unit_tests/proxy_test.js | 4 +- contracts/test/unit_tests/upgradeMode_test.js | 22 ++++---- 14 files changed, 120 insertions(+), 50 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index a735a0c8a5..0842c24077 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -35,7 +35,7 @@ ssed 's/StorageTest/Storage/' -i $OUT_DIR/UpgradeableTest.sol # Workaround -> upgrade mode has UpgradeMode in event and variable names. ssed 's/UpgradeModeTestEvents/UpgradeModeEvents/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/UpgradeModeTestActivated/UpgradeModeActivated/' -i $OUT_DIR/UpgradeModeTest.sol -ssed 's/UpgradeModeTestClosedStatusActivated/UpgradeModeClosedStatusActivated/' -i $OUT_DIR/UpgradeModeTest.sol +ssed 's/UpgradeModeTestFinalizeStatusActivated/UpgradeModeFinalizeStatusActivated/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/UpgradeModeTestForciblyCanceled/UpgradeModeForciblyCanceled/' -i $OUT_DIR/UpgradeModeTest.sol ssed 's/waitUpgradeModeTestActive/waitUpgradeModeActive/' -i $OUT_DIR/UpgradeModeTest.sol @@ -47,7 +47,7 @@ set_constant() { ssed -E "s/(.*constant $1)(.*)\;/\1 = $2\;/" -i $3 } create_constant_getter() { - ssed -E "s/ (.*) (constant $1)(.*)\;(.*)/ \1 \2\3\;\4\n function get_$1() external view returns (\1) {\n return $1\;\n }/" -i $2 + ssed -E "s/ (.*) (constant $1)(.*)\;(.*)/ \1 \2\3\;\4\n function get_$1() external pure returns (\1) {\n return $1\;\n }/" -i $2 } # Change constants diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index d1987660b6..af3c28332d 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -67,8 +67,8 @@ contract UpgradeModeEvents { uint64 version ); - /// @notice Upgrade mode closed status event - event UpgradeModeClosedStatusActivated( + /// @notice Upgrade mode finalize status event + event UpgradeModeFinalizeStatusActivated( uint64 version ); diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 3c2283739c..a81d8d1193 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -55,6 +55,13 @@ contract Franklin is Storage, Config, Events { blocks[0].stateRoot = _genesisRoot; } + /// @notice Checks that contract is ready to be upgraded + /// Ensures that all full exit requests received while waiting for the upgrade will be processed before finishing upgrade + /// @return Bool flag indicating that contract is ready 
to be upgraded + function readyToBeUpgraded() external view returns (bool) { + return !exodusMode && (firstPriorityRequestId >= requestsToProcessBeforeUpgrade); + } + /// @notice executes pending withdrawals /// @param _n The number of withdrawals to complete starting from oldest function completeWithdrawals(uint32 _n) external { @@ -695,7 +702,7 @@ contract Franklin is Storage, Config, Events { // Expiration block is: current block number + priority expiration delta uint256 expirationBlock = block.number + PRIORITY_EXPIRATION; - priorityRequests[firstPriorityRequestId+totalOpenPriorityRequests] = PriorityOperation({ + priorityRequests[firstPriorityRequestId + totalOpenPriorityRequests] = PriorityOperation({ opType: _opType, pubData: _pubData, expirationBlock: expirationBlock, @@ -704,7 +711,7 @@ contract Franklin is Storage, Config, Events { emit NewPriorityRequest( msg.sender, - firstPriorityRequestId+totalOpenPriorityRequests, + firstPriorityRequestId + totalOpenPriorityRequests, uint8(_opType), _pubData, expirationBlock, @@ -712,6 +719,10 @@ contract Franklin is Storage, Config, Events { ); totalOpenPriorityRequests++; + + if (_opType == Operations.OpType.FullExit && !upgradeMode.isFinalizeStatusActive()) { + requestsToProcessBeforeUpgrade = firstPriorityRequestId + totalOpenPriorityRequests; + } } /// @notice Collects fees from provided requests number for the block validator, store it on her diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index 63702d0132..cdc6173241 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -15,6 +15,9 @@ contract Storage { /// @notice UpgradeMode contract UpgradeMode upgradeMode; + /// @notice Number of priority requests that must be processed at the time of finishing upgrade + uint64 requestsToProcessBeforeUpgrade; + /// @notice Verifier contract. 
Used to verify block proof and exit proof Verifier internal verifier; diff --git a/contracts/contracts/UpgradeMode.sol b/contracts/contracts/UpgradeMode.sol index c7976d7ab1..ef6393a391 100644 --- a/contracts/contracts/UpgradeMode.sol +++ b/contracts/contracts/UpgradeMode.sol @@ -10,10 +10,10 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { /// @notice Maximal upgrade time (in seconds) /// @dev After this period from the start of the upgrade anyone can cancel it forcibly - uint256 constant MAX_UPGRADE_PERIOD = 60 * 60 * 24 * 14; /// 14 days + uint256 constant MAX_UPGRADE_PERIOD = 2 weeks; - /// @notice Waiting period to activate closed status mode (in seconds) - uint256 constant WAIT_UPGRADE_MODE_PERIOD = 60 * 60 * 24 * 10; /// 10 days + /// @notice Waiting period to activate finalize status mode (in seconds) + uint256 constant WAIT_UPGRADE_MODE_PERIOD = 10 days; /// @notice Version of upgradeable field uint64 public version; @@ -21,8 +21,8 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { /// @notice Flag indicating that wait upgrade mode is active bool public waitUpgradeModeActive; - /// @notice Flag indicating that closed status is active - bool public closedStatusActive; + /// @notice Flag indicating that finalize status is active + bool public finalizeStatusActive; /// @notice Time of activating waiting upgrade mode /// @dev Will be equal to zero in case of not active mode @@ -33,7 +33,7 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { constructor() Ownable() public { version = 1; waitUpgradeModeActive = false; - closedStatusActive = false; + finalizeStatusActive = false; activationTime = 0; } @@ -46,7 +46,7 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { ); // uma11 - unable to activate active mode waitUpgradeModeActive = true; - closedStatusActive = false; + finalizeStatusActive = false; activationTime = now; emit UpgradeModeActivated(version); } @@ -60,7 +60,7 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { ); // umc11 - unable to cancel not active mode waitUpgradeModeActive = false; - closedStatusActive = false; + finalizeStatusActive = false; activationTime = 0; emit UpgradeCanceled(version); } @@ -78,37 +78,37 @@ contract UpgradeMode is UpgradeModeEvents, Ownable { ); // ufc12 - unable to force cancel upgrade until MAX_UPGRADE_PERIOD passes waitUpgradeModeActive = false; - closedStatusActive = false; + finalizeStatusActive = false; activationTime = 0; emit UpgradeForciblyCanceled(version); } - /// @notice Checks that closed status is active and activates it if needed - /// @return Bool flag indicating that closed status is active - function isClosedStatusActive() public returns (bool) { + /// @notice Checks that finalize status is active and activates it if needed + /// @return Bool flag indicating that finalize status is active + function isFinalizeStatusActive() public returns (bool) { if (!waitUpgradeModeActive) { return false; } - if (closedStatusActive) { + if (finalizeStatusActive) { return true; } if (now >= activationTime + WAIT_UPGRADE_MODE_PERIOD) { - closedStatusActive = true; - emit UpgradeModeClosedStatusActivated(version); + finalizeStatusActive = true; + emit UpgradeModeFinalizeStatusActivated(version); } - return closedStatusActive; + return finalizeStatusActive; } /// @notice Finishes upgrade function finish() external { requireMaster(msg.sender); require( - isClosedStatusActive(), + isFinalizeStatusActive(), "umf11" - ); // umf11 - unable to finish upgrade without closed status active + ); // umf11 - unable to finish upgrade 
without finalize status active waitUpgradeModeActive = false; - closedStatusActive = false; + finalizeStatusActive = false; activationTime = 0; emit UpgradeCompleted(version); version++; diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index fe8ed743a5..85c573f02a 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -2,7 +2,7 @@ pragma solidity 0.5.16; interface DummyTarget { - function get_DUMMY_INDEX() external view returns (uint256); + function get_DUMMY_INDEX() external pure returns (uint256); function initialize(address _address, bytes calldata _initializationParameters) external; @@ -13,14 +13,14 @@ interface DummyTarget { contract DummyFirst is DummyTarget { uint256 private constant DUMMY_INDEX = 1; - function get_DUMMY_INDEX() external view returns (uint256) { + function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; } function initialize(address _address, bytes calldata _initializationParameters) external { - bytes memory _initializationParameters = _initializationParameters; - bytes32 byte_0 = bytes32(uint256(uint8(_initializationParameters[0]))); - bytes32 byte_1 = bytes32(uint256(uint8(_initializationParameters[1]))); + bytes memory initializationParameters = _initializationParameters; + bytes32 byte_0 = bytes32(uint256(uint8(initializationParameters[0]))); + bytes32 byte_1 = bytes32(uint256(uint8(initializationParameters[1]))); assembly { sstore(0, _address) sstore(1, byte_0) @@ -37,14 +37,14 @@ contract DummyFirst is DummyTarget { contract DummySecond is DummyTarget { uint256 private constant DUMMY_INDEX = 2; - function get_DUMMY_INDEX() external view returns (uint256) { + function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; } function initialize(address _address, bytes calldata _initializationParameters) external { - bytes memory _initializationParameters = _initializationParameters; - bytes32 byte_0 = bytes32(uint256(uint8(_initializationParameters[0]))); - bytes32 byte_1 = bytes32(uint256(uint8(_initializationParameters[1]))); + bytes memory initializationParameters = _initializationParameters; + bytes32 byte_0 = bytes32(uint256(uint8(initializationParameters[0]))); + bytes32 byte_1 = bytes32(uint256(uint8(initializationParameters[1]))); assembly { sstore(0, _address) sstore(2, byte_0) diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 553b810caa..1a05c6edc2 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -109,6 +109,59 @@ export async function deployProxy( } } +// note that this upgrade must be finished later +export async function upgradeContract( + wallet, + proxyContract, + newTargetAddress, +) { + try { + const proxy = new ethers.Contract(proxyContract.address, proxyContractCode.interface, wallet); + const tx = await proxy.upgradeTarget(newTargetAddress); + await tx.wait(); + + return true; + } catch (err) { + return false; + } +} + +// cancels upgrade +export async function cancelUpgrade( + wallet, + proxyContract, +) { + try { + const proxy = new ethers.Contract(proxyContract.address, proxyContractCode.interface, wallet); + const tx = await proxy.cancelUpgradeTarget(); + await tx.wait(); + + return true; + } catch (err) { + return false; + } +} + +// trying to finish the upgrade +export async function finishUpgrade( + wallet, + proxyContract, + initArgs, + initArgsValues, +) { + try { + const proxy = new ethers.Contract(proxyContract.address, 
proxyContractCode.interface, wallet); + + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const tx = await proxy.finishTargetUpgrade(initArgsInBytes); + await tx.wait(); + + return true; + } catch (err) { + return false; + } +} + export async function deployGovernance( wallet, proxyCode, diff --git a/contracts/test/unit_tests/bytes_test.js b/contracts/test/unit_tests/bytes_test.js index ae20af923e..3aa93825c3 100644 --- a/contracts/test/unit_tests/bytes_test.js +++ b/contracts/test/unit_tests/bytes_test.js @@ -2,7 +2,7 @@ const { expect } = require("chai") const { bigNumberify } = require("ethers/utils"); const { provider, wallet, deployTestContract, getCallRevertReason } = require("./common") -describe("Bytes unit test", function () { +describe("Bytes unit tests", function () { this.timeout(50000); let bytesTestContract diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index e311ea8ad3..028f670c24 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -6,6 +6,7 @@ const { bigNumberify, parseEther, hexlify, formatEther } = require("ethers/utils const IERC20_INTERFACE = require("openzeppelin-solidity/build/contracts/IERC20"); const abi = require('ethereumjs-abi') +/// Skipping slow tests const SKIP_TEST = false; // For: geth diff --git a/contracts/test/unit_tests/governance_test.js b/contracts/test/unit_tests/governance_test.js index 8129d0b76d..8b18908f55 100644 --- a/contracts/test/unit_tests/governance_test.js +++ b/contracts/test/unit_tests/governance_test.js @@ -1,7 +1,7 @@ const { expect } = require("chai") const { wallet, deployProxyContract, getCallRevertReason } = require("./common") -describe("Governance unit test", function () { +describe("Governance unit tests", function () { this.timeout(50000); let testContract diff --git a/contracts/test/unit_tests/operations_test.js b/contracts/test/unit_tests/operations_test.js index 2f4a1ac080..7e20d7f4eb 100644 --- a/contracts/test/unit_tests/operations_test.js +++ b/contracts/test/unit_tests/operations_test.js @@ -1,7 +1,7 @@ const { expect } = require("chai") const { provider, wallet, deployTestContract, getCallRevertReason } = require("./common") -describe("Operations unit test", function () { +describe("Operations unit tests", function () { this.timeout(50000); let testContract diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index faa48cf182..b55822c6f2 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ -3,7 +3,7 @@ const { deployContract } = require("ethereum-waffle"); const { wallet1, wallet2, deployTestContract, getCallRevertReason } = require("./common") -describe("Ownable unit test", function () { +describe("Ownable unit tests", function () { this.timeout(50000); let testContract diff --git a/contracts/test/unit_tests/proxy_test.js b/contracts/test/unit_tests/proxy_test.js index 5ea3b5879c..943893ffd1 100644 --- a/contracts/test/unit_tests/proxy_test.js +++ b/contracts/test/unit_tests/proxy_test.js @@ -10,7 +10,7 @@ const proxyTestContractCode = require('../../build/ProxyTest'); // some random constants for checking write and read from storage const bytes = [133, 174, 97, 255] -describe("Proxy unit test", function () { +describe("Proxy unit tests", function () { this.timeout(50000); let proxyTestContract @@ -77,7 +77,9 @@ describe("Proxy unit test", function () { it("check upgrade canceling", async () => { // activate and 
cancel await proxyTestContract.upgradeTarget(DummySecond.address); + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummySecond.address))).revertReason).equal("uma11"); await proxyTestContract.cancelUpgradeTarget(); + expect((await getCallRevertReason( () => proxyTestContract.cancelUpgradeTarget())).revertReason).equal("umc11"); }); if (SKIP_TEST) { diff --git a/contracts/test/unit_tests/upgradeMode_test.js b/contracts/test/unit_tests/upgradeMode_test.js index f576c49559..77bc51075c 100644 --- a/contracts/test/unit_tests/upgradeMode_test.js +++ b/contracts/test/unit_tests/upgradeMode_test.js @@ -5,7 +5,7 @@ const { wallet1, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = const { performance } = require('perf_hooks'); -describe("UpgradeMode unit test", function () { +describe("UpgradeMode unit tests", function () { this.timeout(50000); let testContract @@ -19,20 +19,20 @@ describe("UpgradeMode unit test", function () { let testContract_with_wallet2_signer = await testContract.connect(wallet2); expect((await getCallRevertReason( () => testContract_with_wallet2_signer.activate() )).revertReason).equal("oro11") expect((await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.isClosedStatusActive() )).revertReason).equal("VM did not revert") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.isFinalizeStatusActive() )).revertReason).equal("VM did not revert") expect((await getCallRevertReason( () => testContract_with_wallet2_signer.forceCancel() )).revertReason).equal("oro11") expect((await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).revertReason).equal("oro11") }); - it("test activate, test cancel, test finish without closed status active", async () => { + it("test activate, test cancel, test finish without finalize status active", async () => { // activate await expect(testContract.activate()) .to.emit(testContract, 'UpgradeModeActivated') .withArgs(1); expect(await testContract.waitUpgradeModeActive()).to.equal(true) - await testContract.isClosedStatusActive(); - expect(await testContract.closedStatusActive()).to.equal(false) + await testContract.isFinalizeStatusActive(); + expect(await testContract.finalizeStatusActive()).to.equal(false) expect((await getCallRevertReason( () => testContract.activate() )).revertReason).equal("uma11") @@ -63,7 +63,7 @@ describe("UpgradeMode unit test", function () { let activated_time = performance.now(); - // wait and activate closed status + // wait and activate finalize status let all_time_in_sec = parseInt(await testContract.get_WAIT_UPGRADE_MODE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { @@ -77,13 +77,13 @@ describe("UpgradeMode unit test", function () { } if (step != 3) { - await testContract.isClosedStatusActive(); - expect(await testContract.closedStatusActive()).to.equal(false) + await testContract.isFinalizeStatusActive(); + expect(await testContract.finalizeStatusActive()).to.equal(false) } else { - await expect(testContract.isClosedStatusActive()) - .to.emit(testContract, 'UpgradeModeClosedStatusActivated') + await expect(testContract.isFinalizeStatusActive()) + .to.emit(testContract, 'UpgradeModeFinalizeStatusActivated') .withArgs(1); - expect(await testContract.closedStatusActive()).to.equal(true) + expect(await testContract.finalizeStatusActive()).to.equal(true) } } From 
f413db51710c1f5f914a1799679fc4092f09ff85 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 6 Mar 2020 22:26:41 +0200 Subject: [PATCH 013/186] Changed the architecture of the upgrade mode in the direction of UpgradeModule.sol --- bin/prepare-test-contracts.sh | 21 +-- contracts/contracts/Events.sol | 16 +- contracts/contracts/Franklin.sol | 15 -- contracts/contracts/Governance.sol | 8 - contracts/contracts/Storage.sol | 7 - contracts/contracts/UpgradeMode.sol | 117 ------------ contracts/contracts/UpgradeModule.sol | 161 ++++++++++++++++ contracts/contracts/Upgradeable.sol | 166 +++-------------- contracts/contracts/Verifier.sol | 8 - contracts/contracts/test/DummyTarget.sol | 44 +++-- contracts/contracts/test/ZKSyncUnitTest.sol | 2 - contracts/src.ts/deploy.ts | 59 +----- contracts/test/unit_tests/common.js | 6 +- contracts/test/unit_tests/proxy_test.js | 175 ------------------ contracts/test/unit_tests/proxy_test.ts | 36 ++++ contracts/test/unit_tests/upgradeMode_test.js | 147 --------------- .../test/unit_tests/upgradeModule_test.ts | 127 +++++++++++++ 17 files changed, 403 insertions(+), 712 deletions(-) delete mode 100644 contracts/contracts/UpgradeMode.sol create mode 100644 contracts/contracts/UpgradeModule.sol delete mode 100644 contracts/test/unit_tests/proxy_test.js create mode 100644 contracts/test/unit_tests/proxy_test.ts delete mode 100644 contracts/test/unit_tests/upgradeMode_test.js create mode 100644 contracts/test/unit_tests/upgradeModule_test.ts diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 0842c24077..db7bc73d99 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -15,9 +15,9 @@ cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol -cp $IN_DIR/UpgradeMode.sol $OUT_DIR/UpgradeModeTest.sol cp $IN_DIR/Upgradeable.sol $OUT_DIR/UpgradeableTest.sol cp $IN_DIR/Proxy.sol $OUT_DIR/ProxyTest.sol +cp $IN_DIR/UpgradeModule.sol $OUT_DIR/UpgradeModuleTest.sol # Rename contracts ssed 's/Governance/GovernanceTest/' -i $OUT_DIR/*.sol @@ -26,18 +26,17 @@ ssed 's/Franklin/FranklinTest/' -i $OUT_DIR/*.sol ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol -ssed 's/UpgradeMode/UpgradeModeTest/g' -i $OUT_DIR/*.sol ssed 's/Upgradeable/UpgradeableTest/' -i $OUT_DIR/*.sol ssed 's/Proxy/ProxyTest/' -i $OUT_DIR/*.sol +ssed 's/UpgradeModule/UpgradeModuleTest/' -i $OUT_DIR/*.sol # Workaround -> ownable and upgradeable has Storage in comments. ssed 's/StorageTest/Storage/' -i $OUT_DIR/OwnableTest.sol ssed 's/StorageTest/Storage/' -i $OUT_DIR/UpgradeableTest.sol -# Workaround -> upgrade mode has UpgradeMode in event and variable names. -ssed 's/UpgradeModeTestEvents/UpgradeModeEvents/' -i $OUT_DIR/UpgradeModeTest.sol -ssed 's/UpgradeModeTestActivated/UpgradeModeActivated/' -i $OUT_DIR/UpgradeModeTest.sol -ssed 's/UpgradeModeTestFinalizeStatusActivated/UpgradeModeFinalizeStatusActivated/' -i $OUT_DIR/UpgradeModeTest.sol -ssed 's/UpgradeModeTestForciblyCanceled/UpgradeModeForciblyCanceled/' -i $OUT_DIR/UpgradeModeTest.sol -ssed 's/waitUpgradeModeTestActive/waitUpgradeModeActive/' -i $OUT_DIR/UpgradeModeTest.sol +# Workaround -> upgrade module has Proxy in method name. 
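In the new layout the proxies stay thin and a single UpgradeModule, constructed with the address of the main (priority-operation processing) contract, becomes the master of every proxy it manages. The following is a minimal TypeScript sketch of that wiring, mirroring the UpgradeModuleTest setup that appears later in this patch; the build path, gas limit, and the funded wallet are assumptions, not code from the patch.

    import { Contract, Wallet } from "ethers";
    const { deployContract } = require("ethereum-waffle");

    // Wire a proxy under the new UpgradeModule (assumed build path and gas limit;
    // compare the UpgradeModuleTest setup further below in this patch).
    async function wireUpgradeModule(wallet: Wallet, proxy: Contract, mainContractAddress: string): Promise<Contract> {
        // The module is constructed with the address of the contract that processes priority operations.
        const upgradeModule = await deployContract(wallet, require("../../build/UpgradeModule"), [mainContractAddress], {
            gasLimit: 6000000,
        });
        // Hand mastership of the proxy to the module, so only the module can start, cancel or finish upgrades.
        await (await proxy.transferMastership(upgradeModule.address)).wait();
        return upgradeModule;
    }
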
+ssed 's/upgradeProxyTest/upgradeProxy/' -i $OUT_DIR/UpgradeModuleTest.sol +ssed 's/cancelProxyTestUpgrade/cancelProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol +ssed 's/forceCancelProxyTestUpgrade/forceCancelProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol +ssed 's/finishProxyTestUpgrade/finishProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol # Changes solidity constant to provided value @@ -55,12 +54,10 @@ set_constant MAX_AMOUNT_OF_REGISTERED_TOKENS 4 $OUT_DIR/ConfigTest.sol set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/ConfigTest.sol set_constant MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/ConfigTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol -set_constant MAX_UPGRADE_PERIOD 5 $OUT_DIR/UpgradeModeTest.sol -set_constant WAIT_UPGRADE_MODE_PERIOD 3 $OUT_DIR/UpgradeModeTest.sol +set_constant WAIT_UPGRADE_MODE_PERIOD 4 $OUT_DIR/UpgradeModuleTest.sol create_constant_getter MAX_AMOUNT_OF_REGISTERED_TOKENS $OUT_DIR/ConfigTest.sol -create_constant_getter MAX_UPGRADE_PERIOD $OUT_DIR/UpgradeModeTest.sol -create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeModeTest.sol +create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeModuleTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index af3c28332d..fdce311c5e 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -53,33 +53,33 @@ contract Events { ); } -/// @title Upgrade mode events +/// @title Upgrade events /// @author Matter Labs -contract UpgradeModeEvents { +contract UpgradeEvents { /// @notice Upgrade mode enter event event UpgradeModeActivated( + address proxyAddress, uint64 version ); /// @notice Upgrade mode cancel event event UpgradeCanceled( + address proxyAddress, uint64 version ); /// @notice Upgrade mode finalize status event event UpgradeModeFinalizeStatusActivated( - uint64 version - ); - - /// @notice Upgrade mode force cancellation event - event UpgradeForciblyCanceled( + address proxyAddress, uint64 version ); /// @notice Upgrade mode complete event event UpgradeCompleted( - uint64 version + address proxyAddress, + uint64 version, + address newTargetAddress ); } diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index a81d8d1193..123cda457f 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -30,18 +30,14 @@ contract Franklin is Storage, Config, Events { // mapping (uint32 => bool) tokenMigrated; /// @notice Franklin contract initialization - /// @param upgradeModeAddress Address of UpgradeMode contract /// @param initializationParameters Encoded representation of initialization parameters: /// _governanceAddress The address of Governance contract /// _verifierAddress The address of Verifier contract /// _ // FIXME: remove _genesisAccAddress /// _genesisRoot Genesis blocks (first block) root function initialize( - address upgradeModeAddress, bytes calldata initializationParameters ) external { - upgradeMode = UpgradeMode(upgradeModeAddress); - ( address _governanceAddress, address _verifierAddress, @@ -55,13 +51,6 @@ contract Franklin is Storage, Config, Events { blocks[0].stateRoot = _genesisRoot; } - /// @notice Checks that contract is ready to be upgraded - /// Ensures that all full exit requests received while waiting for the upgrade will be processed before finishing upgrade - /// @return Bool flag indicating that contract is ready to be upgraded - function readyToBeUpgraded() external 
view returns (bool) { - return !exodusMode && (firstPriorityRequestId >= requestsToProcessBeforeUpgrade); - } - /// @notice executes pending withdrawals /// @param _n The number of withdrawals to complete starting from oldest function completeWithdrawals(uint32 _n) external { @@ -719,10 +708,6 @@ contract Franklin is Storage, Config, Events { ); totalOpenPriorityRequests++; - - if (_opType == Operations.OpType.FullExit && !upgradeMode.isFinalizeStatusActive()) { - requestsToProcessBeforeUpgrade = firstPriorityRequestId + totalOpenPriorityRequests; - } } /// @notice Collects fees from provided requests number for the block validator, store it on her diff --git a/contracts/contracts/Governance.sol b/contracts/contracts/Governance.sol index 9d7c31f934..3d957a3393 100644 --- a/contracts/contracts/Governance.sol +++ b/contracts/contracts/Governance.sol @@ -1,16 +1,12 @@ pragma solidity 0.5.16; import "./Config.sol"; -import "./UpgradeMode.sol"; /// @title Governance Contract /// @author Matter Labs contract Governance is Config { - /// @notice UpgradeMode contract - UpgradeMode upgradeMode; - /// @notice Token added to Franklin net event TokenAdded( address token, @@ -33,15 +29,11 @@ contract Governance is Config { mapping(address => bool) public validators; /// @notice Governance contract initialization - /// @param upgradeModeAddress Address of UpgradeMode contract /// @param initializationParameters Encoded representation of initialization parameters: /// _networkGovernor The address of network governor function initialize( - address upgradeModeAddress, bytes calldata initializationParameters ) external { - upgradeMode = UpgradeMode(upgradeModeAddress); - ( address _networkGovernor ) = abi.decode(initializationParameters, (address)); diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index cdc6173241..c17e18e6f8 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -6,18 +6,11 @@ import "./Governance.sol"; import "./Verifier.sol"; import "./Operations.sol"; -import "./UpgradeMode.sol"; /// @title zkSync storage contract /// @author Matter Labs contract Storage { - /// @notice UpgradeMode contract - UpgradeMode upgradeMode; - - /// @notice Number of priority requests that must be processed at the time of finishing upgrade - uint64 requestsToProcessBeforeUpgrade; - /// @notice Verifier contract. 
Used to verify block proof and exit proof Verifier internal verifier; diff --git a/contracts/contracts/UpgradeMode.sol b/contracts/contracts/UpgradeMode.sol deleted file mode 100644 index ef6393a391..0000000000 --- a/contracts/contracts/UpgradeMode.sol +++ /dev/null @@ -1,117 +0,0 @@ -pragma solidity 0.5.16; - -import "./Events.sol"; -import "./Ownable.sol"; - - -/// @title UpgradeMode Contract -/// @author Matter Labs -contract UpgradeMode is UpgradeModeEvents, Ownable { - - /// @notice Maximal upgrade time (in seconds) - /// @dev After this period from the start of the upgrade anyone can cancel it forcibly - uint256 constant MAX_UPGRADE_PERIOD = 2 weeks; - - /// @notice Waiting period to activate finalize status mode (in seconds) - uint256 constant WAIT_UPGRADE_MODE_PERIOD = 10 days; - - /// @notice Version of upgradeable field - uint64 public version; - - /// @notice Flag indicating that wait upgrade mode is active - bool public waitUpgradeModeActive; - - /// @notice Flag indicating that finalize status is active - bool public finalizeStatusActive; - - /// @notice Time of activating waiting upgrade mode - /// @dev Will be equal to zero in case of not active mode - uint256 public activationTime; - - /// @notice Contract constructor - /// @dev Calls Ownable contract constructor - constructor() Ownable() public { - version = 1; - waitUpgradeModeActive = false; - finalizeStatusActive = false; - activationTime = 0; - } - - /// @notice Activates wait upgrade mode - function activate() external { - requireMaster(msg.sender); - require( - !waitUpgradeModeActive, - "uma11" - ); // uma11 - unable to activate active mode - - waitUpgradeModeActive = true; - finalizeStatusActive = false; - activationTime = now; - emit UpgradeModeActivated(version); - } - - /// @notice Cancels upgrade - function cancel() external { - requireMaster(msg.sender); - require( - waitUpgradeModeActive, - "umc11" - ); // umc11 - unable to cancel not active mode - - waitUpgradeModeActive = false; - finalizeStatusActive = false; - activationTime = 0; - emit UpgradeCanceled(version); - } - - /// @notice Force upgrade cancellation - function forceCancel() external { - requireMaster(msg.sender); - require( - waitUpgradeModeActive, - "ufc11" - ); // ufc11 - unable to cancel not active mode - require( - now >= activationTime + MAX_UPGRADE_PERIOD, - "ufc12" - ); // ufc12 - unable to force cancel upgrade until MAX_UPGRADE_PERIOD passes - - waitUpgradeModeActive = false; - finalizeStatusActive = false; - activationTime = 0; - emit UpgradeForciblyCanceled(version); - } - - /// @notice Checks that finalize status is active and activates it if needed - /// @return Bool flag indicating that finalize status is active - function isFinalizeStatusActive() public returns (bool) { - if (!waitUpgradeModeActive) { - return false; - } - if (finalizeStatusActive) { - return true; - } - if (now >= activationTime + WAIT_UPGRADE_MODE_PERIOD) { - finalizeStatusActive = true; - emit UpgradeModeFinalizeStatusActivated(version); - } - return finalizeStatusActive; - } - - /// @notice Finishes upgrade - function finish() external { - requireMaster(msg.sender); - require( - isFinalizeStatusActive(), - "umf11" - ); // umf11 - unable to finish upgrade without finalize status active - - waitUpgradeModeActive = false; - finalizeStatusActive = false; - activationTime = 0; - emit UpgradeCompleted(version); - version++; - } - -} diff --git a/contracts/contracts/UpgradeModule.sol b/contracts/contracts/UpgradeModule.sol new file mode 100644 index 
0000000000..b589e8360f --- /dev/null +++ b/contracts/contracts/UpgradeModule.sol @@ -0,0 +1,161 @@ +pragma solidity 0.5.16; + +import "./Events.sol"; +import "./Ownable.sol"; +import "./Proxy.sol"; + + +/// @title Upgrade Module Contract +/// @author Matter Labs +contract UpgradeModule is UpgradeEvents, Ownable { + + /// @notice Waiting period to activate finalize status mode (in seconds) + uint256 constant WAIT_UPGRADE_MODE_PERIOD = 2 weeks; + + /// @notice Versions of proxy contracts + mapping(address => uint64) public version; + + /// @notice Contract which processes priority operations + address public mainContractAddress; + + /// @notice Upgrade mode statuses + enum UpgradeStatus { + NotActive, + WaitUpgrade, + Finalize + } + + /// @notice Info for upgrade proxy + struct UpgradeInfo { + UpgradeStatus upgradeStatus; + + /// @notice Time of activating waiting upgrade mode + /// @dev Will be equal to zero in case of not active mode + uint256 activationTime; + + /// @notice Next target + /// @dev Will store zero in case of not active upgrade mode + address nextTarget; + + /// @notice Number of priority requests that must be verified at the time of finishing upgrade + /// @dev Will store zero in case of not active finalize status of upgrade mode + uint64 priorityRequestsToProcessBeforeUpgrade; + } + + /// @notice UpgradeInfo per each proxy + mapping(address => UpgradeInfo) public upgradeInfo; + + /// @notice Contract constructor + /// @param _mainContractAddress Address of contract which processes priority operations + /// @dev Calls Ownable contract constructor + constructor(address _mainContractAddress) Ownable() public { + mainContractAddress = _mainContractAddress; + } + + /// @notice Activates wait upgrade status + /// @param proxyAddress Address of proxy to process + /// @param newTarget New target + function upgradeProxy(address proxyAddress, address newTarget) external { + requireMaster(msg.sender); + require( + upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.NotActive, + "upa11" + ); // upa11 - unable to activate active upgrade mode + + Proxy(address(uint160(proxyAddress))).upgradeTarget(newTarget); + + upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.WaitUpgrade; + upgradeInfo[proxyAddress].activationTime = now; + upgradeInfo[proxyAddress].nextTarget = newTarget; + upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + + emit UpgradeModeActivated(proxyAddress, version[proxyAddress]); + } + + /// @notice Cancels upgrade + /// @param proxyAddress Address of proxy to process + function cancelProxyUpgrade(address proxyAddress) external { + requireMaster(msg.sender); + require( + upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, + "umc11" + ); // umc11 - unable to cancel not active upgrade mode + + upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].activationTime = 0; + upgradeInfo[proxyAddress].nextTarget = address(0); + upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + + emit UpgradeCanceled(proxyAddress, version[proxyAddress]); + } + + /// @notice Checks that finalize status is active and activates it if needed + /// @param proxyAddress Address of proxy to process + /// @return Bool flag indicating that finalize status is active after this call + function activeFinalizeStatusOfUpgrade(address proxyAddress) public returns (bool) { + require( + upgradeInfo[proxyAddress].upgradeStatus != 
UpgradeModule.UpgradeStatus.NotActive, + "uaf11" + ); // uaf11 - unable to activate finalize status in case of not active upgrade mode + + if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize) { + return true; + } + + if (now >= upgradeInfo[proxyAddress].activationTime + WAIT_UPGRADE_MODE_PERIOD) { + upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.Finalize; + + (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( + abi.encodeWithSignature("registeredPriorityOperations()") + ); + require( + callSuccess, + "uaf12" + ); // uaf12 - main contract static call failed + uint64 registeredPriorityOperations = abi.decode(encodedResult, (uint64)); + upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = registeredPriorityOperations; + + emit UpgradeModeFinalizeStatusActivated(proxyAddress, version[proxyAddress]); + return true; + } + else{ + return false; + } + } + + /// @notice Finishes upgrade + /// @param proxyAddress Address of proxy to process + /// @param newTargetInitializationParameters New target initialization parameters + function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { + requireMaster(msg.sender); + require( + upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize, + "umf11" + ); // umf11 - unable to finish upgrade without finalize status active + + (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( + abi.encodeWithSignature("verifiedPriorityOperations()") + ); + require( + callSuccess, + "umf12" + ); // umf12 - main contract static call failed + uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); + + require( + verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade, + "umf13" + ); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status + + Proxy(address(uint160(proxyAddress))).finishTargetUpgrade(upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters); + + emit UpgradeCompleted(proxyAddress, version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); + version[proxyAddress]++; + + upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].activationTime = 0; + upgradeInfo[proxyAddress].nextTarget = address(0); + upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + } + +} diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol index 194601cabe..e32928f50e 100644 --- a/contracts/contracts/Upgradeable.sol +++ b/contracts/contracts/Upgradeable.sol @@ -1,73 +1,40 @@ pragma solidity 0.5.16; import "./Ownable.sol"; -import "./UpgradeMode.sol"; /// @title Upgradeable contract /// @author Matter Labs contract Upgradeable is Ownable { - /// @notice Storage position of contract version index - bytes32 private constant versionPosition = keccak256("version"); - /// @notice Storage position of "target" (actual implementation address) bytes32 private constant targetPosition = keccak256("target"); - /// @notice Storage position of next "target" (in case the contract is in status of waiting to upgrade) - /// @dev Will store zero in case of not active upgrade mode - bytes32 private constant nextTargetPosition = keccak256("nextTarget"); - - /// @notice Storage position of UpgradeMode contract address - bytes32 private constant upgradeModeAddressPosition = 
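The module above drives a three-step lifecycle per proxy: upgradeProxy() enters the wait-upgrade status, activeFinalizeStatusOfUpgrade() switches to the finalize status once WAIT_UPGRADE_MODE_PERIOD has elapsed and snapshots registeredPriorityOperations() from the main contract, and finishProxyUpgrade() completes only after verifiedPriorityOperations() has caught up. A hedged TypeScript sketch of driving that flow from a script (the upgradeModule binding and parameter values are assumptions):

    import { Contract } from "ethers";

    // Drive one proxy through a full upgrade via UpgradeModule (assumes `upgradeModule` is an
    // ethers Contract bound to the module with its master wallet as signer).
    async function runUpgrade(upgradeModule: Contract, proxyAddress: string, newTarget: string, initParams: number[]) {
        // 1. Enter wait-upgrade status; the module also calls Proxy.upgradeTarget for its sanity checks.
        await (await upgradeModule.upgradeProxy(proxyAddress, newTarget)).wait();

        // 2. Once WAIT_UPGRADE_MODE_PERIOD has passed this activates the finalize status and records
        //    how many priority operations must be verified before the upgrade may complete.
        await (await upgradeModule.activeFinalizeStatusOfUpgrade(proxyAddress)).wait();

        // 3. Reverts with "umf13" until enough priority operations are verified; on success the proxy
        //    is re-pointed and the new target re-initialized with initParams.
        await (await upgradeModule.finishProxyUpgrade(proxyAddress, initParams)).wait();
    }
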
keccak256("UpgradeModeAddress"); - /// @notice Contract constructor - /// @dev Calls Ownable contract constructor and creates UpgradeMode contract + /// @dev Calls Ownable contract constructor constructor() Ownable() public { - setVersion(0); - setTarget(address(0)); - setNextTarget(address(0)); - setUpgradeModeAddress(address(new UpgradeMode())); + + } + + /// @notice Intercepts initialization calls + function initialize(bytes calldata) external pure { + revert("ini11"); // ini11 - interception of initialization call } /// @notice Upgradeable contract initialization - /// @param _target Initial implementation address - /// @param _targetInitializationParameters Target initialization parameters - function initialize(address _target, bytes calldata _targetInitializationParameters) external { + /// @param target Initial implementation address + /// @param targetInitializationParameters Target initialization parameters + function initializeTarget(address target, bytes calldata targetInitializationParameters) external { requireMaster(msg.sender); - require( - getVersion() == 0, - "uin11" - ); // uin11 - upgradeable contract already initialized - - setVersion(1); - setTarget(_target); + setTarget(target); (bool initializationSuccess, ) = getTarget().delegatecall( - abi.encodeWithSignature("initialize(address,bytes)", getUpgradeModeAddress(), _targetInitializationParameters) + abi.encodeWithSignature("initialize(bytes)", targetInitializationParameters) ); require( initializationSuccess, - "uin12" - ); // uin12 - target initialization failed - } - - /// @notice Returns contract version index - /// @return Contract version index - function getVersion() public view returns (uint64 version) { - bytes32 position = versionPosition; - assembly { - version := sload(position) - } - } - - /// @notice Sets new contract version index - /// @param _newVersion New contract version index - function setVersion(uint64 _newVersion) internal { - bytes32 position = versionPosition; - assembly { - sstore(position, _newVersion) - } + "uin11" + ); // uin11 - target initialization failed } /// @notice Returns target of contract @@ -88,115 +55,34 @@ contract Upgradeable is Ownable { } } - /// @notice Returns next target - /// @return Next target address - function getNextTarget() public view returns (address nextTarget) { - bytes32 position = nextTargetPosition; - assembly { - nextTarget := sload(position) - } - } - - /// @notice Sets new next target - /// @param _newNextTarget New next target value - function setNextTarget(address _newNextTarget) internal { - bytes32 position = nextTargetPosition; - assembly { - sstore(position, _newNextTarget) - } - } - - /// @notice Returns UpgradeMode contract address - /// @return UpgradeMode contract address - function getUpgradeModeAddress() public view returns (address upgradeModeAddress) { - bytes32 position = upgradeModeAddressPosition; - assembly { - upgradeModeAddress := sload(position) - } - } - - /// @notice Sets new UpgradeMode contract address - /// @param _newUpgradeModeAddress New UpgradeMode contract address - function setUpgradeModeAddress(address _newUpgradeModeAddress) internal { - bytes32 position = upgradeModeAddressPosition; - assembly { - sstore(position, _newUpgradeModeAddress) - } - } - /// @notice Starts upgrade - /// @param _newTarget Next actual implementation address - function upgradeTarget(address _newTarget) external { + /// @param newTarget New actual implementation address + function upgradeTarget(address newTarget) external view { 
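With the versioning and next-target bookkeeping moved into UpgradeModule, the slimmed-down Upgradeable keeps only the implementation address, still stored in the unstructured keccak256("target") slot. A small sketch of inspecting that slot off-chain, the same way the removed proxy test read it; the provider and proxy address are assumptions:

    import { ethers } from "ethers";

    // Read the implementation address a proxy currently points at straight from storage
    // (equivalent to calling getTarget()).
    async function readTarget(provider: ethers.providers.Provider, proxyAddress: string): Promise<string> {
        const word = await provider.getStorageAt(proxyAddress, ethers.utils.id("target"));
        // The slot holds a 32-byte word; the address sits in the low 20 bytes.
        const padded = ethers.utils.hexZeroPad(word, 32);
        return ethers.utils.getAddress("0x" + padded.slice(-40));
    }
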
requireMaster(msg.sender); require( - _newTarget != address(0), + newTarget != address(0), "uut11" - ); // uut11 - new actual implementation address can't be zero address + ); // uut11 - new actual implementation address can't be equal to zero require( - getTarget() != _newTarget, + getTarget() != newTarget, "uut12" ); // uut12 - new actual implementation address can't be equal to previous - - UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); - UpgradeMode.activate(); - - setNextTarget(_newTarget); - } - - /// @notice Cancels upgrade - function cancelUpgradeTarget() external { - requireMaster(msg.sender); - - UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); - UpgradeMode.cancel(); - - setNextTarget(address(0)); - } - - /// @notice Force upgrade cancellation - function forceCancelUpgradeTarget() external { - UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); - UpgradeMode.forceCancel(); - - setNextTarget(address(0)); - } - - /// @notice Checks that target is ready to be upgraded - /// @return Bool flag indicating that target is ready to be upgraded - function targetReadyToBeUpgraded() public returns (bool) { - (bool success, bytes memory result) = getTarget().delegatecall(abi.encodeWithSignature("readyToBeUpgraded()")); - require( - success, - "utr11" - ); // utr11 - target readyToBeUpgraded() call failed - - return abi.decode(result, (bool)); } /// @notice Finishes upgrade - /// @param _newTargetInitializationParameters New target initialization parameters - function finishTargetUpgrade(bytes calldata _newTargetInitializationParameters) external { + /// @param newTarget New target + /// @param newTargetInitializationParameters New target initialization parameters + function finishTargetUpgrade(address newTarget, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - require( - targetReadyToBeUpgraded(), - "ufu11" - ); // ufu11 - target is not ready to be upgraded - - UpgradeMode UpgradeMode = UpgradeMode(getUpgradeModeAddress()); - UpgradeMode.finish(); - - setVersion(getVersion() + 1); - - setTarget(getNextTarget()); - setNextTarget(address(0)); + setTarget(newTarget); (bool initializationSuccess, ) = getTarget().delegatecall( - abi.encodeWithSignature("initialize(address,bytes)", getUpgradeModeAddress(), _newTargetInitializationParameters) + abi.encodeWithSignature("initialize(bytes)", newTargetInitializationParameters) ); require( initializationSuccess, - "ufu12" - ); // ufu12 - target initialization failed + "ufu11" + ); // ufu11 - target initialization failed } } diff --git a/contracts/contracts/Verifier.sol b/contracts/contracts/Verifier.sol index 766444214a..61aa047a1d 100644 --- a/contracts/contracts/Verifier.sol +++ b/contracts/contracts/Verifier.sol @@ -1,7 +1,6 @@ pragma solidity 0.5.16; import "./VerificationKey.sol"; -import "./UpgradeMode.sol"; /// @title Verifier Contract @@ -10,21 +9,14 @@ import "./UpgradeMode.sol"; /// @author Matter Labs contract Verifier is VerificationKey { - /// @notice UpgradeMode contract - UpgradeMode upgradeMode; - /// @notice If this flag is true - dummy verification is used instead of full verifier bool constant DUMMY_VERIFIER = false; /// @notice Verifier contract initialization - /// @param upgradeModeAddress Address of UpgradeMode contract /// @param initializationParameters Encoded representation of initialization parameters function initialize( - address upgradeModeAddress, bytes calldata initializationParameters ) external { - upgradeMode = 
UpgradeMode(upgradeModeAddress); - // parameters are not used during initialization } diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index 85c573f02a..012946dee2 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -4,9 +4,13 @@ interface DummyTarget { function get_DUMMY_INDEX() external pure returns (uint256); - function initialize(address _address, bytes calldata _initializationParameters) external; + function initialize(bytes calldata initializationParameters) external; - function readyToBeUpgraded() external returns (bool); + function verifiedPriorityOperations() external returns (uint64); + + function registeredPriorityOperations() external returns (uint64); + + function verifyPriorityOperation() external; } @@ -17,19 +21,27 @@ contract DummyFirst is DummyTarget { return DUMMY_INDEX; } - function initialize(address _address, bytes calldata _initializationParameters) external { - bytes memory initializationParameters = _initializationParameters; + uint64 _verifiedPriorityOperations; + + function initialize(bytes calldata initializationParameters) external { bytes32 byte_0 = bytes32(uint256(uint8(initializationParameters[0]))); bytes32 byte_1 = bytes32(uint256(uint8(initializationParameters[1]))); assembly { - sstore(0, _address) sstore(1, byte_0) sstore(2, byte_1) } } - function readyToBeUpgraded() external returns (bool) { - return true; + function verifiedPriorityOperations() external returns (uint64){ + return _verifiedPriorityOperations; + } + + function registeredPriorityOperations() external returns (uint64){ + return 1; + } + + function verifyPriorityOperation() external { + _verifiedPriorityOperations++; } } @@ -41,19 +53,27 @@ contract DummySecond is DummyTarget { return DUMMY_INDEX; } - function initialize(address _address, bytes calldata _initializationParameters) external { - bytes memory initializationParameters = _initializationParameters; + uint64 _verifiedPriorityOperations; + + function initialize(bytes calldata initializationParameters) external { bytes32 byte_0 = bytes32(uint256(uint8(initializationParameters[0]))); bytes32 byte_1 = bytes32(uint256(uint8(initializationParameters[1]))); assembly { - sstore(0, _address) sstore(2, byte_0) sstore(3, byte_1) } } - function readyToBeUpgraded() external returns (bool) { - return false; + function verifiedPriorityOperations() external returns (uint64){ + return _verifiedPriorityOperations; + } + + function registeredPriorityOperations() external returns (uint64){ + return 0; + } + + function verifyPriorityOperation() external { + _verifiedPriorityOperations++; } } diff --git a/contracts/contracts/test/ZKSyncUnitTest.sol b/contracts/contracts/test/ZKSyncUnitTest.sol index 47c984c27f..99631112d8 100644 --- a/contracts/contracts/test/ZKSyncUnitTest.sol +++ b/contracts/contracts/test/ZKSyncUnitTest.sol @@ -1,7 +1,6 @@ pragma solidity 0.5.16; import "../generated/FranklinTest.sol"; -import "../generated/UpgradeModeTest.sol"; contract ZKSyncUnitTest is FranklinTest { @@ -13,7 +12,6 @@ contract ZKSyncUnitTest is FranklinTest { bytes32 _genesisRoot ) FranklinTest() public{ /// initialization - upgradeMode = new UpgradeModeTest(); verifier = VerifierTest(_verifierAddress); governance = GovernanceTest(_governanceAddress); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 1a05c6edc2..2d9f43ce51 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -109,59 +109,6 @@ export async function 
deployProxy( } } -// note that this upgrade must be finished later -export async function upgradeContract( - wallet, - proxyContract, - newTargetAddress, -) { - try { - const proxy = new ethers.Contract(proxyContract.address, proxyContractCode.interface, wallet); - const tx = await proxy.upgradeTarget(newTargetAddress); - await tx.wait(); - - return true; - } catch (err) { - return false; - } -} - -// cancels upgrade -export async function cancelUpgrade( - wallet, - proxyContract, -) { - try { - const proxy = new ethers.Contract(proxyContract.address, proxyContractCode.interface, wallet); - const tx = await proxy.cancelUpgradeTarget(); - await tx.wait(); - - return true; - } catch (err) { - return false; - } -} - -// trying to finish the upgrade -export async function finishUpgrade( - wallet, - proxyContract, - initArgs, - initArgsValues, -) { - try { - const proxy = new ethers.Contract(proxyContract.address, proxyContractCode.interface, wallet); - - const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.finishTargetUpgrade(initArgsInBytes); - await tx.wait(); - - return true; - } catch (err) { - return false; - } -} - export async function deployGovernance( wallet, proxyCode, @@ -175,7 +122,7 @@ export async function deployGovernance( gasLimit: 3000000, }); const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initialize(governance.address, initArgsInBytes); + const tx = await proxy.initializeTarget(governance.address, initArgsInBytes); await tx.wait(); const returnContract = new ethers.Contract(proxy.address, governanceCode.interface, wallet); @@ -198,7 +145,7 @@ export async function deployVerifier( gasLimit: 3000000, }); const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initialize(verifier.address, initArgsInBytes); + const tx = await proxy.initializeTarget(verifier.address, initArgsInBytes); await tx.wait(); const returnContract = new ethers.Contract(proxy.address, verifierCode.interface, wallet); @@ -232,7 +179,7 @@ export async function deployFranklin( gasLimit: 6000000, }); const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const initTx = await proxy.initialize(contract.address, initArgsInBytes); + const initTx = await proxy.initializeTarget(contract.address, initArgsInBytes); await initTx.wait(); const returnContract = new ethers.Contract(proxy.address, franklinCode.interface, wallet); diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index 028f670c24..f12a5e47fc 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -6,9 +6,6 @@ const { bigNumberify, parseEther, hexlify, formatEther } = require("ethers/utils const IERC20_INTERFACE = require("openzeppelin-solidity/build/contracts/IERC20"); const abi = require('ethereumjs-abi') -/// Skipping slow tests -const SKIP_TEST = false; - // For: geth // const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); @@ -47,7 +44,7 @@ async function deployProxyContract( gasLimit: 3000000, }); const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initialize(contract.address, initArgsInBytes); + const tx = await proxy.initializeTarget(contract.address, initArgsInBytes); await tx.wait(); const returnContract = new ethers.Contract(proxy.address, contractCode.interface, wallet); @@ -78,5 +75,4 @@ module.exports = { deployProxyContract, getCallRevertReason, 
IERC20_INTERFACE, - SKIP_TEST, } diff --git a/contracts/test/unit_tests/proxy_test.js b/contracts/test/unit_tests/proxy_test.js deleted file mode 100644 index 943893ffd1..0000000000 --- a/contracts/test/unit_tests/proxy_test.js +++ /dev/null @@ -1,175 +0,0 @@ -const ethers = require("ethers") -const { expect } = require("chai") -const { deployContract } = require("ethereum-waffle"); -const { provider, wallet, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = require("./common") - -const { performance } = require('perf_hooks'); - -const proxyTestContractCode = require('../../build/ProxyTest'); - -// some random constants for checking write and read from storage -const bytes = [133, 174, 97, 255] - -describe("Proxy unit tests", function () { - this.timeout(50000); - - let proxyTestContract - let proxyDummyInterface - let upgradeModeTestContract - let DummyFirst - let DummySecond - before(async () => { - proxyTestContract = await deployTestContract('../../build/ProxyTest') - proxyDummyInterface = new ethers.Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); - upgradeModeTestContract = new ethers.Contract(proxyTestContract.getUpgradeModeTestAddress(), require('../../build/UpgradeModeTest').interface, wallet); - DummyFirst = await deployTestContract('../../build/DummyFirst') - DummySecond = await deployTestContract('../../build/DummySecond') - await proxyTestContract.initialize(DummyFirst.address, [bytes[0], bytes[1]]); - }); - - it("checking Proxy creation", async () => { - // check version - expect(await proxyTestContract.getVersion()).to.equal(1) - - // check target storage - expect((await provider.getStorageAt(proxyTestContract.address, ethers.utils.id("target"))).toLowerCase()) - .equal(DummyFirst.address.toLowerCase()); - expect((await proxyTestContract.getTarget()).toLowerCase()) - .equal(DummyFirst.address.toLowerCase()); - - // check dummy index - expect(await proxyDummyInterface.get_DUMMY_INDEX()) - .to.equal(1); - - // check initial storage - expect((await provider.getStorageAt(proxyTestContract.address, 0)).toLowerCase()) - .equal((await proxyTestContract.getUpgradeModeTestAddress()).toLowerCase()); - expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) - .to.equal(bytes[0]); - expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) - .to.equal(bytes[1]); - }); - - it("checking that requireMaster calls present", async () => { - let proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); - expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.upgradeTarget(DummySecond.address) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.cancelUpgradeTarget() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.finishTargetUpgrade([]) )).revertReason).equal("oro11") - - // bonus: check that force cancellation do not have requireMaster call - expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.forceCancelUpgradeTarget() )).revertReason).to.not.equal("oro11") - }); - - it("check Proxy reverts", async () => { - expect((await getCallRevertReason( () => proxyTestContract.initialize(DummyFirst.address, []) )).revertReason).equal("uin11"); - expect((await getCallRevertReason( () => proxyDummyInterface.initialize(DummyFirst.address, []) )).revertReason).equal("uin11"); - - let 
proxyTestContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); - expect((await getCallRevertReason( () => proxyTestContract_with_wallet2_signer.initialize(DummyFirst.address, []) )).revertReason).equal("oro11"); - - let proxyDummyInterface_with_wallet2_signer = await proxyDummyInterface.connect(wallet2); - expect((await getCallRevertReason( () => proxyDummyInterface_with_wallet2_signer.initialize(DummyFirst.address, []) )).revertReason).equal("oro11"); - - expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget("0x0000000000000000000000000000000000000000") )).revertReason).equal("uut11"); - expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).revertReason).equal("uut12"); - }); - - it("check upgrade canceling", async () => { - // activate and cancel - await proxyTestContract.upgradeTarget(DummySecond.address); - expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummySecond.address))).revertReason).equal("uma11"); - await proxyTestContract.cancelUpgradeTarget(); - expect((await getCallRevertReason( () => proxyTestContract.cancelUpgradeTarget())).revertReason).equal("umc11"); - }); - - if (SKIP_TEST) { - it.skip("checking that the upgrade is done correctly", async () => {}); - } - else { - it("checking that the upgrade is done correctly", async () => { - let start_time = performance.now(); - - // activate - await proxyTestContract.upgradeTarget(DummySecond.address); - - let activated_time = performance.now(); - - // wait and finish upgrade - let all_time_in_sec = parseInt(await upgradeModeTestContract.get_WAIT_UPGRADE_MODE_PERIOD()); - for (let step = 1; step <= 3; step++) { - if (step != 3) { - while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { - // wait - } - } else { - while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { - // wait - } - } - - if (step != 3) { - expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).revertReason).equal("umf11"); - } else { - await proxyTestContract.finishTargetUpgrade([bytes[2], bytes[3]]); - } - } - - // check dummy index - expect(await proxyDummyInterface.get_DUMMY_INDEX()) - .to.equal(2); - - // check updated storage - expect((await provider.getStorageAt(proxyTestContract.address, 0)).toLowerCase()) - .equal((await proxyTestContract.getUpgradeModeTestAddress()).toLowerCase()); - expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) - .to.equal(bytes[0]); - expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) - .to.equal(bytes[2]); - expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 3))) - .to.equal(bytes[3]); - }); - } - - if (SKIP_TEST) { - it.skip("checking that force cancellation works correctly", async () => {}); - } - else { - it("checking that force cancellation works correctly", async () => { - expect((await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).revertReason).equal("ufc11"); - - let start_time = performance.now(); - - // activate - await proxyTestContract.upgradeTarget(DummyFirst.address); - - let activated_time = performance.now(); - - // wait and finish upgrade - let all_time_in_sec = parseInt(await upgradeModeTestContract.get_MAX_UPGRADE_PERIOD()); - for (let step = 1; step <= 3; step++) { - if (step != 3) { - while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { - // wait - } - } else { - while 
((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { - // wait - } - } - - if (step != 3) { - expect((await getCallRevertReason( () => proxyTestContract.forceCancelUpgradeTarget())).revertReason).equal("ufc12"); - } else { - expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade([]))).revertReason).equal("ufu11"); - await proxyTestContract.forceCancelUpgradeTarget(); - } - } - - expect(await upgradeModeTestContract.waitUpgradeModeActive()).to.equal(false) - // check dummy index - expect(await proxyDummyInterface.get_DUMMY_INDEX()) - .to.equal(2); - }); - } - -}); diff --git a/contracts/test/unit_tests/proxy_test.ts b/contracts/test/unit_tests/proxy_test.ts new file mode 100644 index 0000000000..f5906f8b87 --- /dev/null +++ b/contracts/test/unit_tests/proxy_test.ts @@ -0,0 +1,36 @@ +const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); +const { wallet, wallet1, wallet2, deployTestContract, getCallRevertReason } = require("./common") + +import {Contract, ethers} from "ethers"; +import {AddressZero} from "ethers/constants"; + +describe("Proxy unit tests", function () { + this.timeout(50000); + + let proxyTestContract + let proxyDummyInterface + let DummyFirst + before(async () => { + proxyTestContract = await deployTestContract('../../build/ProxyTest') + proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); + DummyFirst = await deployTestContract('../../build/DummyFirst') + await proxyTestContract.initializeTarget(DummyFirst.address, [1, 2]); + }); + + it("checking that requireMaster calls present", async () => { + let testContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.initializeTarget(AddressZero, []) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.finishTargetUpgrade(AddressZero, []) )).revertReason).equal("oro11") + }); + + it("check Proxy reverts", async () => { + expect((await getCallRevertReason( () => proxyTestContract.initialize([]) )).revertReason).equal("ini11") + expect((await getCallRevertReason( () => proxyTestContract.initializeTarget(proxyTestContract.address, []) )).revertReason).equal("uin11") + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(AddressZero) )).revertReason).equal("uut11") + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).revertReason).equal("uut12") + expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade(proxyTestContract.address, []) )).revertReason).equal("ufu11") + }); + +}); diff --git a/contracts/test/unit_tests/upgradeMode_test.js b/contracts/test/unit_tests/upgradeMode_test.js deleted file mode 100644 index 77bc51075c..0000000000 --- a/contracts/test/unit_tests/upgradeMode_test.js +++ /dev/null @@ -1,147 +0,0 @@ -const { expect } = require("chai") -const { deployContract } = require("ethereum-waffle"); -const { wallet1, wallet2, deployTestContract, getCallRevertReason, SKIP_TEST } = require("./common") - -const { performance } = require('perf_hooks'); - - -describe("UpgradeMode unit tests", function () { - this.timeout(50000); - - let testContract - before(async () => { - testContract = await deployContract(wallet1, 
require('../../build/UpgradeModeTest'), [], { - gasLimit: 6000000, - }) - }); - - it("checking that requireMaster calls present", async () => { - let testContract_with_wallet2_signer = await testContract.connect(wallet2); - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.activate() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.cancel() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.isFinalizeStatusActive() )).revertReason).equal("VM did not revert") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.forceCancel() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.finish() )).revertReason).equal("oro11") - }); - - it("test activate, test cancel, test finish without finalize status active", async () => { - // activate - await expect(testContract.activate()) - .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(1); - - expect(await testContract.waitUpgradeModeActive()).to.equal(true) - await testContract.isFinalizeStatusActive(); - expect(await testContract.finalizeStatusActive()).to.equal(false) - - expect((await getCallRevertReason( () => testContract.activate() )).revertReason).equal("uma11") - - // cancel - await expect(testContract.cancel()) - .to.emit(testContract, 'UpgradeCanceled') - .withArgs(1); - - expect(await testContract.waitUpgradeModeActive()).to.equal(false) - - expect((await getCallRevertReason( () => testContract.cancel() )).revertReason).equal("umc11") - - // finish - expect((await getCallRevertReason( () => testContract.finish() )).revertReason).equal("umf11") - }); - - if (SKIP_TEST) { - it.skip("checking that the upgrade is done correctly", async () => {}); - } - else { - it("checking that the upgrade is done correctly", async () => { - let start_time = performance.now(); - - // activate - await expect(testContract.activate()) - .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(1); - - let activated_time = performance.now(); - - // wait and activate finalize status - let all_time_in_sec = parseInt(await testContract.get_WAIT_UPGRADE_MODE_PERIOD()); - for (let step = 1; step <= 3; step++) { - if (step != 3) { - while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { - // wait - } - } else { - while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { - // wait - } - } - - if (step != 3) { - await testContract.isFinalizeStatusActive(); - expect(await testContract.finalizeStatusActive()).to.equal(false) - } else { - await expect(testContract.isFinalizeStatusActive()) - .to.emit(testContract, 'UpgradeModeFinalizeStatusActivated') - .withArgs(1); - expect(await testContract.finalizeStatusActive()).to.equal(true) - } - } - - // finish - await expect(testContract.finish()) - .to.emit(testContract, 'UpgradeCompleted') - .withArgs(1); - - - // one more activate and cancel with version equal to 2 - await expect(testContract.activate()) - .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(2); - await expect(testContract.cancel()) - .to.emit(testContract, 'UpgradeCanceled') - .withArgs(2); - }); - } - - if (SKIP_TEST) { - it.skip("checking that force cancellation works correctly", async () => {}); - } - else { - it("checking that force cancellation works correctly", async () => { - let start_time = performance.now(); - - // activate - await 
expect(testContract.activate()) - .to.emit(testContract, 'UpgradeModeActivated') - .withArgs(2); - - let activated_time = performance.now(); - - // wait and force cancel - let all_time_in_sec = parseInt(await testContract.get_MAX_UPGRADE_PERIOD()); - for (let step = 1; step <= 5; step++) { - if (step != 5) { - while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { - // wait - } - } else { - while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { - // wait - } - } - - if (step != 5) { - expect((await getCallRevertReason( () => testContract.forceCancel() )).revertReason).equal("ufc12") - } else { - await expect(testContract.forceCancel()) - .to.emit(testContract, 'UpgradeForciblyCanceled') - .withArgs(2); - expect(await testContract.waitUpgradeModeActive()).to.equal(false) - } - } - - expect((await getCallRevertReason( () => testContract.forceCancel() )).revertReason).equal("ufc11") - }); - } - -}); diff --git a/contracts/test/unit_tests/upgradeModule_test.ts b/contracts/test/unit_tests/upgradeModule_test.ts new file mode 100644 index 0000000000..295d22f560 --- /dev/null +++ b/contracts/test/unit_tests/upgradeModule_test.ts @@ -0,0 +1,127 @@ +import {AddressZero} from "ethers/constants"; + +const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); +const { provider, wallet, wallet1, wallet2, deployTestContract, getCallRevertReason } = require("./common") + +const { performance } = require('perf_hooks'); + +// some random constants for checking write and read from storage +const bytes = [133, 174, 97, 255] + +import {Contract, ethers} from "ethers"; + +describe("UpgradeModule unit tests", function () { + this.timeout(50000); + + let upgradeModuleContract + let proxyTestContract + let proxyDummyInterface + let DummyFirst + let DummySecond + before(async () => { + proxyTestContract = await deployTestContract('../../build/ProxyTest') + proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); + DummyFirst = await deployTestContract('../../build/DummyFirst') + DummySecond = await deployTestContract('../../build/DummySecond') + await proxyTestContract.initializeTarget(DummyFirst.address, [bytes[0], bytes[1]]); + upgradeModuleContract = await deployContract(wallet, require('../../build/UpgradeModuleTest'), [proxyTestContract.address], { + gasLimit: 6000000, + }) + proxyTestContract.transferMastership(upgradeModuleContract.address); + }); + + it("check initial dummy index and storage", async () => { + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(1); + + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) + .to.equal(bytes[0]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) + .to.equal(bytes[1]); + }); + + it("checking that requireMaster calls present", async () => { + let upgradeModuleContract_with_wallet2_signer = await upgradeModuleContract.connect(wallet2); + expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.upgradeProxy(AddressZero, AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.cancelProxyUpgrade(AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") + }); + + it("check UpgradeModule reverts; activate and cancel 
upgrade", async () => { + expect((await getCallRevertReason( () => upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") + expect((await getCallRevertReason( () => upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") + expect((await getCallRevertReason( () => upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") + + expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, AddressZero) )).revertReason).equal("uut11") + expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummyFirst.address) )).revertReason).equal("uut12") + await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) + .to.emit(upgradeModuleContract, 'UpgradeModeActivated') + .withArgs(proxyTestContract.address, 0) + expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") + await expect(upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address)) + .to.emit(upgradeModuleContract, 'UpgradeCanceled') + .withArgs(proxyTestContract.address, 0) + }); + + it("checking that the upgrade works correctly", async () => { + let start_time = performance.now(); + + // activate + await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) + .to.emit(upgradeModuleContract, 'UpgradeModeActivated') + .withArgs(proxyTestContract.address, 0) + + let activated_time = performance.now(); + + // wait and activate finalize status + let all_time_in_sec = parseInt(await upgradeModuleContract.get_WAIT_UPGRADE_MODE_PERIOD()); + for (let step = 1; step <= 3; step++) { + if (step != 3) { + while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + // wait + } + } else { + while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + // wait + } + } + + if (step != 3) { + await upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address); + } else { + await expect(upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address)) + .to.emit(upgradeModuleContract, 'UpgradeModeFinalizeStatusActivated') + .withArgs(proxyTestContract.address, 0) + } + } + + // finish upgrade without verifying priority operations + expect((await getCallRevertReason( () => upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf13") + // finish upgrade + await proxyDummyInterface.verifyPriorityOperation(); + await expect(upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, [bytes[2], bytes[3]])) + .to.emit(upgradeModuleContract, 'UpgradeCompleted') + .withArgs(proxyTestContract.address, 0, DummySecond.address) + + // check dummy index and updated storage + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(2); + + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 1))) + .to.equal(bytes[0]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 2))) + .to.equal(bytes[2]); + expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 3))) + .to.equal(bytes[3]); + + // one more activate and cancel with version equal to 1 + await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummyFirst.address)) + 
.to.emit(upgradeModuleContract, 'UpgradeModeActivated') + .withArgs(proxyTestContract.address, 1) + await expect(upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address)) + .to.emit(upgradeModuleContract, 'UpgradeCanceled') + .withArgs(proxyTestContract.address, 1); + }); + +}); From 25e259c0eadefbab1f0f0b765a1bb84f1f2e116d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 6 Mar 2020 23:34:06 +0200 Subject: [PATCH 014/186] Removed forceCancel from prepare contracts script --- bin/prepare-test-contracts.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index db7bc73d99..12bb06b958 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -35,7 +35,6 @@ ssed 's/StorageTest/Storage/' -i $OUT_DIR/UpgradeableTest.sol # Workaround -> upgrade module has Proxy in method name. ssed 's/upgradeProxyTest/upgradeProxy/' -i $OUT_DIR/UpgradeModuleTest.sol ssed 's/cancelProxyTestUpgrade/cancelProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol -ssed 's/forceCancelProxyTestUpgrade/forceCancelProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol ssed 's/finishProxyTestUpgrade/finishProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol From b2bf9ebeaf6101ba5312eeefd8b5135bc49762bc Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 10 Mar 2020 15:58:06 +0200 Subject: [PATCH 015/186] Added to Franklin functions to call from UpgradeModule --- contracts/contracts/Franklin.sol | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 56aeee8e5e..3452247c0e 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -53,6 +53,14 @@ contract Franklin is Storage, Config, Events { blocks[0].stateRoot = _genesisRoot; } + function registeredPriorityOperations() public view returns (uint64) { + return firstPriorityRequestId + totalOpenPriorityRequests; + } + + function verifiedPriorityOperations() public view returns (uint64) { + return firstPriorityRequestId; + } + /// @notice executes pending withdrawals /// @param _n The number of withdrawals to complete starting from oldest function completeWithdrawals(uint32 _n) external { From 9e491c5e64b63fd42c7214f373a71ce1037c209b Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Wed, 11 Mar 2020 18:53:02 +0200 Subject: [PATCH 016/186] Fixed integration-simple and tree restore --- core/data_restore/src/contract_functions.rs | 45 ++++++++++++--- core/data_restore/src/rollup_ops.rs | 64 +++++++++++---------- core/models/src/node/operations.rs | 2 +- js/tests/package.json | 2 +- js/tests/yarn.lock | 28 +++++++-- 5 files changed, 94 insertions(+), 47 deletions(-) diff --git a/core/data_restore/src/contract_functions.rs b/core/data_restore/src/contract_functions.rs index 34f8bffb1d..31eeaa0cc5 100644 --- a/core/data_restore/src/contract_functions.rs +++ b/core/data_restore/src/contract_functions.rs @@ -1,24 +1,53 @@ +extern crate ethabi; use crate::eth_tx_helpers::get_input_data_from_ethereum_transaction; use models::node::account::Account; -use models::params::{ETH_ADDRESS_BIT_WIDTH, INPUT_DATA_ROOT_HASH_BYTES_WIDTH}; +use models::params::{INPUT_DATA_ROOT_HASH_BYTES_WIDTH}; use web3::contract::{Contract, Options}; use web3::futures::Future; use web3::types::{Address, BlockNumber, Transaction, U256}; use web3::Transport; -/// Returns Rollup genesis (fees) account from the input of the Rollup contract creation transaction +/// Returns Rollup genesis (fees) account from the input of 
the Rollup contract initialization transaction /// /// # Arguments /// -/// * `transaction` - Ethereum Rollup contract creation transaction description +/// * `transaction` - Ethereum Rollup contract initialization transaction description /// pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result { + // encoded target address and targetInitializationParameters let input_data = get_input_data_from_ethereum_transaction(&genesis_transaction)?; - let genesis_operator_address = Address::from_slice( - &input_data[input_data.len() - INPUT_DATA_ROOT_HASH_BYTES_WIDTH - ETH_ADDRESS_BIT_WIDTH / 8 - ..input_data.len() - INPUT_DATA_ROOT_HASH_BYTES_WIDTH], - ); - Ok(Account::default_with_address(&genesis_operator_address)) + // encoded targetInitializationParameters + let encoded_parameters; + if let Ok(parameters) = ethabi::decode(vec![ethabi::ParamType::Address, ethabi::ParamType::Bytes].as_slice(), input_data.as_slice()) { + if let ethabi::Token::Bytes(parameters) = ¶meters.clone()[1] { + encoded_parameters = (*parameters).clone().to_vec(); + } + else { + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get encoded parameters from target initialization transaction").into()); + } + } + else{ + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get encoded parameters from target initialization transaction").into()); + } + let input_types = vec![ + ethabi::ParamType::Address, + ethabi::ParamType::Address, + ethabi::ParamType::Address, + ethabi::ParamType::FixedBytes(INPUT_DATA_ROOT_HASH_BYTES_WIDTH), + ]; + let decoded_parameters; + if let Ok(parameters) = ethabi::decode(input_types.as_slice(), encoded_parameters.as_slice()) { + decoded_parameters = parameters; + } + else{ + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decode parameters of initialiation").into()); + } + if let Some(ethabi::Token::Address(genesis_operator_address)) = decoded_parameters.get(2) { + Ok(Account::default_with_address(&genesis_operator_address)) + } + else{ + Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get genesis operator address from decoded parameters").into()) + } } /// Returns total number of verified blocks on Rollup contract diff --git a/core/data_restore/src/rollup_ops.rs b/core/data_restore/src/rollup_ops.rs index 3c8b7f0825..b65c7d9fe4 100755 --- a/core/data_restore/src/rollup_ops.rs +++ b/core/data_restore/src/rollup_ops.rs @@ -1,16 +1,8 @@ use crate::eth_tx_helpers::{get_ethereum_transaction, get_input_data_from_ethereum_transaction}; use crate::events::BlockEvent; -use failure::format_err; use models::node::operations::FranklinOp; -use models::primitives::bytes_slice_to_uint32; use web3::{Transport, Web3}; -use models::params::{ - INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH, INPUT_DATA_EMPTY_BYTES_WIDTH, - INPUT_DATA_FEE_ACC_BYTES_WIDTH, INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET, - INPUT_DATA_ROOT_BYTES_WIDTH, -}; - /// Description of a Rollup operations block #[derive(Debug, Clone)] pub struct RollupOpsBlock { @@ -36,12 +28,39 @@ impl RollupOpsBlock { ) -> Result { let transaction = get_ethereum_transaction(web3, &event_data.transaction_hash)?; let input_data = get_input_data_from_ethereum_transaction(&transaction)?; - let commitment_data = &input_data[INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH - + INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET - + INPUT_DATA_ROOT_BYTES_WIDTH - + INPUT_DATA_EMPTY_BYTES_WIDTH..]; - let fee_account = RollupOpsBlock::get_fee_account_from_tx_input(&input_data)?; 
- let ops = RollupOpsBlock::get_rollup_ops_from_data(commitment_data)?; + let block_commitment_types = vec![ + ethabi::ParamType::Uint(32), + ethabi::ParamType::Uint(24), + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::Bytes, + ethabi::ParamType::Bytes, + ethabi::ParamType::Array(Box::new(ethabi::ParamType::Uint(32))), + ]; + + let decoded_commitment_parameters; + if let Ok(parameters) = ethabi::decode(block_commitment_types.as_slice(), input_data.as_slice()) { + decoded_commitment_parameters = parameters; + } + else { + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameters from commitment transaction").into()); + } + + let ops; + if let Some(ethabi::Token::Bytes(public_data)) = decoded_commitment_parameters.get(3) { + ops = RollupOpsBlock::get_rollup_ops_from_data(public_data.as_slice())?; + } + else { + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get public data from decoded commitment parameters").into()); + } + + let fee_account; + if let Some(ethabi::Token::Uint(fee_acc)) = decoded_commitment_parameters.get(1) { + fee_account = fee_acc.as_u32(); + } + else { + return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get fee_account address from decoded commitment parameters").into()); + } + let block = RollupOpsBlock { block_num: event_data.block_num, ops, @@ -74,23 +93,6 @@ impl RollupOpsBlock { } Ok(ops) } - - /// Returns fee account from Ethereum transaction input data - /// - /// # Arguments - /// - /// * `input` - Ethereum transaction input - /// - fn get_fee_account_from_tx_input(input_data: &[u8]) -> Result { - Ok(bytes_slice_to_uint32( - &input_data[INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH - + INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET - - INPUT_DATA_FEE_ACC_BYTES_WIDTH - ..INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH - + INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET], - ) - .ok_or_else(|| format_err!("Cant convert bytes to fee account number"))?) 
- } } #[cfg(test)] diff --git a/core/models/src/node/operations.rs b/core/models/src/node/operations.rs index a850427dd8..5b85b7c79d 100644 --- a/core/models/src/node/operations.rs +++ b/core/models/src/node/operations.rs @@ -90,7 +90,7 @@ impl NoopOp { pub fn from_public_data(bytes: &[u8]) -> Result { ensure!( bytes == [0, 0, 0, 0, 0, 0, 0, 0], - "Wrong pubdata for noop operation" + format!("Wrong pubdata for noop operation {:?}", bytes) ); Ok(Self {}) } diff --git a/js/tests/package.json b/js/tests/package.json index 994caa420f..17969fbadd 100644 --- a/js/tests/package.json +++ b/js/tests/package.json @@ -7,7 +7,7 @@ "@types/node": "^12.12.14", "array-flat-polyfill": "^1.0.1", "cli-progress": "^3.5.0", - "ethers": "^4.0.45", + "ethers": "4.0.33", "openzeppelin-solidity": "^2.4.0", "ts-node": "^8.5.4", "typescript": "^3.7.4", diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index be4b8c7d57..e3ac94f1b5 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -2,6 +2,11 @@ # yarn lockfile v1 +"@types/node@^10.3.2": + version "10.17.17" + resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.17.tgz#7a183163a9e6ff720d86502db23ba4aade5999b8" + integrity sha512-gpNnRnZP3VWzzj5k3qrpRC6Rk3H/uclhAVo1aIvwzK5p5cOrs9yEyQ8H/HBsBY0u5rrWxXEiVPQ0dEB6pkjE8Q== + "@types/node@^12.12.14": version "12.12.25" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.25.tgz#792c0afb798f1dd681dce9c4b4c431f7245a0a42" @@ -126,7 +131,17 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== -elliptic@6.5.2, elliptic@^6.5.0: +elliptic@6.3.3: + version "6.3.3" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.3.3.tgz#5482d9646d54bcb89fd7d994fc9e2e9568876e3f" + integrity sha1-VILZZG1UvLif19mU/J4ulWiHbj8= + dependencies: + bn.js "^4.4.0" + brorand "^1.0.1" + hash.js "^1.0.0" + inherits "^2.0.1" + +elliptic@^6.5.0: version "6.5.2" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== @@ -191,14 +206,15 @@ es6-symbol@^3.1.1, es6-symbol@~3.1.3: d "^1.0.1" ext "^1.1.2" -ethers@^4.0.45: - version "4.0.45" - resolved "https://registry.yarnpkg.com/ethers/-/ethers-4.0.45.tgz#8d4cd764d7c7690836b583d4849203c225eb56e2" - integrity sha512-N/Wmc6Mw4pQO+Sss1HnKDCSS6KSCx0luoBMiPNq+1GbOaO3YaZOyplBEhj+NEoYsizZYODtkITg2oecPeNnidQ== +ethers@4.0.33: + version "4.0.33" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-4.0.33.tgz#f7b88d2419d731a39aefc37843a3f293e396f918" + integrity sha512-lAHkSPzBe0Vj+JrhmkEHLtUEKEheVktIjGDyE9gbzF4zf1vibjYgB57LraDHu4/ItqWVkztgsm8GWqcDMN+6vQ== dependencies: + "@types/node" "^10.3.2" aes-js "3.0.0" bn.js "^4.4.0" - elliptic "6.5.2" + elliptic "6.3.3" hash.js "1.1.3" js-sha3 "0.5.7" scrypt-js "2.0.4" From c09153db043c06a2f9e5f5ab4e8c5057c04b42a2 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 12 Mar 2020 10:22:29 +0200 Subject: [PATCH 017/186] Change naming from prior request to prior operation --- contracts/contracts/UpgradeModule.sol | 14 +++++++------- contracts/contracts/Upgradeable.sol | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/contracts/contracts/UpgradeModule.sol b/contracts/contracts/UpgradeModule.sol index b589e8360f..4a59e7718b 100644 --- a/contracts/contracts/UpgradeModule.sol +++ 
b/contracts/contracts/UpgradeModule.sol @@ -37,9 +37,9 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @dev Will store zero in case of not active upgrade mode address nextTarget; - /// @notice Number of priority requests that must be verified at the time of finishing upgrade + /// @notice Number of priority operations that must be verified at the time of finishing upgrade /// @dev Will store zero in case of not active finalize status of upgrade mode - uint64 priorityRequestsToProcessBeforeUpgrade; + uint64 priorityOperationsToProcessBeforeUpgrade; } /// @notice UpgradeInfo per each proxy @@ -67,7 +67,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.WaitUpgrade; upgradeInfo[proxyAddress].activationTime = now; upgradeInfo[proxyAddress].nextTarget = newTarget; - upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; emit UpgradeModeActivated(proxyAddress, version[proxyAddress]); } @@ -84,7 +84,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); - upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; emit UpgradeCanceled(proxyAddress, version[proxyAddress]); } @@ -113,7 +113,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { "uaf12" ); // uaf12 - main contract static call failed uint64 registeredPriorityOperations = abi.decode(encodedResult, (uint64)); - upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = registeredPriorityOperations; + upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = registeredPriorityOperations; emit UpgradeModeFinalizeStatusActivated(proxyAddress, version[proxyAddress]); return true; @@ -143,7 +143,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); require( - verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade, + verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13" ); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status @@ -155,7 +155,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); - upgradeInfo[proxyAddress].priorityRequestsToProcessBeforeUpgrade = 0; + upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; } } diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol index e32928f50e..c797325a48 100644 --- a/contracts/contracts/Upgradeable.sol +++ b/contracts/contracts/Upgradeable.sol @@ -74,8 +74,8 @@ contract Upgradeable is Ownable { /// @param newTargetInitializationParameters New target initialization parameters function finishTargetUpgrade(address newTarget, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - setTarget(newTarget); + setTarget(newTarget); (bool initializationSuccess, ) = getTarget().delegatecall( abi.encodeWithSignature("initialize(bytes)", newTargetInitializationParameters) ); 
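As an aside to the data-restore changes above: the `ethabi` decoding added to `rollup_ops.rs` is easier to read in isolation. The sketch below assumes the same six-token argument layout that patch decodes and that the 4-byte selector has already been stripped from the calldata (as `get_input_data_from_ethereum_transaction` does); the function name and the comments on tokens other than the fee account (index 1) and the public data (index 3) are illustrative assumptions, not taken from the repository.

    use ethabi::{decode, ParamType, Token};

    // Minimal sketch: pull (fee_account, public_data) out of block commitment calldata.
    // `calldata` is assumed to hold only the ABI-encoded arguments (selector already stripped).
    fn parse_commit_block_calldata(calldata: &[u8]) -> Option<(u32, Vec<u8>)> {
        let tokens = decode(
            &[
                ParamType::Uint(32),                             // assumed: block number
                ParamType::Uint(24),                             // fee account id (used below)
                ParamType::FixedBytes(32),                       // assumed: new state root
                ParamType::Bytes,                                // rollup public data (used below)
                ParamType::Bytes,                                // assumed: eth witness
                ParamType::Array(Box::new(ParamType::Uint(32))), // assumed: witness sizes
            ],
            calldata,
        )
        .ok()?;

        // decode() returns exactly one token per requested ParamType, so indexing is safe here.
        match (&tokens[1], &tokens[3]) {
            (Token::Uint(fee_account), Token::Bytes(public_data)) => {
                Some((fee_account.as_u32(), public_data.clone()))
            }
            _ => None,
        }
    }

Returning `Option` keeps the sketch independent of any particular error type; the patch itself maps decoding failures into `failure::Error` values instead.
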
From bf8918e3a6837afbbcd16ffe6c97c1e4cbd3df49 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 12 Mar 2020 15:17:15 +0200 Subject: [PATCH 018/186] Removed generating few test contracts from prepare-test-contract.sh --- bin/prepare-test-contracts.sh | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 44469af9bf..3370015527 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -14,9 +14,6 @@ cp $IN_DIR/Verifier.sol $OUT_DIR/VerifierTest.sol cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol -cp $IN_DIR/Ownable.sol $OUT_DIR/OwnableTest.sol -cp $IN_DIR/Upgradeable.sol $OUT_DIR/UpgradeableTest.sol -cp $IN_DIR/Proxy.sol $OUT_DIR/ProxyTest.sol cp $IN_DIR/UpgradeModule.sol $OUT_DIR/UpgradeModuleTest.sol cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol cp $IN_DIR/Events.sol $OUT_DIR/Events.sol @@ -31,17 +28,7 @@ ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol ssed 's/Franklin/FranklinTest/' -i $OUT_DIR/*.sol ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol -ssed 's/Ownable/OwnableTest/' -i $OUT_DIR/*.sol -ssed 's/Upgradeable/UpgradeableTest/' -i $OUT_DIR/*.sol -ssed 's/Proxy/ProxyTest/' -i $OUT_DIR/*.sol ssed 's/UpgradeModule/UpgradeModuleTest/' -i $OUT_DIR/*.sol -# Workaround -> ownable and upgradeable has Storage in comments. -ssed 's/StorageTest/Storage/' -i $OUT_DIR/OwnableTest.sol -ssed 's/StorageTest/Storage/' -i $OUT_DIR/UpgradeableTest.sol -# Workaround -> upgrade module has Proxy in method name. -ssed 's/upgradeProxyTest/upgradeProxy/' -i $OUT_DIR/UpgradeModuleTest.sol -ssed 's/cancelProxyTestUpgrade/cancelProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol -ssed 's/finishProxyTestUpgrade/finishProxyUpgrade/' -i $OUT_DIR/UpgradeModuleTest.sol # Changes solidity constant to provided value From 145360effe5ede0247d9bab3ec2342b2c1772ef2 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 13 Mar 2020 06:33:24 +0200 Subject: [PATCH 019/186] Wallet throws errors, fix WalletDecorator --- js/tests/WalletDecorator.ts | 85 ++++++++++++++----------------------- js/zksync.js/src/wallet.ts | 40 ++++++++++++++--- 2 files changed, 65 insertions(+), 60 deletions(-) diff --git a/js/tests/WalletDecorator.ts b/js/tests/WalletDecorator.ts index 5b570d537f..1a8dd5d041 100644 --- a/js/tests/WalletDecorator.ts +++ b/js/tests/WalletDecorator.ts @@ -1,8 +1,9 @@ import * as ethers from 'ethers'; -const zksync = require('zksync'); +import * as zksync from 'zksync'; import * as utils from './utils'; import { sleep } from 'zksync/build/utils'; -const contractCode = require('../../contracts/flat_build/Franklin'); +import { bigNumberify } from 'ethers/utils'; +const contractCode = require('../../contracts/build/Franklin'); const erc20ContractCode = require('openzeppelin-solidity/build/contracts/IERC20'); const ethersProvider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); @@ -27,6 +28,13 @@ const contract = new ethers.Contract( ethersProvider, ); +export type TxResult = { + payload: any, + tx: any, + receipt: any, + error: any, +}; + export class WalletDecorator { syncNonce: number; ethNonce: number; @@ -34,7 +42,7 @@ export class WalletDecorator { constructor( public ethWallet, - public syncWallet, + public syncWallet: zksync.Wallet, ) { this.contract = new ethers.Contract( process.env.CONTRACT_ADDR, @@ -48,52 +56,28 @@ export class WalletDecorator { 
utils.rangearr(n).map( _ => this.cancelOutstandingDepositsForExodusMode(10, { gasLimit: 1000000 }) ) - .map(promise => promise.catch(WalletDecorator.revertReasonHandler)) ); } - async cancelOutstandingDepositsForExodusMode(numDeposits = 10, overrideOptions?) { + async cancelOutstandingDepositsForExodusMode(numDeposits = 10, ethTxOptions?) { const nonce = this.ethNonce++; const tx = await this.contract.cancelOutstandingDepositsForExodusMode( numDeposits, { nonce, - ...overrideOptions + ...ethTxOptions } ); return tx.wait(); } - static async replacementUnderpricedHandler(e) { - if (e.code == 'REPLACEMENT_UNDERPRICED') { - return { - hash: e.transactionHash, - code: e.code, - reason: 'replacement fee too low', - }; - } - - throw e; - } - - static async revertReasonHandler(e) { - const hash = e.transactionHash; - if (hash == undefined) throw e; - const revertReason = await WalletDecorator.revertReason(hash); - if (revertReason == 'tx null') throw e; - return { - hash, - revertReason, - }; - } - static async revertReason(hash) { const tx = await ethersProvider.getTransaction(hash); if (!tx) { return "tx not found"; } - + const receipt = await ethersProvider.getTransactionReceipt(hash); if (receipt.status) { @@ -152,10 +136,9 @@ export class WalletDecorator { } static async fromEthWallet(ethWallet) { - const syncWallet = await zksync.Wallet.fromEthSigner(ethWallet, syncProvider, ethProxy); + const syncWallet = await zksync.Wallet.fromEthSigner(ethWallet, syncProvider); const wallet = new WalletDecorator(ethWallet, syncWallet); - wallet.syncNonce = await syncWallet.getNonce(); - wallet.ethNonce = await ethWallet.getTransactionCount(); + await wallet.resetNonce(); console.log(`wallet ${syncWallet.address()} syncNonce ${wallet.syncNonce}, ethNonce ${wallet.ethNonce}`); return wallet; } @@ -178,7 +161,7 @@ export class WalletDecorator { if (await this.syncWallet.isSigningKeySet()) return; const startTime = new Date().getTime(); - await (await this.syncWallet.onchainAuthSigningKey(this.syncNonce++)).wait(); + await (await this.syncWallet.onchainAuthSigningKey(this.syncNonce)).wait(); const changePubkeyHandle = await this.syncWallet.setSigningKey(this.syncNonce++, true); console.log(`Change pubkey onchain posted: ${(new Date().getTime()) - startTime} ms`); await changePubkeyHandle.awaitReceipt(); @@ -231,7 +214,7 @@ export class WalletDecorator { } } - async emergencyWithdraw(tokens) { + async emergencyWithdraw(tokens): Promise { return await Promise.all( tokens.map(async token => { const ethNonce = this.ethNonce++; @@ -245,11 +228,11 @@ export class WalletDecorator { withdrawFrom: this.syncWallet, token, nonce: syncNonce, - overrideOptions: { + ethTxOptions: { nonce: ethNonce, }, }; - tx = await zksync.emergencyWithdraw(payload); + tx = await this.syncWallet.emergencyWithdraw(payload); receipt = tx.awaitReceipt(); } catch (e) { error = e; @@ -262,15 +245,10 @@ export class WalletDecorator { error, }; }) - .map(promise => promise - .catch(utils.jrpcErrorHandler("Emergency withdraw error")) - .catch(WalletDecorator.revertReasonHandler) - .catch(WalletDecorator.replacementUnderpricedHandler) - ) ); } - async deposit(amount, tokens) { + async deposit(amount, tokens): Promise { return await Promise.all( tokens.map(async token => { const nonce = this.ethNonce; @@ -280,15 +258,15 @@ export class WalletDecorator { let error = null; try { payload = { - depositFrom: this.ethWallet, - depositTo: this.syncWallet, - token: token, - amount: amount, - overrideOptions: { + depositTo: this.syncWallet.address(), + 
token, + amount, + // maxFeeInETHToken: bigNumberify(0), + ethTxOptions: { nonce, }, }; - tx = await zksync.depositFromETH(payload); + tx = await this.syncWallet.depositToSyncFromEthereum(payload); receipt = await tx.awaitReceipt(); } catch (e) { error = e; @@ -304,7 +282,7 @@ export class WalletDecorator { ); } - async transfer(wallet, amount, tokens) { + async transfer(wallet, amount, tokens): Promise { const fee = ethers.utils.bigNumberify(0); return await Promise.all( tokens @@ -337,7 +315,7 @@ export class WalletDecorator { ); } - async withdraw(amount, tokens) { + async withdraw(amount, tokens): Promise { const fee = ethers.utils.bigNumberify(0); const ethAddress = await this.ethWallet.getAddress(); return await Promise.all( @@ -355,7 +333,7 @@ export class WalletDecorator { fee, nonce, }; - tx = await this.syncWallet.withdrawTo(payload); + tx = await this.syncWallet.withdrawFromSyncToEthereum(payload); receipt = await tx.awaitReceipt(); } catch (e) { error = e; @@ -415,9 +393,8 @@ export class WalletDecorator { } async prettyPrintBalances(tokens) { - const ethAddress = await this.ethWallet.getAddress(); const syncAddress = this.syncWallet.address(); - console.log(`Balance of ${ethAddress} ( ${syncAddress} ):`); + console.log(`Balance of ${syncAddress}:`); console.table(await this.balances(tokens)); } } diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index 8fd44136ec..9a6db0dc17 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -207,7 +207,8 @@ export class Wallet { } async onchainAuthSigningKey( - nonce: Nonce = "committed" + nonce: Nonce = "committed", + ethTxOptions?: ethers.providers.TransactionRequest, ): Promise { if (!this.signer) { throw new Error("ZKSync signer is required for current pubkey calculation."); @@ -232,7 +233,8 @@ export class Wallet { newPubKeyHash.replace("sync:", "0x"), numNonce, { - gasLimit: utils.bigNumberify("200000") + gasLimit: utils.bigNumberify("200000"), + ...ethTxOptions, } ); @@ -299,6 +301,7 @@ export class Wallet { token: TokenLike; amount: utils.BigNumberish; maxFeeInETHToken?: utils.BigNumberish; + ethTxOptions?: ethers.providers.TransactionRequest; }): Promise { const gasPrice = await this.ethSigner.provider.getGasPrice(); @@ -333,7 +336,8 @@ export class Wallet { .bigNumberify(deposit.amount) .add(maxFeeInETHToken), gasLimit: utils.bigNumberify("200000"), - gasPrice + gasPrice, + ...deposit.ethTxOptions, } ); } else { @@ -348,7 +352,10 @@ export class Wallet { ); const approveTx = await erc20contract.approve( this.provider.contractAddress.mainContract, - deposit.amount + deposit.amount, + { + ...deposit.ethTxOptions, + } ); ethTransaction = await mainZkSyncContract.depositERC20( tokenAddress, @@ -370,7 +377,7 @@ export class Wallet { token: TokenLike; maxFeeInETHToken?: utils.BigNumberish; accountId?: number; - nonce?: Nonce; + ethTxOptions?: ethers.providers.TransactionRequest; }): Promise { const gasPrice = await this.ethSigner.provider.getGasPrice(); const ethProxy = new ETHProxy( @@ -415,7 +422,8 @@ export class Wallet { { gasLimit: utils.bigNumberify("500000"), value: maxFeeInETHToken, - gasPrice + gasPrice, + ...withdraw.ethTxOptions, } ); @@ -459,6 +467,11 @@ class ETHOperation { this.priorityOpId.toNumber(), "COMMIT" ); + + if (receipt.executed == false) { + throw receipt; + } + this.state = "Committed"; return receipt; } @@ -471,6 +484,11 @@ class ETHOperation { this.priorityOpId.toNumber(), "VERIFY" ); + + if (receipt.executed == false) { + throw receipt; + } + this.state = "Verified"; 
return receipt; } @@ -495,6 +513,11 @@ class Transaction { "COMMIT" ); this.state = "Committed"; + + if (!receipt.success) { + throw receipt; + } + return receipt; } @@ -505,6 +528,11 @@ class Transaction { "VERIFY" ); this.state = "Verified"; + + if (!receipt.success) { + throw receipt; + } + return receipt; } } From f7253d1403f37892f33a9a134d21c6bf7bc2a226 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 13 Mar 2020 14:16:48 +0200 Subject: [PATCH 020/186] Changed some parameters of func to one line format and removed unsound methonds --- contracts/contracts/Franklin.sol | 4 +- contracts/contracts/Governance.sol | 8 +--- contracts/contracts/Ownable.sol | 10 +---- contracts/contracts/Proxy.sol | 5 +-- contracts/contracts/UpgradeModule.sol | 39 ++++--------------- contracts/contracts/Upgradeable.sol | 28 ++----------- contracts/contracts/Verifier.sol | 4 +- contracts/test/unit_tests/ownable_test.js | 2 +- contracts/test/unit_tests/proxy_test.ts | 9 ++--- .../test/unit_tests/upgradeModule_test.ts | 6 +-- 10 files changed, 25 insertions(+), 90 deletions(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 3452247c0e..50b8250cf1 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -37,9 +37,7 @@ contract Franklin is Storage, Config, Events { /// _verifierAddress The address of Verifier contract /// _ // FIXME: remove _genesisAccAddress /// _genesisRoot Genesis blocks (first block) root - function initialize( - bytes calldata initializationParameters - ) external { + function initialize(bytes calldata initializationParameters) external { ( address _governanceAddress, address _verifierAddress, diff --git a/contracts/contracts/Governance.sol b/contracts/contracts/Governance.sol index abc7aac8b1..c44a33b6cc 100644 --- a/contracts/contracts/Governance.sol +++ b/contracts/contracts/Governance.sol @@ -33,12 +33,8 @@ contract Governance is Config { /// @notice Governance contract initialization /// @param initializationParameters Encoded representation of initialization parameters: /// _networkGovernor The address of network governor - function initialize( - bytes calldata initializationParameters - ) external { - ( - address _networkGovernor - ) = abi.decode(initializationParameters, (address)); + function initialize(bytes calldata initializationParameters) external { + address _networkGovernor = abi.decode(initializationParameters, (address)); networkGovernor = _networkGovernor; validators[_networkGovernor] = true; diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol index 1c4c17c2fd..82a1c97b5a 100644 --- a/contracts/contracts/Ownable.sol +++ b/contracts/contracts/Ownable.sol @@ -16,10 +16,7 @@ contract Ownable { /// @notice Check if specified address is master /// @param _address Address to check function requireMaster(address _address) internal view { - require( - _address == getMaster(), - "oro11" - ); // oro11 - only by master + require(_address == getMaster(), "oro11"); // oro11 - only by master } /// @notice Returns contract masters address @@ -44,10 +41,7 @@ contract Ownable { /// @param _newMaster New masters address function transferMastership(address _newMaster) external { requireMaster(msg.sender); - require( - _newMaster != address(0), - "otp11" - ); // otp11 - new masters address can't be zero address + require(_newMaster != address(0), "otp11"); // otp11 - new masters address can't be zero address setMaster(_newMaster); } diff --git a/contracts/contracts/Proxy.sol 
b/contracts/contracts/Proxy.sol index 23ade838b4..2b88b2b13b 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -15,10 +15,7 @@ contract Proxy is Upgradeable { /// @dev Fallback function allowing to perform a delegatecall to the given implementation /// This function will return whatever the implementation call returns function() external payable { - require( - msg.data.length > 0, - "pfb11" - ); // pfb11 - calldata must not be empty + require(msg.data.length > 0, "pfb11"); // pfb11 - calldata must not be empty address _target = getTarget(); assembly { diff --git a/contracts/contracts/UpgradeModule.sol b/contracts/contracts/UpgradeModule.sol index 4a59e7718b..bb80003bca 100644 --- a/contracts/contracts/UpgradeModule.sol +++ b/contracts/contracts/UpgradeModule.sol @@ -57,12 +57,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param newTarget New target function upgradeProxy(address proxyAddress, address newTarget) external { requireMaster(msg.sender); - require( - upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.NotActive, - "upa11" - ); // upa11 - unable to activate active upgrade mode - - Proxy(address(uint160(proxyAddress))).upgradeTarget(newTarget); + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.NotActive, "upa11"); // upa11 - unable to activate active upgrade mode upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.WaitUpgrade; upgradeInfo[proxyAddress].activationTime = now; @@ -76,10 +71,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param proxyAddress Address of proxy to process function cancelProxyUpgrade(address proxyAddress) external { requireMaster(msg.sender); - require( - upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, - "umc11" - ); // umc11 - unable to cancel not active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, "umc11"); // umc11 - unable to cancel not active upgrade mode upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; upgradeInfo[proxyAddress].activationTime = 0; @@ -93,10 +85,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param proxyAddress Address of proxy to process /// @return Bool flag indicating that finalize status is active after this call function activeFinalizeStatusOfUpgrade(address proxyAddress) public returns (bool) { - require( - upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, - "uaf11" - ); // uaf11 - unable to activate finalize status in case of not active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, "uaf11"); // uaf11 - unable to activate finalize status in case of not active upgrade mode if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize) { return true; @@ -108,10 +97,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("registeredPriorityOperations()") ); - require( - callSuccess, - "uaf12" - ); // uaf12 - main contract static call failed + require(callSuccess, "uaf12"); // uaf12 - main contract static call failed uint64 registeredPriorityOperations = abi.decode(encodedResult, (uint64)); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = registeredPriorityOperations; @@ -128,26 +114,17 @@ contract UpgradeModule is UpgradeEvents, Ownable { 
/// @param newTargetInitializationParameters New target initialization parameters function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - require( - upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize, - "umf11" - ); // umf11 - unable to finish upgrade without finalize status active + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize, "umf11"); // umf11 - unable to finish upgrade without finalize status active (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("verifiedPriorityOperations()") ); - require( - callSuccess, - "umf12" - ); // umf12 - main contract static call failed + require(callSuccess, "umf12"); // umf12 - main contract static call failed uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - require( - verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, - "umf13" - ); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status + require(verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status - Proxy(address(uint160(proxyAddress))).finishTargetUpgrade(upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters); + Proxy(address(uint160(proxyAddress))).upgradeTarget(upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters); emit UpgradeCompleted(proxyAddress, version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); version[proxyAddress]++; diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol index c797325a48..cf5c4cee69 100644 --- a/contracts/contracts/Upgradeable.sol +++ b/contracts/contracts/Upgradeable.sol @@ -31,10 +31,7 @@ contract Upgradeable is Ownable { (bool initializationSuccess, ) = getTarget().delegatecall( abi.encodeWithSignature("initialize(bytes)", targetInitializationParameters) ); - require( - initializationSuccess, - "uin11" - ); // uin11 - target initialization failed + require(initializationSuccess, "uin11"); // uin11 - target initialization failed } /// @notice Returns target of contract @@ -55,34 +52,17 @@ contract Upgradeable is Ownable { } } - /// @notice Starts upgrade - /// @param newTarget New actual implementation address - function upgradeTarget(address newTarget) external view { - requireMaster(msg.sender); - require( - newTarget != address(0), - "uut11" - ); // uut11 - new actual implementation address can't be equal to zero - require( - getTarget() != newTarget, - "uut12" - ); // uut12 - new actual implementation address can't be equal to previous - } - - /// @notice Finishes upgrade + /// @notice Upgrades target /// @param newTarget New target /// @param newTargetInitializationParameters New target initialization parameters - function finishTargetUpgrade(address newTarget, bytes calldata newTargetInitializationParameters) external { + function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); setTarget(newTarget); (bool initializationSuccess, ) = getTarget().delegatecall( abi.encodeWithSignature("initialize(bytes)", newTargetInitializationParameters) ); - require( - initializationSuccess, - "ufu11" - ); // ufu11 - target 
initialization failed + require(initializationSuccess, "ufu11"); // ufu11 - target initialization failed } } diff --git a/contracts/contracts/Verifier.sol b/contracts/contracts/Verifier.sol index 0a43a37b78..06f0c540ba 100644 --- a/contracts/contracts/Verifier.sol +++ b/contracts/contracts/Verifier.sol @@ -16,9 +16,7 @@ contract Verifier is VerificationKey { /// @notice Verifier contract initialization /// @param initializationParameters Encoded representation of initialization parameters - function initialize( - bytes calldata initializationParameters - ) external { + function initialize(bytes calldata initializationParameters) external { // parameters are not used during initialization } diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index b55822c6f2..572d146ed2 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ -8,7 +8,7 @@ describe("Ownable unit tests", function () { let testContract before(async () => { - testContract = await deployContract(wallet1, require('../../build/OwnableTest'), [], { + testContract = await deployContract(wallet1, require('../../build/Ownable'), [], { gasLimit: 6000000, }) }); diff --git a/contracts/test/unit_tests/proxy_test.ts b/contracts/test/unit_tests/proxy_test.ts index f5906f8b87..8f188f8d81 100644 --- a/contracts/test/unit_tests/proxy_test.ts +++ b/contracts/test/unit_tests/proxy_test.ts @@ -12,7 +12,7 @@ describe("Proxy unit tests", function () { let proxyDummyInterface let DummyFirst before(async () => { - proxyTestContract = await deployTestContract('../../build/ProxyTest') + proxyTestContract = await deployTestContract('../../build/Proxy') proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); DummyFirst = await deployTestContract('../../build/DummyFirst') await proxyTestContract.initializeTarget(DummyFirst.address, [1, 2]); @@ -21,16 +21,13 @@ describe("Proxy unit tests", function () { it("checking that requireMaster calls present", async () => { let testContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); expect((await getCallRevertReason( () => testContract_with_wallet2_signer.initializeTarget(AddressZero, []) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.finishTargetUpgrade(AddressZero, []) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero, []) )).revertReason).equal("oro11") }); it("check Proxy reverts", async () => { expect((await getCallRevertReason( () => proxyTestContract.initialize([]) )).revertReason).equal("ini11") expect((await getCallRevertReason( () => proxyTestContract.initializeTarget(proxyTestContract.address, []) )).revertReason).equal("uin11") - expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(AddressZero) )).revertReason).equal("uut11") - expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(DummyFirst.address) )).revertReason).equal("uut12") - expect((await getCallRevertReason( () => proxyTestContract.finishTargetUpgrade(proxyTestContract.address, []) )).revertReason).equal("ufu11") + expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(proxyTestContract.address, []) )).revertReason).equal("ufu11") }); }); 
diff --git a/contracts/test/unit_tests/upgradeModule_test.ts b/contracts/test/unit_tests/upgradeModule_test.ts index 295d22f560..b754e2f506 100644 --- a/contracts/test/unit_tests/upgradeModule_test.ts +++ b/contracts/test/unit_tests/upgradeModule_test.ts @@ -20,7 +20,7 @@ describe("UpgradeModule unit tests", function () { let DummyFirst let DummySecond before(async () => { - proxyTestContract = await deployTestContract('../../build/ProxyTest') + proxyTestContract = await deployTestContract('../../build/Proxy') proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); DummyFirst = await deployTestContract('../../build/DummyFirst') DummySecond = await deployTestContract('../../build/DummySecond') @@ -53,8 +53,6 @@ describe("UpgradeModule unit tests", function () { expect((await getCallRevertReason( () => upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") expect((await getCallRevertReason( () => upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") - expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, AddressZero) )).revertReason).equal("uut11") - expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummyFirst.address) )).revertReason).equal("uut12") await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) .to.emit(upgradeModuleContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 0) @@ -118,7 +116,7 @@ describe("UpgradeModule unit tests", function () { // one more activate and cancel with version equal to 1 await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummyFirst.address)) .to.emit(upgradeModuleContract, 'UpgradeModeActivated') - .withArgs(proxyTestContract.address, 1) + .withArgs(proxyTestContract.address, 1); await expect(upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address)) .to.emit(upgradeModuleContract, 'UpgradeCanceled') .withArgs(proxyTestContract.address, 1); From bf56f203e5ac4a6a73a25b54d8602957db568e89 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 13 Mar 2020 18:42:06 +0200 Subject: [PATCH 021/186] Fix code format in get_rollup_ops_block function --- core/data_restore/src/rollup_ops.rs | 56 ++++++++++++----------------- 1 file changed, 23 insertions(+), 33 deletions(-) diff --git a/core/data_restore/src/rollup_ops.rs b/core/data_restore/src/rollup_ops.rs index b65c7d9fe4..fd994967ec 100755 --- a/core/data_restore/src/rollup_ops.rs +++ b/core/data_restore/src/rollup_ops.rs @@ -28,45 +28,35 @@ impl RollupOpsBlock { ) -> Result { let transaction = get_ethereum_transaction(web3, &event_data.transaction_hash)?; let input_data = get_input_data_from_ethereum_transaction(&transaction)?; - let block_commitment_types = vec![ - ethabi::ParamType::Uint(32), - ethabi::ParamType::Uint(24), - ethabi::ParamType::FixedBytes(32), - ethabi::ParamType::Bytes, - ethabi::ParamType::Bytes, - ethabi::ParamType::Array(Box::new(ethabi::ParamType::Uint(32))), - ]; - let decoded_commitment_parameters; - if let Ok(parameters) = ethabi::decode(block_commitment_types.as_slice(), input_data.as_slice()) { - decoded_commitment_parameters = parameters; - } - else { - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameters from commitment transaction").into()); - } + let decoded_commitment_parameters 
= ethabi::decode( + vec![ + ethabi::ParamType::Uint(32), + ethabi::ParamType::Uint(24), + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::Bytes, + ethabi::ParamType::Bytes, + ethabi::ParamType::Array(Box::new(ethabi::ParamType::Uint(32))), + ].as_slice(), + input_data.as_slice(), + ).map_err(|_| failure::Error::from_boxed_compat( + Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameters from commitment transaction")) + ))?; - let ops; - if let Some(ethabi::Token::Bytes(public_data)) = decoded_commitment_parameters.get(3) { - ops = RollupOpsBlock::get_rollup_ops_from_data(public_data.as_slice())?; - } - else { - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get public data from decoded commitment parameters").into()); - } + if let (ethabi::Token::Uint(fee_acc), ethabi::Token::Bytes(public_data)) = (&decoded_commitment_parameters[1], &decoded_commitment_parameters[3]) { + let ops = RollupOpsBlock::get_rollup_ops_from_data(public_data.as_slice())?; + let fee_account = fee_acc.as_u32(); - let fee_account; - if let Some(ethabi::Token::Uint(fee_acc)) = decoded_commitment_parameters.get(1) { - fee_account = fee_acc.as_u32(); + let block = RollupOpsBlock { + block_num: event_data.block_num, + ops, + fee_account, + }; + Ok(block) } else { - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get fee_account address from decoded commitment parameters").into()); + Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't parse commitment parameters").into()) } - - let block = RollupOpsBlock { - block_num: event_data.block_num, - ops, - fee_account, - }; - Ok(block) } /// Returns a Rollup operations vector From e5372748011fc3a60d57a5172bdf34b64e718415 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 15 Mar 2020 16:28:19 +0200 Subject: [PATCH 022/186] Fix code format in get_genesis_account function --- core/data_restore/src/contract_functions.rs | 50 ++++++++++----------- 1 file changed, 23 insertions(+), 27 deletions(-) diff --git a/core/data_restore/src/contract_functions.rs b/core/data_restore/src/contract_functions.rs index 31eeaa0cc5..c99032a09a 100644 --- a/core/data_restore/src/contract_functions.rs +++ b/core/data_restore/src/contract_functions.rs @@ -14,40 +14,36 @@ use web3::Transport; /// * `transaction` - Ethereum Rollup contract initialization transaction description /// pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result { - // encoded target address and targetInitializationParameters let input_data = get_input_data_from_ethereum_transaction(&genesis_transaction)?; - // encoded targetInitializationParameters - let encoded_parameters; - if let Ok(parameters) = ethabi::decode(vec![ethabi::ParamType::Address, ethabi::ParamType::Bytes].as_slice(), input_data.as_slice()) { - if let ethabi::Token::Bytes(parameters) = ¶meters.clone()[1] { - encoded_parameters = (*parameters).clone().to_vec(); - } - else { - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get encoded parameters from target initialization transaction").into()); - } - } - else{ - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get encoded parameters from target initialization transaction").into()); - } + + // target address and targetInitializationParameters + let input_parameters = ethabi::decode( + vec![ethabi::ParamType::Address, ethabi::ParamType::Bytes].as_slice(), + input_data.as_slice() + ).map_err(|_| failure::Error::from_boxed_compat( + 
Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get input parameters from target initialization transaction")) + ))?; + let encoded_parameters = input_parameters[1].clone().to_bytes() + .ok_or_else(|| Err("Invalid token in parameters")) + .map_err(|_: Result::, _>| failure::Error::from_boxed_compat( + Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get initialization parameters from target initialization transaction")) + ))?; + let input_types = vec![ ethabi::ParamType::Address, ethabi::ParamType::Address, ethabi::ParamType::Address, ethabi::ParamType::FixedBytes(INPUT_DATA_ROOT_HASH_BYTES_WIDTH), ]; - let decoded_parameters; - if let Ok(parameters) = ethabi::decode(input_types.as_slice(), encoded_parameters.as_slice()) { - decoded_parameters = parameters; - } - else{ - return Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decode parameters of initialiation").into()); - } - if let Some(ethabi::Token::Address(genesis_operator_address)) = decoded_parameters.get(2) { - Ok(Account::default_with_address(&genesis_operator_address)) - } - else{ - Result::Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get genesis operator address from decoded parameters").into()) - } + let decoded_parameters = ethabi::decode(input_types.as_slice(), encoded_parameters.as_slice()).map_err(|_| failure::Error::from_boxed_compat( + Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameters from target initialization transaction")) + ))?; + match &decoded_parameters[2] { + ethabi::Token::Address(genesis_operator_address) => Some(Account::default_with_address(&genesis_operator_address)), + _ => None + }.ok_or_else(|| Err("Invalid token in parameters")).map_err(|_: Result::| failure::Error::from_boxed_compat( + Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameter from target initialization transaction")) + )) } /// Returns total number of verified blocks on Rollup contract From 2fae0959385383f52b2477d51f4bd9abd5e43521 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 15 Mar 2020 18:02:20 +0200 Subject: [PATCH 023/186] Fixed using ProxyTest in governance unit test --- contracts/test/unit_tests/governance_test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/test/unit_tests/governance_test.js b/contracts/test/unit_tests/governance_test.js index 8b18908f55..37afeabbe3 100644 --- a/contracts/test/unit_tests/governance_test.js +++ b/contracts/test/unit_tests/governance_test.js @@ -9,7 +9,7 @@ describe("Governance unit tests", function () { let governanceAddressDeployed; [testContract, governanceAddressDeployed] = await deployProxyContract( wallet, - require('../../build/ProxyTest'), + require('../../build/Proxy'), require('../../build/GovernanceTest'), ["address"], [wallet.address], From a8dfb5164a555459bfdd433129d4e85d42b1b7f7 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 15 Mar 2020 18:47:52 +0200 Subject: [PATCH 024/186] Removed using ProxyTest code --- contracts/src.ts/deploy.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 4ba9898d4b..55751950d0 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -21,7 +21,6 @@ export const franklinContractCode = require(`../build/Franklin`); export const verifierContractCode = require(`../build/Verifier`); export const governanceContractCode = require(`../build/Governance`); -export const 
proxyTestContractCode = require('../build/ProxyTest'); export const franklinTestContractCode = require('../build/FranklinTest'); export const verifierTestContractCode = require('../build/VerifierTest'); export const governanceTestContractCode = require('../build/GovernanceTest'); @@ -69,9 +68,9 @@ export class Deployer { GovernanceTarget: isTest ? governanceTestContractCode : governanceContractCode, VerifierTarget: isTest ? verifierTestContractCode : verifierContractCode, FranklinTarget: isTest ? franklinTestContractCode : franklinContractCode, - Governance: isTest ? proxyTestContractCode : proxyContractCode, - Verifier: isTest ? proxyTestContractCode : proxyContractCode, - Franklin: isTest ? proxyTestContractCode : proxyContractCode, + Governance: proxyContractCode, + Verifier: proxyContractCode, + Franklin: proxyContractCode, }; this.addresses = { From add6031852433171252bc46ca2ea6e870a5c4de5 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 15 Mar 2020 19:35:27 +0200 Subject: [PATCH 025/186] Formatting code of tree restore functions --- core/data_restore/src/contract_functions.rs | 54 ++++++++++++++------- core/data_restore/src/rollup_ops.rs | 27 ++++++++--- 2 files changed, 56 insertions(+), 25 deletions(-) diff --git a/core/data_restore/src/contract_functions.rs b/core/data_restore/src/contract_functions.rs index c99032a09a..3bf88f01ab 100644 --- a/core/data_restore/src/contract_functions.rs +++ b/core/data_restore/src/contract_functions.rs @@ -1,7 +1,7 @@ extern crate ethabi; use crate::eth_tx_helpers::get_input_data_from_ethereum_transaction; use models::node::account::Account; -use models::params::{INPUT_DATA_ROOT_HASH_BYTES_WIDTH}; +use models::params::INPUT_DATA_ROOT_HASH_BYTES_WIDTH; use web3::contract::{Contract, Options}; use web3::futures::Future; use web3::types::{Address, BlockNumber, Transaction, U256}; @@ -19,15 +19,24 @@ pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result, _>| failure::Error::from_boxed_compat( - Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get initialization parameters from target initialization transaction")) - ))?; + .map_err(|_: Result, _>| { + failure::Error::from_boxed_compat(Box::new(std::io::Error::new( + std::io::ErrorKind::NotFound, + "can't get initialization parameters from target initialization transaction", + ))) + })?; let input_types = vec![ ethabi::ParamType::Address, @@ -35,15 +44,26 @@ pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result Some(Account::default_with_address(&genesis_operator_address)), - _ => None - }.ok_or_else(|| Err("Invalid token in parameters")).map_err(|_: Result::| failure::Error::from_boxed_compat( - Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameter from target initialization transaction")) - )) + ethabi::Token::Address(genesis_operator_address) => { + Some(Account::default_with_address(&genesis_operator_address)) + } + _ => None, + } + .ok_or_else(|| Err("Invalid token in parameters")) + .map_err(|_: Result| { + failure::Error::from_boxed_compat(Box::new(std::io::Error::new( + std::io::ErrorKind::NotFound, + "can't get decoded parameter from target initialization transaction", + ))) + }) } /// Returns total number of verified blocks on Rollup contract diff --git a/core/data_restore/src/rollup_ops.rs b/core/data_restore/src/rollup_ops.rs index fd994967ec..77be2d1bcc 100755 --- a/core/data_restore/src/rollup_ops.rs +++ b/core/data_restore/src/rollup_ops.rs @@ -37,13 +37,21 @@ impl RollupOpsBlock { 
ethabi::ParamType::Bytes, ethabi::ParamType::Bytes, ethabi::ParamType::Array(Box::new(ethabi::ParamType::Uint(32))), - ].as_slice(), + ] + .as_slice(), input_data.as_slice(), - ).map_err(|_| failure::Error::from_boxed_compat( - Box::new(std::io::Error::new(std::io::ErrorKind::NotFound, "can't get decoded parameters from commitment transaction")) - ))?; + ) + .map_err(|_| { + failure::Error::from_boxed_compat(Box::new(std::io::Error::new( + std::io::ErrorKind::NotFound, + "can't get decoded parameters from commitment transaction", + ))) + })?; - if let (ethabi::Token::Uint(fee_acc), ethabi::Token::Bytes(public_data)) = (&decoded_commitment_parameters[1], &decoded_commitment_parameters[3]) { + if let (ethabi::Token::Uint(fee_acc), ethabi::Token::Bytes(public_data)) = ( + &decoded_commitment_parameters[1], + &decoded_commitment_parameters[3], + ) { let ops = RollupOpsBlock::get_rollup_ops_from_data(public_data.as_slice())?; let fee_account = fee_acc.as_u32(); @@ -53,9 +61,12 @@ impl RollupOpsBlock { fee_account, }; Ok(block) - } - else { - Err(std::io::Error::new(std::io::ErrorKind::NotFound, "can't parse commitment parameters").into()) + } else { + Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "can't parse commitment parameters", + ) + .into()) } } From bec8a3d495892ff18dddc447fba3003aa68fed29 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 15 Mar 2020 23:04:36 +0200 Subject: [PATCH 026/186] Added setting mastership in Ownable from address parameter in constructor --- contracts/contracts/Ownable.sol | 5 +++-- contracts/contracts/UpgradeModule.sol | 2 +- contracts/contracts/Upgradeable.sol | 2 +- contracts/test/unit_tests/ownable_test.js | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/contracts/contracts/Ownable.sol b/contracts/contracts/Ownable.sol index 82a1c97b5a..19b9333e0d 100644 --- a/contracts/contracts/Ownable.sol +++ b/contracts/contracts/Ownable.sol @@ -9,8 +9,9 @@ contract Ownable { /// @notice Contract constructor /// @dev Sets msg sender address as masters address - constructor() public { - setMaster(msg.sender); + /// @param masterAddress Master address + constructor(address masterAddress) public { + setMaster(masterAddress); } /// @notice Check if specified address is master diff --git a/contracts/contracts/UpgradeModule.sol b/contracts/contracts/UpgradeModule.sol index bb80003bca..e6e96e7024 100644 --- a/contracts/contracts/UpgradeModule.sol +++ b/contracts/contracts/UpgradeModule.sol @@ -48,7 +48,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @notice Contract constructor /// @param _mainContractAddress Address of contract which processes priority operations /// @dev Calls Ownable contract constructor - constructor(address _mainContractAddress) Ownable() public { + constructor(address _mainContractAddress) Ownable(msg.sender) public { mainContractAddress = _mainContractAddress; } diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol index cf5c4cee69..83b93a3587 100644 --- a/contracts/contracts/Upgradeable.sol +++ b/contracts/contracts/Upgradeable.sol @@ -12,7 +12,7 @@ contract Upgradeable is Ownable { /// @notice Contract constructor /// @dev Calls Ownable contract constructor - constructor() Ownable() public { + constructor() Ownable(msg.sender) public { } diff --git a/contracts/test/unit_tests/ownable_test.js b/contracts/test/unit_tests/ownable_test.js index 572d146ed2..e5bd4c6d27 100644 --- a/contracts/test/unit_tests/ownable_test.js +++ b/contracts/test/unit_tests/ownable_test.js @@ 
-8,7 +8,7 @@ describe("Ownable unit tests", function () { let testContract before(async () => { - testContract = await deployContract(wallet1, require('../../build/Ownable'), [], { + testContract = await deployContract(wallet1, require('../../build/Ownable'), [wallet1.address], { gasLimit: 6000000, }) }); From 52f629213fb7861e08d292ee5183da3499f7e6f3 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 16 Mar 2020 09:35:44 +0200 Subject: [PATCH 027/186] Changed naming from UpgradeModule to UpgradeGatekeeper --- bin/prepare-test-contracts.sh | 8 +-- ...pgradeModule.sol => UpgradeGatekeeper.sol} | 22 +++---- ...dule_test.ts => upgradeGatekeeper_test.ts} | 60 +++++++++---------- 3 files changed, 45 insertions(+), 45 deletions(-) rename contracts/contracts/{UpgradeModule.sol => UpgradeGatekeeper.sol} (84%) rename contracts/test/unit_tests/{upgradeModule_test.ts => upgradeGatekeeper_test.ts} (51%) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 3370015527..132aa9b8b9 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -14,7 +14,7 @@ cp $IN_DIR/Verifier.sol $OUT_DIR/VerifierTest.sol cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol -cp $IN_DIR/UpgradeModule.sol $OUT_DIR/UpgradeModuleTest.sol +cp $IN_DIR/UpgradeGatekeeper.sol $OUT_DIR/UpgradeGatekeeperTest.sol cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol cp $IN_DIR/Events.sol $OUT_DIR/Events.sol cp $IN_DIR/Operations.sol $OUT_DIR/Operations.sol @@ -28,7 +28,7 @@ ssed 's/Verifier/VerifierTest/' -i $OUT_DIR/*.sol ssed 's/Franklin/FranklinTest/' -i $OUT_DIR/*.sol ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol -ssed 's/UpgradeModule/UpgradeModuleTest/' -i $OUT_DIR/*.sol +ssed 's/UpgradeGatekeeper/UpgradeGatekeeperTest/' -i $OUT_DIR/*.sol # Changes solidity constant to provided value @@ -46,10 +46,10 @@ set_constant MAX_AMOUNT_OF_REGISTERED_TOKENS 4 $OUT_DIR/ConfigTest.sol set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/ConfigTest.sol set_constant MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/ConfigTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol -set_constant WAIT_UPGRADE_MODE_PERIOD 4 $OUT_DIR/UpgradeModuleTest.sol +set_constant WAIT_UPGRADE_MODE_PERIOD 4 $OUT_DIR/UpgradeGatekeeperTest.sol create_constant_getter MAX_AMOUNT_OF_REGISTERED_TOKENS $OUT_DIR/ConfigTest.sol -create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeModuleTest.sol +create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol diff --git a/contracts/contracts/UpgradeModule.sol b/contracts/contracts/UpgradeGatekeeper.sol similarity index 84% rename from contracts/contracts/UpgradeModule.sol rename to contracts/contracts/UpgradeGatekeeper.sol index e6e96e7024..1ea25a6649 100644 --- a/contracts/contracts/UpgradeModule.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -5,9 +5,9 @@ import "./Ownable.sol"; import "./Proxy.sol"; -/// @title Upgrade Module Contract +/// @title Upgrade Gatekeeper Contract /// @author Matter Labs -contract UpgradeModule is UpgradeEvents, Ownable { +contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Waiting period to activate finalize status mode (in seconds) uint256 constant WAIT_UPGRADE_MODE_PERIOD = 2 weeks; @@ -57,9 +57,9 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param newTarget New target function 
upgradeProxy(address proxyAddress, address newTarget) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.NotActive, "upa11"); // upa11 - unable to activate active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.NotActive, "upa11"); // upa11 - unable to activate active upgrade mode - upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.WaitUpgrade; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.WaitUpgrade; upgradeInfo[proxyAddress].activationTime = now; upgradeInfo[proxyAddress].nextTarget = newTarget; upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; @@ -71,9 +71,9 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param proxyAddress Address of proxy to process function cancelProxyUpgrade(address proxyAddress) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, "umc11"); // umc11 - unable to cancel not active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.NotActive, "umc11"); // umc11 - unable to cancel not active upgrade mode - upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NotActive; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; @@ -85,14 +85,14 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param proxyAddress Address of proxy to process /// @return Bool flag indicating that finalize status is active after this call function activeFinalizeStatusOfUpgrade(address proxyAddress) public returns (bool) { - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeModule.UpgradeStatus.NotActive, "uaf11"); // uaf11 - unable to activate finalize status in case of not active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.NotActive, "uaf11"); // uaf11 - unable to activate finalize status in case of not active upgrade mode - if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize) { + if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Finalize) { return true; } if (now >= upgradeInfo[proxyAddress].activationTime + WAIT_UPGRADE_MODE_PERIOD) { - upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.Finalize; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Finalize; (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("registeredPriorityOperations()") @@ -114,7 +114,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { /// @param newTargetInitializationParameters New target initialization parameters function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeModule.UpgradeStatus.Finalize, "umf11"); // umf11 - unable to finish upgrade without finalize status active + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Finalize, "umf11"); // umf11 - unable to finish upgrade without finalize status active (bool callSuccess, bytes memory encodedResult) = 
mainContractAddress.staticcall( abi.encodeWithSignature("verifiedPriorityOperations()") @@ -129,7 +129,7 @@ contract UpgradeModule is UpgradeEvents, Ownable { emit UpgradeCompleted(proxyAddress, version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); version[proxyAddress]++; - upgradeInfo[proxyAddress].upgradeStatus = UpgradeModule.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NotActive; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; diff --git a/contracts/test/unit_tests/upgradeModule_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts similarity index 51% rename from contracts/test/unit_tests/upgradeModule_test.ts rename to contracts/test/unit_tests/upgradeGatekeeper_test.ts index b754e2f506..eb8302b952 100644 --- a/contracts/test/unit_tests/upgradeModule_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -11,10 +11,10 @@ const bytes = [133, 174, 97, 255] import {Contract, ethers} from "ethers"; -describe("UpgradeModule unit tests", function () { +describe("UpgradeGatekeeper unit tests", function () { this.timeout(50000); - let upgradeModuleContract + let UpgradeGatekeeperContract let proxyTestContract let proxyDummyInterface let DummyFirst @@ -25,10 +25,10 @@ describe("UpgradeModule unit tests", function () { DummyFirst = await deployTestContract('../../build/DummyFirst') DummySecond = await deployTestContract('../../build/DummySecond') await proxyTestContract.initializeTarget(DummyFirst.address, [bytes[0], bytes[1]]); - upgradeModuleContract = await deployContract(wallet, require('../../build/UpgradeModuleTest'), [proxyTestContract.address], { + UpgradeGatekeeperContract = await deployContract(wallet, require('../../build/UpgradeGatekeeperTest'), [proxyTestContract.address], { gasLimit: 6000000, }) - proxyTestContract.transferMastership(upgradeModuleContract.address); + proxyTestContract.transferMastership(UpgradeGatekeeperContract.address); }); it("check initial dummy index and storage", async () => { @@ -42,23 +42,23 @@ describe("UpgradeModule unit tests", function () { }); it("checking that requireMaster calls present", async () => { - let upgradeModuleContract_with_wallet2_signer = await upgradeModuleContract.connect(wallet2); - expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.upgradeProxy(AddressZero, AddressZero) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.cancelProxyUpgrade(AddressZero) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => upgradeModuleContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") + let UpgradeGatekeeperContract_with_wallet2_signer = await UpgradeGatekeeperContract.connect(wallet2); + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.upgradeProxy(AddressZero, AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelProxyUpgrade(AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") }); - it("check UpgradeModule reverts; activate and cancel upgrade", async () => { - expect((await getCallRevertReason( () => 
upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") - expect((await getCallRevertReason( () => upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") - expect((await getCallRevertReason( () => upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") + it("check UpgradeGatekeeper reverts; activate and cancel upgrade", async () => { + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") - await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) - .to.emit(upgradeModuleContract, 'UpgradeModeActivated') + await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 0) - expect((await getCallRevertReason( () => upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") - await expect(upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address)) - .to.emit(upgradeModuleContract, 'UpgradeCanceled') + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") + await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') .withArgs(proxyTestContract.address, 0) }); @@ -66,14 +66,14 @@ describe("UpgradeModule unit tests", function () { let start_time = performance.now(); // activate - await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) - .to.emit(upgradeModuleContract, 'UpgradeModeActivated') + await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 0) let activated_time = performance.now(); // wait and activate finalize status - let all_time_in_sec = parseInt(await upgradeModuleContract.get_WAIT_UPGRADE_MODE_PERIOD()); + let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_WAIT_UPGRADE_MODE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { @@ -86,20 +86,20 @@ describe("UpgradeModule unit tests", function () { } if (step != 3) { - await upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address); + await UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address); } else { - await expect(upgradeModuleContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address)) - .to.emit(upgradeModuleContract, 'UpgradeModeFinalizeStatusActivated') + await expect(UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModeFinalizeStatusActivated') .withArgs(proxyTestContract.address, 0) } } // finish upgrade without 
verifying priority operations - expect((await getCallRevertReason( () => upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf13") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf13") // finish upgrade await proxyDummyInterface.verifyPriorityOperation(); - await expect(upgradeModuleContract.finishProxyUpgrade(proxyTestContract.address, [bytes[2], bytes[3]])) - .to.emit(upgradeModuleContract, 'UpgradeCompleted') + await expect(UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, [bytes[2], bytes[3]])) + .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') .withArgs(proxyTestContract.address, 0, DummySecond.address) // check dummy index and updated storage @@ -114,11 +114,11 @@ describe("UpgradeModule unit tests", function () { .to.equal(bytes[3]); // one more activate and cancel with version equal to 1 - await expect(upgradeModuleContract.upgradeProxy(proxyTestContract.address, DummyFirst.address)) - .to.emit(upgradeModuleContract, 'UpgradeModeActivated') + await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummyFirst.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 1); - await expect(upgradeModuleContract.cancelProxyUpgrade(proxyTestContract.address)) - .to.emit(upgradeModuleContract, 'UpgradeCanceled') + await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') .withArgs(proxyTestContract.address, 1); }); From 592fabddeee3c1c64cdf465195e3319b0437ed6c Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 16 Mar 2020 10:10:35 +0200 Subject: [PATCH 028/186] fix format in UpgradeGatekeeper.sol --- contracts/contracts/UpgradeGatekeeper.sol | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 1ea25a6649..1d48082b05 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -103,8 +103,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { emit UpgradeModeFinalizeStatusActivated(proxyAddress, version[proxyAddress]); return true; - } - else{ + } else { return false; } } From a3df8ed9fa730524334ef74e0071c1e42a7ec649 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Mon, 16 Mar 2020 12:15:15 +0200 Subject: [PATCH 029/186] Bundling for node works, but not for web --- js/client/vue.config.js | 5 + js/client/yarn.lock | 14 + js/tests/loading.ts | 2 + js/tests/yarn.lock | 14 + js/zksync-crypto/.gitignore | 6 + js/zksync-crypto/Cargo.toml | 40 + js/zksync-crypto/indexx.js | 1 + js/zksync-crypto/package.json | 33 + js/zksync-crypto/rollup.config.js | 27 + js/zksync-crypto/src/lib.rs | 115 + js/zksync-crypto/src/utils.rs | 111 + js/zksync-crypto/tests/web.rs | 13 + js/zksync-crypto/webpack.config.js | 36 + js/zksync-crypto/yarn.lock | 3546 ++++++++++++++++++++++++++++ js/zksync.js/package.json | 4 +- js/zksync.js/src/crypto.ts | 410 +--- js/zksync.js/src/signer.ts | 30 +- js/zksync.js/src/wallet.ts | 12 +- js/zksync.js/yarn.lock | 14 + 19 files changed, 4029 insertions(+), 404 deletions(-) create mode 100644 js/tests/loading.ts create mode 100644 js/zksync-crypto/.gitignore create mode 100644 js/zksync-crypto/Cargo.toml create mode 100644 js/zksync-crypto/indexx.js create mode 100644 
js/zksync-crypto/package.json create mode 100644 js/zksync-crypto/rollup.config.js create mode 100644 js/zksync-crypto/src/lib.rs create mode 100644 js/zksync-crypto/src/utils.rs create mode 100644 js/zksync-crypto/tests/web.rs create mode 100644 js/zksync-crypto/webpack.config.js create mode 100644 js/zksync-crypto/yarn.lock diff --git a/js/client/vue.config.js b/js/client/vue.config.js index cfd8e84a6d..84f181bbab 100644 --- a/js/client/vue.config.js +++ b/js/client/vue.config.js @@ -2,6 +2,11 @@ module.exports = { publicPath: process.env.NODE_ENV === 'production' ? '/client/' : '/', + configureWebpack: { + devServer: { + // mimeTypes: { 'application/wasm': ['wasm'] } + }, + }, chainWebpack: config => { config.optimization.minimize(process.env.NODE_ENV === 'production'); config.resolve.symlinks(false); diff --git a/js/client/yarn.lock b/js/client/yarn.lock index cb688dd94c..8049350250 100644 --- a/js/client/yarn.lock +++ b/js/client/yarn.lock @@ -1859,6 +1859,11 @@ browserslist@^4.3.4: electron-to-chromium "^1.3.247" node-releases "^1.1.29" +buffer-es6@^4.9.3: + version "4.9.3" + resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" + integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= + buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -3701,6 +3706,12 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" +example-node-wasm@../zksync-crypto: + version "0.0.0" + dependencies: + buffer-es6 "^4.9.3" + zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-7801641d-ec0b-4594-a519-2b9a8eeb179e-1584117788720/node_modules/example-node-wasm/pkg" + execa@^0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" @@ -9672,6 +9683,9 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" +zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": + version "0.1.0" + "zksync@link:../zksync.js": version "0.0.0" uid "" diff --git a/js/tests/loading.ts b/js/tests/loading.ts new file mode 100644 index 0000000000..31dad747fc --- /dev/null +++ b/js/tests/loading.ts @@ -0,0 +1,2 @@ +import example from "example-node-wasm"; +example.then(console.log); diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index e3ac94f1b5..4b8f025592 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -69,6 +69,11 @@ brorand@^1.0.1: resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= +buffer-es6@^4.9.3: + version "4.9.3" + resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" + integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= + buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -222,6 +227,12 @@ ethers@4.0.33: uuid "2.0.1" xmlhttprequest "1.8.0" +example-node-wasm@../zksync-crypto: + version "0.0.0" + dependencies: + buffer-es6 "^4.9.3" + zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-9a0cf1ac-bdad-461e-97bb-b869071b95cc-1584116293040/node_modules/example-node-wasm/pkg" + ext@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244" @@ -534,6 
+545,9 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== +zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": + version "0.1.0" + "zksync@link:../zksync.js": version "0.0.0" uid "" diff --git a/js/zksync-crypto/.gitignore b/js/zksync-crypto/.gitignore new file mode 100644 index 0000000000..4e301317e5 --- /dev/null +++ b/js/zksync-crypto/.gitignore @@ -0,0 +1,6 @@ +/target +**/*.rs.bk +Cargo.lock +bin/ +pkg/ +wasm-pack.log diff --git a/js/zksync-crypto/Cargo.toml b/js/zksync-crypto/Cargo.toml new file mode 100644 index 0000000000..7d74d707e2 --- /dev/null +++ b/js/zksync-crypto/Cargo.toml @@ -0,0 +1,40 @@ +[workspace] + +[package] +name = "zksync-crypto" +version = "0.1.0" +authors = ["Vitalii Drohan "] +edition = "2018" + +[lib] +crate-type = ["cdylib", "rlib"] + +[features] +default = ["console_error_panic_hook"] + +[dependencies] +wasm-bindgen = "0.2.59" +franklin_crypto = { package = "franklin-crypto", git = "https://github.com/matter-labs/franklin-crypto.git", branch="plonk"} +ff = { package = "ff_ce", version = "0.6.0"} +hex = "0.3" +sha2 = "0.8" + +# The `console_error_panic_hook` crate provides better debugging of panics by +# logging them with `console.error`. This is great for development, but requires +# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for +# code size when deploying. +console_error_panic_hook = { version = "0.1.1", optional = true } + +# `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size +# compared to the default allocator's ~10K. It is slower than the default +# allocator, however. +# +# Unfortunately, `wee_alloc` requires nightly Rust when targeting wasm for now. +wee_alloc = { version = "0.4.2", optional = true } + +[dev-dependencies] +wasm-bindgen-test = "0.2" + +[profile.release] +# Tell `rustc` to optimize for small code size. 
+opt-level = "s"
diff --git a/js/zksync-crypto/indexx.js b/js/zksync-crypto/indexx.js
new file mode 100644
index 0000000000..4f81fc4777
--- /dev/null
+++ b/js/zksync-crypto/indexx.js
@@ -0,0 +1 @@
+export default import("zksync-crypto");
diff --git a/js/zksync-crypto/package.json b/js/zksync-crypto/package.json
new file mode 100644
index 0000000000..42e0cf9b50
--- /dev/null
+++ b/js/zksync-crypto/package.json
@@ -0,0 +1,33 @@
+{
+  "name": "example-node-wasm",
+  "version": "0.0.0",
+  "browser": "dist/index.web.js",
+  "main": "dist/index.node.js",
+  "scripts": {
+    "build": "rollup -c",
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "dependencies": {
+    "buffer-es6": "^4.9.3",
+    "zksync-crypto": "file:pkg"
+  },
+  "devDependencies": {
+    "@babel/core": "^7.8.7",
+    "@babel/plugin-transform-modules-commonjs": "^7.8.3",
+    "@rollup/plugin-commonjs": "^11.0.2",
+    "@rollup/plugin-node-resolve": "^7.1.1",
+    "@rollup/plugin-typescript": "^4.0.0",
+    "@rollup/plugin-wasm": "^3.0.0",
+    "@wasm-tool/wasm-pack-plugin": "^1.1.0",
+    "html-webpack-plugin": "^3.2.0",
+    "rollup": "^2.0.6",
+    "rollup-plugin-dts": "^1.3.0",
+    "rollup-plugin-rust": "^1.2.0",
+    "rollup-plugin-terser": "^5.3.0",
+    "text-encoding": "^0.7.0",
+    "typescript": "^3.8.3",
+    "wasm-loader": "^1.3.0",
+    "webpack": "^4.42.0",
+    "webpack-cli": "^3.3.11"
+  }
+}
diff --git a/js/zksync-crypto/rollup.config.js b/js/zksync-crypto/rollup.config.js
new file mode 100644
index 0000000000..2bcc254b09
--- /dev/null
+++ b/js/zksync-crypto/rollup.config.js
@@ -0,0 +1,27 @@
+import resolve from '@rollup/plugin-node-resolve';
+import commonjs from '@rollup/plugin-commonjs';
+import { terser } from 'rollup-plugin-terser';
+import wasm from '@rollup/plugin-wasm';
+import typescript from '@rollup/plugin-typescript';
+import dts from "rollup-plugin-dts";
+
+// `npm run build` -> `production` is true
+// `npm run dev` -> `production` is false
+const production = !process.env.ROLLUP_WATCH;
+
+export default {
+    input: [
+        "pkg/zksync_crypto.js",
+    ],
+    output: {
+        file: 'public/bundle.js',
+        format: 'cjs'
+    },
+    plugins: [
+        resolve(), // tells Rollup how to find date-fns in node_modules
+        dts(),
+        commonjs(), // converts date-fns to ES modules
+        wasm(),
+        production && terser() // minify, but only in production
+    ]
+};
diff --git a/js/zksync-crypto/src/lib.rs b/js/zksync-crypto/src/lib.rs
new file mode 100644
index 0000000000..8a60327bef
--- /dev/null
+++ b/js/zksync-crypto/src/lib.rs
@@ -0,0 +1,115 @@
+mod utils;
+
+pub use franklin_crypto::bellman::pairing::bn256::{Bn256 as Engine, Fr};
+pub type Fs = <Engine as JubjubEngine>::Fs;
+thread_local! {
+    pub static JUBJUB_PARAMS: AltJubjubBn256 = AltJubjubBn256::new();
+}
+
+use wasm_bindgen::prelude::*;
+
+use franklin_crypto::{
+    alt_babyjubjub::{fs::FsRepr, AltJubjubBn256, FixedGenerators},
+    bellman::pairing::ff::{PrimeField, PrimeFieldRepr},
+    eddsa::{PrivateKey, PublicKey, Seed},
+    jubjub::JubjubEngine,
+};
+
+use crate::utils::{pedersen_hash_tx_msg, pub_key_hash, set_panic_hook};
+use sha2::{Digest, Sha256};
+
+// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
+// allocator.
+#[cfg(feature = "wee_alloc")]
+#[global_allocator]
+static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
+
+#[wasm_bindgen]
+pub fn init() {
+    JUBJUB_PARAMS.with(|_| {});
+    set_panic_hook();
+}
+
+#[wasm_bindgen]
+pub fn private_key_from_seed(seed: &[u8]) -> Vec<u8> {
+    if seed.len() < 32 {
+        panic!("Seed is too short");
+    };
+
+    let mut effective_seed = seed.to_vec();
+
+    loop {
+        let raw_priv_key = {
+            let mut hasher = Sha256::new();
+            hasher.input(&effective_seed);
+            hasher.result().to_vec()
+        };
+        let mut fs_repr = FsRepr::default();
+        fs_repr.read_be(&raw_priv_key[..]).unwrap();
+        if Fs::from_repr(fs_repr).is_ok() {
+            return raw_priv_key;
+        } else {
+            effective_seed = raw_priv_key;
+        }
+    }
+}
+
+#[wasm_bindgen]
+pub fn private_key_to_pubkey_hash(private_key: &[u8]) -> Vec<u8> {
+    let p_g = FixedGenerators::SpendingKeyGenerator;
+
+    let sk = {
+        let mut fs_repr = FsRepr::default();
+        fs_repr.read_be(private_key).unwrap();
+        PrivateKey::<Engine>(Fs::from_repr(fs_repr).unwrap())
+    };
+
+    let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params));
+    pub_key_hash(&pubkey)
+}
+
+#[wasm_bindgen]
+pub fn sign_musig_sha256(private_key: &[u8], msg: &[u8]) -> Vec<u8> {
+    let p_g = FixedGenerators::SpendingKeyGenerator;
+
+    let sk: PrivateKey<Engine> = {
+        let mut fs_repr = FsRepr::default();
+        fs_repr.read_be(private_key).unwrap();
+        PrivateKey::<Engine>(Fs::from_repr(fs_repr).unwrap())
+    };
+
+    let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params));
+    let mut packed_point = [0u8; 32];
+    pubkey.write(packed_point.as_mut()).unwrap();
+
+    let signable_msg = pedersen_hash_tx_msg(msg);
+
+    let seed1 = Seed::deterministic_seed(&sk, &signable_msg);
+    let sign =
+        JUBJUB_PARAMS.with(|params| sk.musig_sha256_sign(&signable_msg, &seed1, p_g, params));
+
+    let mut packed_signature = [0u8; 64];
+    let (r_bar, s_bar) = packed_signature.as_mut().split_at_mut(32);
+
+    sign.r.write(r_bar).unwrap();
+    sign.s.into_repr().write_le(s_bar).unwrap();
+
+    let mut result = Vec::with_capacity(32 + 64);
+    result.extend_from_slice(&packed_point);
+    result.extend_from_slice(&packed_signature[..]);
+    result
+}
+
+//#[test]
+//fn test_pub_key_hash() {
+//    let p_g = FixedGenerators::SpendingKeyGenerator;
+//
+//    let sk = {
+//        PrivateKey::<Engine>(Fs::from_str("5").unwrap())
+//    };
+//
+//    let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params));
+//    println!("{:?}", pubkey.0.into_xy());
+//    println!("{}",hex::encode(&pub_key_hash(&pubkey)));
+//    panic!()
+//}
diff --git a/js/zksync-crypto/src/utils.rs b/js/zksync-crypto/src/utils.rs
new file mode 100644
index 0000000000..7146b9b593
--- /dev/null
+++ b/js/zksync-crypto/src/utils.rs
@@ -0,0 +1,111 @@
+use crate::JUBJUB_PARAMS;
+use crate::{Engine, Fr};
+use franklin_crypto::{
+    bellman::{pairing::ff::PrimeField, BitIterator},
+    eddsa::PublicKey,
+    pedersen_hash::{baby_pedersen_hash, Personalization},
+};
+
+const FR_BIT_WIDTH_PADDED: usize = 256;
+const PAD_MSG_BEFORE_HASH_BITS_LEN: usize = 736;
+const NEW_PUBKEY_HASH_WIDTH: usize = 160;
+
+pub fn set_panic_hook() {
+    // When the `console_error_panic_hook` feature is enabled, we can call the
+    // `set_panic_hook` function at least once during initialization, and then
+    // we will get better error messages if our code ever panics.
+    //
+    // For more details see
+    // https://github.com/rustwasm/console_error_panic_hook#readme
+    #[cfg(feature = "console_error_panic_hook")]
+    console_error_panic_hook::set_once();
+}
+
+pub fn bytes_into_be_bits(bytes: &[u8]) -> Vec<bool> {
+    let mut bits = Vec::with_capacity(bytes.len() * 8);
+    for byte in bytes {
+        let mut temp = *byte;
+        for _ in 0..8 {
+            bits.push(temp & 0x80 == 0x80);
+            temp <<= 1;
+        }
+    }
+    bits
+}
+
+pub fn pack_bits_into_bytes(bits: Vec<bool>) -> Vec<u8> {
+    let mut message_bytes: Vec<u8> = Vec::with_capacity(bits.len() / 8);
+    let byte_chunks = bits.chunks(8);
+    for byte_chunk in byte_chunks {
+        let mut byte = 0u8;
+        for (i, bit) in byte_chunk.iter().enumerate() {
+            if *bit {
+                byte |= 1 << i;
+            }
+        }
+        message_bytes.push(byte);
+    }
+    message_bytes
+}
+
+pub fn append_le_fixed_width(content: &mut Vec<bool>, x: &Fr, width: usize) {
+    let mut token_bits: Vec<bool> = BitIterator::new(x.into_repr()).collect();
+    token_bits.reverse();
+    token_bits.resize(width, false);
+    content.extend(token_bits);
+}
+
+pub fn le_bit_vector_into_bytes(bits: &[bool]) -> Vec<u8> {
+    let mut bytes: Vec<u8> = Vec::with_capacity(bits.len() / 8);
+
+    let byte_chunks = bits.chunks(8);
+
+    for byte_chunk in byte_chunks {
+        let mut byte = 0u8;
+        // pack just in order
+        for (i, bit) in byte_chunk.iter().enumerate() {
+            if *bit {
+                byte |= 1 << i;
+            }
+        }
+        bytes.push(byte);
+    }
+
+    bytes
+}
+
+pub fn pub_key_hash(pub_key: &PublicKey<Engine>) -> Vec<u8> {
+    let (pub_x, pub_y) = pub_key.0.into_xy();
+    let mut pub_key_bits = Vec::with_capacity(FR_BIT_WIDTH_PADDED * 2);
+    append_le_fixed_width(&mut pub_key_bits, &pub_x, FR_BIT_WIDTH_PADDED);
+    append_le_fixed_width(&mut pub_key_bits, &pub_y, FR_BIT_WIDTH_PADDED);
+    let pub_key_hash = pedersen_hash_fr(pub_key_bits);
+    let mut pub_key_hash_bits = Vec::with_capacity(NEW_PUBKEY_HASH_WIDTH);
+    append_le_fixed_width(&mut pub_key_hash_bits, &pub_key_hash, NEW_PUBKEY_HASH_WIDTH);
+    let mut bytes = le_bit_vector_into_bytes(&pub_key_hash_bits);
+    bytes.reverse();
+    bytes
+}
+
+fn pedersen_hash_fr(input: Vec<bool>) -> Fr {
+    JUBJUB_PARAMS.with(|params| {
+        baby_pedersen_hash::<Engine, _>(Personalization::NoteCommitment, input, params)
+            .into_xy()
+            .0
+    })
+}
+
+fn pedersen_hash_bits(input: Vec<bool>) -> Vec<bool> {
+    let hash_fr = pedersen_hash_fr(input);
+    let mut hash_bits: Vec<bool> = BitIterator::new(hash_fr.into_repr()).collect();
+    hash_bits.reverse();
+    hash_bits.resize(256, false);
+    hash_bits
+}
+
+pub fn pedersen_hash_tx_msg(msg: &[u8]) -> Vec<u8> {
+    let mut msg_bits = bytes_into_be_bits(msg);
+    msg_bits.resize(PAD_MSG_BEFORE_HASH_BITS_LEN, false);
+    let hash_bits = pedersen_hash_bits(msg_bits);
+    pack_bits_into_bytes(hash_bits)
+}
diff --git a/js/zksync-crypto/tests/web.rs b/js/zksync-crypto/tests/web.rs
new file mode 100644
index 0000000000..de5c1dafef
--- /dev/null
+++ b/js/zksync-crypto/tests/web.rs
@@ -0,0 +1,13 @@
+//! Test suite for the Web and headless browsers.
+ +#![cfg(target_arch = "wasm32")] + +extern crate wasm_bindgen_test; +use wasm_bindgen_test::*; + +wasm_bindgen_test_configure!(run_in_browser); + +#[wasm_bindgen_test] +fn pass() { + assert_eq!(1 + 1, 2); +} diff --git a/js/zksync-crypto/webpack.config.js b/js/zksync-crypto/webpack.config.js new file mode 100644 index 0000000000..0aeb576dad --- /dev/null +++ b/js/zksync-crypto/webpack.config.js @@ -0,0 +1,36 @@ +const HtmlWebpackPlugin = require('html-webpack-plugin'); +const path = require('path'); +const webpack = require('webpack'); +const WasmPackPlugin = require("@wasm-tool/wasm-pack-plugin"); + +const config = target => ({ + entry: './indexx.js', + output: { + path: path.resolve(__dirname, 'dist'), + filename: `index.${target}.js`, + libraryTarget: 'umd', + }, + plugins: [ + new HtmlWebpackPlugin(), + new WasmPackPlugin({ + crateDirectory: path.resolve(__dirname, ".") + }), + // Have this example work in Edge which doesn't ship `TextEncoder` or + // `TextDecoder` at this time. + new webpack.ProvidePlugin({ + TextDecoder: ['text-encoding', 'TextDecoder'], + TextEncoder: ['text-encoding', 'TextEncoder'] + }) + ], + mode: 'development', + // module: { + // rules: [ + // { test: /\.wasm$/, type: "webassembly/experimental" }, + // ], + // }, + // devServer: { + // mimeTypes: { 'text/html': ['wasm'] } + // }, +}); + +module.exports = ['web', 'node'].map(target => ({...config(target), target})); diff --git a/js/zksync-crypto/yarn.lock b/js/zksync-crypto/yarn.lock new file mode 100644 index 0000000000..6648b18598 --- /dev/null +++ b/js/zksync-crypto/yarn.lock @@ -0,0 +1,3546 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@babel/code-frame@^7.0.0-beta.36", "@babel/code-frame@^7.5.5", "@babel/code-frame@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e" + integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g== + dependencies: + "@babel/highlight" "^7.8.3" + +"@babel/core@^7.0.0-beta.39", "@babel/core@^7.8.7": + version "7.8.7" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.8.7.tgz#b69017d221ccdeb203145ae9da269d72cf102f3b" + integrity sha512-rBlqF3Yko9cynC5CCFy6+K/w2N+Sq/ff2BPy+Krp7rHlABIr5epbA7OxVeKoMHB39LZOp1UY5SuLjy6uWi35yA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@babel/generator" "^7.8.7" + "@babel/helpers" "^7.8.4" + "@babel/parser" "^7.8.7" + "@babel/template" "^7.8.6" + "@babel/traverse" "^7.8.6" + "@babel/types" "^7.8.7" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.1" + json5 "^2.1.0" + lodash "^4.17.13" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/generator@^7.8.6", "@babel/generator@^7.8.7": + version "7.8.8" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.8.8.tgz#cdcd58caab730834cee9eeadb729e833b625da3e" + integrity sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg== + dependencies: + "@babel/types" "^7.8.7" + jsesc "^2.5.1" + lodash "^4.17.13" + source-map "^0.5.0" + +"@babel/helper-function-name@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca" + integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA== + dependencies: + "@babel/helper-get-function-arity" "^7.8.3" + 
"@babel/template" "^7.8.3" + "@babel/types" "^7.8.3" + +"@babel/helper-get-function-arity@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5" + integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA== + dependencies: + "@babel/types" "^7.8.3" + +"@babel/helper-member-expression-to-functions@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz#659b710498ea6c1d9907e0c73f206eee7dadc24c" + integrity sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA== + dependencies: + "@babel/types" "^7.8.3" + +"@babel/helper-module-imports@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498" + integrity sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg== + dependencies: + "@babel/types" "^7.8.3" + +"@babel/helper-module-transforms@^7.8.3": + version "7.8.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.8.6.tgz#6a13b5eecadc35692047073a64e42977b97654a4" + integrity sha512-RDnGJSR5EFBJjG3deY0NiL0K9TO8SXxS9n/MPsbPK/s9LbQymuLNtlzvDiNS7IpecuL45cMeLVkA+HfmlrnkRg== + dependencies: + "@babel/helper-module-imports" "^7.8.3" + "@babel/helper-replace-supers" "^7.8.6" + "@babel/helper-simple-access" "^7.8.3" + "@babel/helper-split-export-declaration" "^7.8.3" + "@babel/template" "^7.8.6" + "@babel/types" "^7.8.6" + lodash "^4.17.13" + +"@babel/helper-optimise-call-expression@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz#7ed071813d09c75298ef4f208956006b6111ecb9" + integrity sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ== + dependencies: + "@babel/types" "^7.8.3" + +"@babel/helper-plugin-utils@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.8.3.tgz#9ea293be19babc0f52ff8ca88b34c3611b208670" + integrity sha512-j+fq49Xds2smCUNYmEHF9kGNkhbet6yVIBp4e6oeQpH1RUs/Ir06xUKzDjDkGcaaokPiTNs2JBWHjaE4csUkZQ== + +"@babel/helper-replace-supers@^7.8.6": + version "7.8.6" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz#5ada744fd5ad73203bf1d67459a27dcba67effc8" + integrity sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.8.3" + "@babel/helper-optimise-call-expression" "^7.8.3" + "@babel/traverse" "^7.8.6" + "@babel/types" "^7.8.6" + +"@babel/helper-simple-access@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz#7f8109928b4dab4654076986af575231deb639ae" + integrity sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw== + dependencies: + "@babel/template" "^7.8.3" + "@babel/types" "^7.8.3" + +"@babel/helper-split-export-declaration@^7.8.3": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9" + integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA== + dependencies: + "@babel/types" "^7.8.3" + +"@babel/helpers@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.8.4.tgz#754eb3ee727c165e0a240d6c207de7c455f36f73" + integrity sha512-VPbe7wcQ4chu4TDQjimHv/5tj73qz88o12EPkO2ValS2QiQS/1F2SsjyIGNnAD0vF/nZS6Cf9i+vW6HIlnaR8w== + dependencies: + "@babel/template" "^7.8.3" + "@babel/traverse" "^7.8.4" + "@babel/types" "^7.8.3" + +"@babel/highlight@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.8.3.tgz#28f173d04223eaaa59bc1d439a3836e6d1265797" + integrity sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg== + dependencies: + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^4.0.0" + +"@babel/parser@^7.8.6", "@babel/parser@^7.8.7": + version "7.8.8" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.8.8.tgz#4c3b7ce36db37e0629be1f0d50a571d2f86f6cd4" + integrity sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA== + +"@babel/plugin-transform-modules-commonjs@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.8.3.tgz#df251706ec331bd058a34bdd72613915f82928a5" + integrity sha512-JpdMEfA15HZ/1gNuB9XEDlZM1h/gF/YOH7zaZzQu2xCFRfwc01NXBMHHSTT6hRjlXJJs5x/bfODM3LiCk94Sxg== + dependencies: + "@babel/helper-module-transforms" "^7.8.3" + "@babel/helper-plugin-utils" "^7.8.3" + "@babel/helper-simple-access" "^7.8.3" + babel-plugin-dynamic-import-node "^2.3.0" + +"@babel/template@^7.8.3", "@babel/template@^7.8.6": + version "7.8.6" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.8.6.tgz#86b22af15f828dfb086474f964dcc3e39c43ce2b" + integrity sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg== + dependencies: + "@babel/code-frame" "^7.8.3" + "@babel/parser" "^7.8.6" + "@babel/types" "^7.8.6" + +"@babel/traverse@^7.0.0-beta.39", "@babel/traverse@^7.8.4", "@babel/traverse@^7.8.6": + version "7.8.6" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.8.6.tgz#acfe0c64e1cd991b3e32eae813a6eb564954b5ff" + integrity sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A== + dependencies: + "@babel/code-frame" "^7.8.3" + "@babel/generator" "^7.8.6" + "@babel/helper-function-name" "^7.8.3" + "@babel/helper-split-export-declaration" "^7.8.3" + "@babel/parser" "^7.8.6" + "@babel/types" "^7.8.6" + debug "^4.1.0" + globals "^11.1.0" + lodash "^4.17.13" + +"@babel/types@^7.0.0-beta.39", "@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.8.7": + version "7.8.7" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.8.7.tgz#1fc9729e1acbb2337d5b6977a63979b4819f5d1d" + integrity sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw== + dependencies: + esutils "^2.0.2" + lodash "^4.17.13" + to-fast-properties "^2.0.0" + +"@rollup/plugin-commonjs@^11.0.2": + version "11.0.2" + resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.2.tgz#837cc6950752327cb90177b608f0928a4e60b582" + integrity 
sha512-MPYGZr0qdbV5zZj8/2AuomVpnRVXRU5XKXb3HVniwRoRCreGlf5kOE081isNWeiLIi6IYkwTX9zE0/c7V8g81g== + dependencies: + "@rollup/pluginutils" "^3.0.0" + estree-walker "^1.0.1" + is-reference "^1.1.2" + magic-string "^0.25.2" + resolve "^1.11.0" + +"@rollup/plugin-node-resolve@^7.1.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.1.tgz#8c6e59c4b28baf9d223028d0e450e06a485bb2b7" + integrity sha512-14ddhD7TnemeHE97a4rLOhobfYvUVcaYuqTnL8Ti7Jxi9V9Jr5LY7Gko4HZ5k4h4vqQM0gBQt6tsp9xXW94WPA== + dependencies: + "@rollup/pluginutils" "^3.0.6" + "@types/resolve" "0.0.8" + builtin-modules "^3.1.0" + is-module "^1.0.0" + resolve "^1.14.2" + +"@rollup/plugin-typescript@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@rollup/plugin-typescript/-/plugin-typescript-4.0.0.tgz#7a4f7b2844d28669e58c03c880f6ed0d6e926685" + integrity sha512-qA3r4WlR8JnTm+VdBzvQSIkfXt802keGxXuE4SAjUjRMKK3nMXTUCvOGSzFkav2qf0QiGv6yijfbjuf+bhwmZQ== + dependencies: + "@rollup/pluginutils" "^3.0.1" + resolve "^1.14.1" + +"@rollup/plugin-wasm@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@rollup/plugin-wasm/-/plugin-wasm-3.0.0.tgz#a757ba29d63621bdb6f4f3dfbb08d6aa2b3fd3ab" + integrity sha512-ggiUAwvjKN3jbGbdYs0zLus4rDa8ug08tigb4vyur/miPcaDJG178cgeJw+zVV1rOQk9rtzK5V+nZg31AcgU/w== + +"@rollup/pluginutils@^3.0.0", "@rollup/pluginutils@^3.0.1", "@rollup/pluginutils@^3.0.6": + version "3.0.8" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.0.8.tgz#4e94d128d94b90699e517ef045422960d18c8fde" + integrity sha512-rYGeAc4sxcZ+kPG/Tw4/fwJODC3IXHYDH4qusdN/b6aLw5LPUbzpecYbEJh4sVQGPFJxd2dBU4kc1H3oy9/bnw== + dependencies: + estree-walker "^1.0.1" + +"@types/estree@0.0.39": + version "0.0.39" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/node@*": + version "13.9.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-13.9.1.tgz#96f606f8cd67fb018847d9b61e93997dabdefc72" + integrity sha512-E6M6N0blf/jiZx8Q3nb0vNaswQeEyn0XlupO+xN6DtJ6r6IT4nXrTry7zhIfYvFCl3/8Cu6WIysmUBKiqV0bqQ== + +"@types/resolve@0.0.8": + version "0.0.8" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" + integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ== + dependencies: + "@types/node" "*" + +"@wasm-tool/wasm-pack-plugin@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@wasm-tool/wasm-pack-plugin/-/wasm-pack-plugin-1.1.0.tgz#94016deba0f59306d1a9f0cb3b15144d8cd9ab34" + integrity sha512-44vbq7MyZzavE7g5Q7RKlnFtI35BhUkNiUANTeOivbpRfsRw0d0n9lA2ytmiVS4O+AVRsjjPLVSv35kPvL+OWg== + dependencies: + chalk "^2.4.1" + command-exists "^1.2.7" + watchpack "^1.6.0" + +"@webassemblyjs/ast@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" + integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== + dependencies: + "@webassemblyjs/helper-module-context" "1.8.5" + "@webassemblyjs/helper-wasm-bytecode" "1.8.5" + "@webassemblyjs/wast-parser" "1.8.5" + +"@webassemblyjs/floating-point-hex-parser@1.8.5": + version "1.8.5" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" + integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== + +"@webassemblyjs/helper-api-error@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" + integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== + +"@webassemblyjs/helper-buffer@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" + integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== + +"@webassemblyjs/helper-code-frame@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" + integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== + dependencies: + "@webassemblyjs/wast-printer" "1.8.5" + +"@webassemblyjs/helper-fsm@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" + integrity sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== + +"@webassemblyjs/helper-module-context@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" + integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== + dependencies: + "@webassemblyjs/ast" "1.8.5" + mamacro "^0.0.3" + +"@webassemblyjs/helper-wasm-bytecode@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" + integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== + +"@webassemblyjs/helper-wasm-section@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" + integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-buffer" "1.8.5" + "@webassemblyjs/helper-wasm-bytecode" "1.8.5" + "@webassemblyjs/wasm-gen" "1.8.5" + +"@webassemblyjs/ieee754@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" + integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" + integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.8.5": + version "1.8.5" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" + integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== + +"@webassemblyjs/wasm-edit@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" + integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-buffer" "1.8.5" + "@webassemblyjs/helper-wasm-bytecode" "1.8.5" + "@webassemblyjs/helper-wasm-section" "1.8.5" + "@webassemblyjs/wasm-gen" "1.8.5" + "@webassemblyjs/wasm-opt" "1.8.5" + "@webassemblyjs/wasm-parser" "1.8.5" + "@webassemblyjs/wast-printer" "1.8.5" + +"@webassemblyjs/wasm-gen@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" + integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-wasm-bytecode" "1.8.5" + "@webassemblyjs/ieee754" "1.8.5" + "@webassemblyjs/leb128" "1.8.5" + "@webassemblyjs/utf8" "1.8.5" + +"@webassemblyjs/wasm-opt@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" + integrity sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-buffer" "1.8.5" + "@webassemblyjs/wasm-gen" "1.8.5" + "@webassemblyjs/wasm-parser" "1.8.5" + +"@webassemblyjs/wasm-parser@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" + integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-api-error" "1.8.5" + "@webassemblyjs/helper-wasm-bytecode" "1.8.5" + "@webassemblyjs/ieee754" "1.8.5" + "@webassemblyjs/leb128" "1.8.5" + "@webassemblyjs/utf8" "1.8.5" + +"@webassemblyjs/wast-parser@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" + integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/floating-point-hex-parser" "1.8.5" + "@webassemblyjs/helper-api-error" "1.8.5" + "@webassemblyjs/helper-code-frame" "1.8.5" + "@webassemblyjs/helper-fsm" "1.8.5" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/wast-printer@1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" + integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/wast-parser" "1.8.5" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + 
+"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +acorn@^6.2.1: + version "6.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.1.tgz#531e58ba3f51b9dacb9a6646ca4debf5b14ca474" + integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== + +ajv-errors@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" + integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== + +ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" + integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== + +ajv@^6.1.0, ajv@^6.10.2: + version "6.12.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.0.tgz#06d60b96d87b8454a5adaba86e7854da629db4b7" + integrity sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" + integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== + +ansi-styles@^3.2.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +anymatch@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== + dependencies: + micromatch "^3.1.4" + normalize-path "^2.1.1" + +aproba@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= + +array-unique@^0.3.2: + version "0.3.2" + resolved 
"https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= + +asn1.js@^4.0.0: + version "4.10.1" + resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" + integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== + dependencies: + bn.js "^4.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + +assert@^1.1.1: + version "1.5.0" + resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" + integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== + dependencies: + object-assign "^4.1.1" + util "0.10.3" + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= + +async-each@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" + integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +babel-plugin-dynamic-import-node@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" + integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== + dependencies: + object.assign "^4.1.0" + +babylon@^7.0.0-beta.39: + version "7.0.0-beta.47" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.47.tgz#6d1fa44f0abec41ab7c780481e62fd9aafbdea80" + integrity sha512-+rq2cr4GDhtToEzKFD6KZZMDBXhjFAr9JjPw9pAppZACeEWqNM294j+NdBzkSHYXwzzBmVjZ3nEVJlOhbR2gOQ== + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base64-js@^1.0.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" + integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +big.js@^3.1.3: + version "3.2.0" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" + integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity 
sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^1.0.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== + +bindings@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" + integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== + dependencies: + file-uri-to-path "1.0.0" + +bluebird@^3.5.5: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: + version "4.11.8" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" + integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== + +boolbase@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^2.3.1, braces@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +brorand@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= + +browserify-aes@^1.0.0, browserify-aes@^1.0.4: + version "1.2.0" + resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" + integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== + dependencies: + buffer-xor "^1.0.3" + cipher-base "^1.0.0" + create-hash "^1.1.0" + evp_bytestokey "^1.0.3" + inherits "^2.0.1" + safe-buffer "^5.0.1" + +browserify-cipher@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" + integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== + dependencies: + browserify-aes "^1.0.4" + browserify-des "^1.0.0" + evp_bytestokey "^1.0.0" + +browserify-des@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" + integrity 
sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== + dependencies: + cipher-base "^1.0.1" + des.js "^1.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +browserify-rsa@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" + integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= + dependencies: + bn.js "^4.1.0" + randombytes "^2.0.1" + +browserify-sign@^4.0.0: + version "4.0.4" + resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" + integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= + dependencies: + bn.js "^4.1.1" + browserify-rsa "^4.0.0" + create-hash "^1.1.0" + create-hmac "^1.1.2" + elliptic "^6.0.0" + inherits "^2.0.1" + parse-asn1 "^5.0.0" + +browserify-zlib@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" + integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== + dependencies: + pako "~1.0.5" + +buffer-es6@^4.9.3: + version "4.9.3" + resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" + integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= + +buffer-from@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + +buffer-xor@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" + integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= + +buffer@^4.3.0: + version "4.9.2" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + dependencies: + base64-js "^1.0.2" + ieee754 "^1.1.4" + isarray "^1.0.0" + +builtin-modules@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.1.0.tgz#aad97c15131eb76b65b50ef208e7584cd76a7484" + integrity sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw== + +builtin-status-codes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" + integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= + +cacache@^12.0.2: + version "12.0.3" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" + integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== + dependencies: + bluebird "^3.5.5" + chownr "^1.1.1" + figgy-pudding "^3.5.1" + glob "^7.1.4" + graceful-fs "^4.1.15" + infer-owner "^1.0.3" + lru-cache "^5.1.1" + mississippi "^3.0.0" + mkdirp "^0.5.1" + move-concurrently "^1.0.1" + promise-inflight "^1.0.1" + rimraf "^2.6.3" + ssri "^6.0.1" + unique-filename "^1.1.1" + y18n "^4.0.0" + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + 
dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +camel-case@3.0.x: + version "3.0.0" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" + integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= + dependencies: + no-case "^2.2.0" + upper-case "^1.1.1" + +camelcase@^5.0.0: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chokidar@^2.0.2: + version "2.1.8" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" + integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== + dependencies: + anymatch "^2.0.0" + async-each "^1.0.1" + braces "^2.3.2" + glob-parent "^3.1.0" + inherits "^2.0.3" + is-binary-path "^1.0.0" + is-glob "^4.0.0" + normalize-path "^3.0.0" + path-is-absolute "^1.0.0" + readdirp "^2.2.1" + upath "^1.1.1" + optionalDependencies: + fsevents "^1.2.7" + +chownr@^1.1.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +chrome-trace-event@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" + integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== + dependencies: + tslib "^1.9.0" + +cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" + integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +clean-css@4.2.x: + version "4.2.3" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.3.tgz#507b5de7d97b48ee53d84adb0160ff6216380f78" + integrity sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA== + dependencies: + source-map "~0.6.0" + +cliui@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" + integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== + 
dependencies: + string-width "^3.1.0" + strip-ansi "^5.2.0" + wrap-ansi "^5.1.0" + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +command-exists@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.8.tgz#715acefdd1223b9c9b37110a149c6392c2852291" + integrity sha512-PM54PkseWbiiD/mMsbvW351/u+dafwTJ0ye2qB60G1aGQP9j3xK2gmMDc+R34L3nDtx4qMCitXT75mkbkGJDLw== + +commander@2.17.x: + version "2.17.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" + integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== + +commander@^2.20.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@~2.19.0: + version "2.19.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" + integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + +component-emitter@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +concat-stream@^1.5.0: + version "1.6.2" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +console-browserify@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" + integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== + +constants-browserify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" + integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= + 
+convert-source-map@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" + integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== + dependencies: + safe-buffer "~5.1.1" + +copy-concurrently@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" + integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== + dependencies: + aproba "^1.1.1" + fs-write-stream-atomic "^1.0.8" + iferr "^0.1.5" + mkdirp "^0.5.1" + rimraf "^2.5.4" + run-queue "^1.0.0" + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +create-ecdh@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" + integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== + dependencies: + bn.js "^4.1.0" + elliptic "^6.0.0" + +create-hash@^1.1.0, create-hash@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" + integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== + dependencies: + cipher-base "^1.0.1" + inherits "^2.0.1" + md5.js "^1.3.4" + ripemd160 "^2.0.1" + sha.js "^2.4.0" + +create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: + version "1.1.7" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" + integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== + dependencies: + cipher-base "^1.0.3" + create-hash "^1.1.0" + inherits "^2.0.1" + ripemd160 "^2.0.0" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +cross-spawn@6.0.5, cross-spawn@^6.0.0: + version "6.0.5" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + shebang-command "^1.2.0" + which "^1.2.9" + +crypto-browserify@^3.11.0: + version "3.12.0" + resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" + integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== + dependencies: + browserify-cipher "^1.0.0" + browserify-sign "^4.0.0" + create-ecdh "^4.0.0" + create-hash "^1.1.0" + create-hmac "^1.1.0" + diffie-hellman "^5.0.0" + inherits "^2.0.1" + pbkdf2 "^3.0.3" + public-encrypt "^4.0.0" + randombytes "^2.0.0" + randomfill "^1.0.3" + +css-select@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" + integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= + dependencies: + 
boolbase "~1.0.0" + css-what "2.1" + domutils "1.5.1" + nth-check "~1.0.1" + +css-what@2.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" + integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== + +cyclist@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" + integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= + +debug@^2.2.0, debug@^2.3.3: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" + integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== + dependencies: + ms "^2.1.1" + +decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +define-properties@^1.1.2, define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + +des.js@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" + integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== + dependencies: + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + +detect-file@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" + integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= + +diffie-hellman@^5.0.0: + version "5.0.3" + resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" + integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== + 
dependencies: + bn.js "^4.1.0" + miller-rabin "^4.0.0" + randombytes "^2.0.0" + +dom-converter@^0.2: + version "0.2.0" + resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +domain-browser@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" + integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== + +domelementtype@1, domelementtype@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" + integrity sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== + +domhandler@^2.3.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" + integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== + dependencies: + domelementtype "1" + +domutils@1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" + integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^1.5.1: + version "1.7.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +duplexify@^3.4.2, duplexify@^3.6.0: + version "3.7.1" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" + integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== + dependencies: + end-of-stream "^1.0.0" + inherits "^2.0.1" + readable-stream "^2.0.0" + stream-shift "^1.0.0" + +elliptic@^6.0.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" + integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== + dependencies: + bn.js "^4.4.0" + brorand "^1.0.1" + hash.js "^1.0.0" + hmac-drbg "^1.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.0" + +emoji-regex@^7.0.1: + version "7.0.3" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" + integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + 
+emojis-list@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" + integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +end-of-stream@^1.0.0, end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enhanced-resolve@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" + integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== + dependencies: + graceful-fs "^4.1.2" + memory-fs "^0.4.0" + tapable "^1.0.0" + +enhanced-resolve@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" + integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== + dependencies: + graceful-fs "^4.1.2" + memory-fs "^0.5.0" + tapable "^1.0.0" + +entities@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" + integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== + +entities@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" + integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== + +errno@^0.1.3, errno@~0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" + integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== + dependencies: + prr "~1.0.1" + +es-abstract@^1.17.0-next.1: + version "1.17.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.4.tgz#e3aedf19706b20e7c2594c35fc0d57605a79e184" + integrity sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.1.5" + is-regex "^1.0.5" + object-inspect "^1.7.0" + object-keys "^1.1.1" + object.assign "^4.1.0" + string.prototype.trimleft "^2.1.1" + string.prototype.trimright "^2.1.1" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +eslint-scope@^4.0.3: + version "4.0.3" + 
resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" + integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + +esrecurse@^4.1.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" + integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== + dependencies: + estraverse "^4.1.0" + +estraverse@^4.1.0, estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estree-walker@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362" + integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w== + +estree-walker@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +events@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.1.0.tgz#84279af1b34cb75aa88bf5ff291f6d0bd9b31a59" + integrity sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg== + +evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" + integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== + dependencies: + md5.js "^1.3.4" + safe-buffer "^5.1.1" + +execa@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" + integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== + dependencies: + cross-spawn "^6.0.0" + get-stream "^4.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +expand-tilde@^2.0.0, expand-tilde@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" + integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= + dependencies: + homedir-polyfill "^1.0.1" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + 
integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +fast-deep-equal@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz#545145077c501491e33b15ec408c294376e94ae4" + integrity sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +figgy-pudding@^3.5.1: + version "3.5.1" + resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" + integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== + +file-uri-to-path@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" + integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +find-cache-dir@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" + integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== + dependencies: + commondir "^1.0.1" + make-dir "^2.0.0" + pkg-dir "^3.0.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +findup-sync@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" + integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== + dependencies: + detect-file "^1.0.0" + is-glob "^4.0.0" + micromatch "^3.0.4" + resolve-dir "^1.0.1" + +flush-write-stream@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" + integrity 
sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== + dependencies: + inherits "^2.0.3" + readable-stream "^2.3.6" + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + dependencies: + map-cache "^0.2.2" + +from2@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" + integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.0" + +fs-write-stream-atomic@^1.0.8: + version "1.0.10" + resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" + integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= + dependencies: + graceful-fs "^4.1.2" + iferr "^0.1.5" + imurmurhash "^0.1.4" + readable-stream "1 || 2" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^1.2.7: + version "1.2.11" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.11.tgz#67bf57f4758f02ede88fb2a1712fef4d15358be3" + integrity sha512-+ux3lx6peh0BpvY0JebGyZoiR4D+oYzdPZMKJwkZ+sFkNJzpL7tXc/wehS49gUAxg3tmMHPHZkA8JU2rhhgDHw== + dependencies: + bindings "^1.5.0" + nan "^2.12.1" + +fsevents@~2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" + integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +gensync@^1.0.0-beta.1: + version "1.0.0-beta.1" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" + integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg== + +get-caller-file@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-stream@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= + +glob-parent@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + dependencies: + 
is-glob "^3.1.0" + path-dirname "^1.0.0" + +glob@^7.1.3, glob@^7.1.4: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-modules@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" + integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== + dependencies: + global-prefix "^1.0.1" + is-windows "^1.0.1" + resolve-dir "^1.0.0" + +global-prefix@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" + integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= + dependencies: + expand-tilde "^2.0.2" + homedir-polyfill "^1.0.1" + ini "^1.3.4" + is-windows "^1.0.1" + which "^1.2.14" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: + version "4.2.3" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" + integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-symbols@^1.0.0, has-symbols@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" + integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hash-base@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" + integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +hash.js@^1.0.0, hash.js@^1.0.3: + version "1.1.7" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== + dependencies: + inherits "^2.0.3" + minimalistic-assert "^1.0.1" + +he@1.2.x: + version "1.2.0" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +hmac-drbg@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= + dependencies: + hash.js "^1.0.3" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.1" + +homedir-polyfill@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" + integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== + dependencies: + parse-passwd "^1.0.0" + +html-minifier@^3.2.3: + version "3.5.21" + resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" + integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== + dependencies: + camel-case "3.0.x" + clean-css "4.2.x" + commander "2.17.x" + he "1.2.x" + param-case "2.1.x" + relateurl "0.2.x" + uglify-js "3.4.x" + +html-webpack-plugin@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" + integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= + dependencies: + html-minifier "^3.2.3" + loader-utils "^0.2.16" + lodash "^4.17.3" + pretty-error "^2.0.2" + tapable "^1.0.0" + toposort "^1.0.0" + util.promisify "1.0.0" + +htmlparser2@^3.3.0: + version "3.10.1" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" + integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== + dependencies: + domelementtype "^1.3.1" + domhandler "^2.3.0" + domutils "^1.5.1" + entities "^1.1.1" + inherits "^2.0.1" + readable-stream "^3.1.1" + +https-browserify@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" + integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= + +ieee754@^1.1.4: + version "1.1.13" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" + integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + +iferr@^0.1.5: + version "0.1.5" + resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" + integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= + +import-local@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" + integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== + dependencies: + pkg-dir "^3.0.0" + resolve-cwd "^2.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +infer-owner@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" + integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" + integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +ini@^1.3.4, ini@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + +interpret@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" + integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== + +invert-kv@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" + integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-binary-path@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= + dependencies: + binary-extensions "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-callable@^1.1.4, is-callable@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.5.tgz#f7e46b596890456db74e7f6e976cb3273d06faab" + integrity sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q== + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.0, is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + +is-glob@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + dependencies: + is-extglob "^2.1.0" + +is-glob@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= + dependencies: + kind-of "^3.0.2" + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-reference@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.1.4.tgz#3f95849886ddb70256a3e6d062b1a68c13c51427" + integrity sha512-uJA/CDPO3Tao3GTrxYn6AwkM4nUPJiGGYu5+cB8qbC7WGFlrKZbiRo7SFKxUAEpFUfiHofWCXBUNhvYJMh+6zw== + dependencies: + "@types/estree" "0.0.39" + +is-regex@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae" + integrity sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ== + dependencies: + has "^1.0.3" + +is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + +is-symbol@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + dependencies: + has-symbols "^1.0.1" + +is-windows@^1.0.1, is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" + integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +isobject@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + +jest-worker@^24.9.0: + version "24.9.0" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-24.9.0.tgz#5dbfdb5b2d322e98567898238a9697bcce67b3e5" + integrity sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw== + dependencies: + merge-stream "^2.0.0" + supports-color "^6.1.0" + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-parse-better-errors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json5@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" + integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" + integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== + dependencies: + minimist "^1.2.0" + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity 
sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +lcid@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" + integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== + dependencies: + invert-kv "^2.0.0" + +loader-runner@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" + integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== + +loader-utils@1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" + integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== + dependencies: + big.js "^5.2.2" + emojis-list "^2.0.0" + json5 "^1.0.1" + +loader-utils@^0.2.16: + version "0.2.17" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" + integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= + dependencies: + big.js "^3.1.3" + emojis-list "^2.0.0" + json5 "^0.5.0" + object-assign "^4.0.1" + +loader-utils@^1.1.0, loader-utils@^1.2.3: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" + integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^1.0.1" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +lodash@^4.17.13, lodash@^4.17.3: + version "4.17.15" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" + integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== + +long@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/long/-/long-3.2.0.tgz#d821b7138ca1cb581c172990ef14db200b5c474b" + integrity sha1-2CG3E4yhy1gcFymQ7xTbIAtcR0s= + +lower-case@^1.1.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" + integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +magic-string@^0.25.2: + version "0.25.7" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" + integrity sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA== + dependencies: + sourcemap-codec "^1.4.4" + +make-dir@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" + integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + dependencies: 
+ pify "^4.0.1" + semver "^5.6.0" + +mamacro@^0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" + integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== + +map-age-cleaner@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" + integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== + dependencies: + p-defer "^1.0.0" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= + dependencies: + object-visit "^1.0.0" + +md5.js@^1.3.4: + version "1.3.5" + resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" + integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +mem@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" + integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== + dependencies: + map-age-cleaner "^0.1.1" + mimic-fn "^2.0.0" + p-is-promise "^2.0.0" + +memory-fs@^0.4.0, memory-fs@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" + integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= + dependencies: + errno "^0.1.3" + readable-stream "^2.0.1" + +memory-fs@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" + integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== + dependencies: + errno "^0.1.3" + readable-stream "^2.0.1" + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +miller-rabin@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" + integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== + dependencies: + bn.js "^4.0.0" + brorand "^1.0.1" + +mimic-fn@^2.0.0: + version "2.1.0" + 
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= + +minimist@^1.2.0: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +mississippi@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" + integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== + dependencies: + concat-stream "^1.5.0" + duplexify "^3.4.2" + end-of-stream "^1.1.0" + flush-write-stream "^1.0.0" + from2 "^2.1.0" + parallel-transform "^1.1.0" + pump "^3.0.0" + pumpify "^1.3.3" + stream-each "^1.1.0" + through2 "^2.0.0" + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= + dependencies: + minimist "0.0.8" + +move-concurrently@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" + integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= + dependencies: + aproba "^1.1.1" + copy-concurrently "^1.0.0" + fs-write-stream-atomic "^1.0.8" + mkdirp "^0.5.1" + rimraf "^2.5.4" + run-queue "^1.0.3" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +nan@^2.12.1: + version "2.14.0" + resolved 
"https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" + integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +neo-async@^2.5.0, neo-async@^2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" + integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== + +nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +no-case@^2.2.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" + integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== + dependencies: + lower-case "^1.1.1" + +node-libs-browser@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" + integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== + dependencies: + assert "^1.1.1" + browserify-zlib "^0.2.0" + buffer "^4.3.0" + console-browserify "^1.1.0" + constants-browserify "^1.0.0" + crypto-browserify "^3.11.0" + domain-browser "^1.1.1" + events "^3.0.0" + https-browserify "^1.0.0" + os-browserify "^0.3.0" + path-browserify "0.0.1" + process "^0.11.10" + punycode "^1.2.4" + querystring-es3 "^0.2.0" + readable-stream "^2.3.3" + stream-browserify "^2.0.1" + stream-http "^2.7.2" + string_decoder "^1.0.0" + timers-browserify "^2.0.4" + tty-browserify "0.0.0" + url "^0.11.0" + util "^0.11.0" + vm-browserify "^1.0.1" + +normalize-path@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= + dependencies: + path-key "^2.0.0" + +nth-check@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +object-assign@^4.0.1, 
object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-inspect@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" + integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== + +object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= + dependencies: + isobject "^3.0.0" + +object.assign@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" + integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== + dependencies: + define-properties "^1.1.2" + function-bind "^1.1.1" + has-symbols "^1.0.0" + object-keys "^1.0.11" + +object.getownpropertydescriptors@^2.0.3: + version "2.1.0" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649" + integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.1" + +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + dependencies: + isobject "^3.0.1" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +os-browserify@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" + integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= + +os-locale@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" + integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== + dependencies: + execa "^1.0.0" + lcid "^2.0.0" + mem "^4.0.0" + +p-defer@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" + integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + 
+p-is-promise@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" + integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== + +p-limit@^2.0.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e" + integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ== + dependencies: + p-try "^2.0.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +pako@~1.0.5: + version "1.0.11" + resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + +parallel-transform@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" + integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== + dependencies: + cyclist "^1.0.1" + inherits "^2.0.3" + readable-stream "^2.1.5" + +param-case@2.1.x: + version "2.1.1" + resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" + integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= + dependencies: + no-case "^2.2.0" + +parse-asn1@^5.0.0: + version "5.1.5" + resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" + integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== + dependencies: + asn1.js "^4.0.0" + browserify-aes "^1.0.0" + create-hash "^1.1.0" + evp_bytestokey "^1.0.0" + pbkdf2 "^3.0.3" + safe-buffer "^5.1.1" + +parse-passwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" + integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + +path-browserify@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" + integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== + +path-dirname@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^2.0.0, path-key@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +pbkdf2@^3.0.3: + version "3.0.17" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" + integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== + dependencies: + create-hash "^1.1.2" + create-hmac "^1.1.4" + ripemd160 "^2.0.1" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +pkg-dir@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" + integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== + dependencies: + find-up "^3.0.0" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= + +pretty-error@^2.0.2: + version "2.1.1" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" + integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= + dependencies: + renderkid "^2.0.1" + utila "~0.4" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= + +promise-inflight@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" + integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= + +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= + +public-encrypt@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" + integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== + dependencies: + bn.js "^4.1.0" + browserify-rsa "^4.0.0" + create-hash "^1.1.0" + parse-asn1 "^5.0.0" + randombytes "^2.0.1" + safe-buffer "^5.1.2" + +pump@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" + integrity 
sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +pumpify@^1.3.3: + version "1.5.1" + resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" + integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== + dependencies: + duplexify "^3.6.0" + inherits "^2.0.3" + pump "^2.0.0" + +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= + +punycode@^1.2.4: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +querystring-es3@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" + integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= + +randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +randomfill@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" + integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== + dependencies: + randombytes "^2.0.5" + safe-buffer "^5.1.0" + +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.1.1: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate 
"^1.0.1" + +readdirp@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" + integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== + dependencies: + graceful-fs "^4.1.11" + micromatch "^3.1.10" + readable-stream "^2.0.2" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +relateurl@0.2.x: + version "0.2.7" + resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= + +renderkid@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" + integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== + dependencies: + css-select "^1.1.0" + dom-converter "^0.2" + htmlparser2 "^3.3.0" + strip-ansi "^3.0.0" + utila "^0.4.0" + +repeat-element@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-cwd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" + integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= + dependencies: + resolve-from "^3.0.0" + +resolve-dir@^1.0.0, resolve-dir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" + integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= + dependencies: + expand-tilde "^2.0.0" + global-modules "^1.0.0" + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + integrity sha1-six699nWiBvItuZTM17rywoYh0g= + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= + +resolve@^1.11.0, resolve@^1.14.1, resolve@^1.14.2, 
resolve@^1.3.2: + version "1.15.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.15.1.tgz#27bdcdeffeaf2d6244b95bb0f9f4b4653451f3e8" + integrity sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w== + dependencies: + path-parse "^1.0.6" + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +rimraf@^2.5.4, rimraf@^2.6.3: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + +ripemd160@^2.0.0, ripemd160@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" + integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + +rollup-plugin-dts@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-dts/-/rollup-plugin-dts-1.3.0.tgz#34de28ea8c9464392f2b0d4cb8cd0fe7c51d482e" + integrity sha512-G08HZvwliQdRbAOwNb1VnyKuRSp1EXpKPW5FrvRcHbxsmPP2Co443zZ0p8tSCTjuC5xNYyZ9VMzjcwtqrPn6Ew== + optionalDependencies: + "@babel/code-frame" "^7.8.3" + +rollup-plugin-rust@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-rust/-/rollup-plugin-rust-1.2.0.tgz#580ec7203aa0f1cf4ae370492c4635a3480caf60" + integrity sha512-LI+Thy5JrAa2eAbGAv6zkEFeR6aKgirI6Gvk7LqeFOaGpRjfSIqcOg+4t0Qe5dXixDPYFlL+qIbdW2O6IIvEEA== + dependencies: + rollup-pluginutils "^2.3.1" + +rollup-plugin-terser@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-5.3.0.tgz#9c0dd33d5771df9630cd027d6a2559187f65885e" + integrity sha512-XGMJihTIO3eIBsVGq7jiNYOdDMb3pVxuzY0uhOE/FM4x/u9nQgr3+McsjzqBn3QfHIpNSZmFnpoKAwHBEcsT7g== + dependencies: + "@babel/code-frame" "^7.5.5" + jest-worker "^24.9.0" + rollup-pluginutils "^2.8.2" + serialize-javascript "^2.1.2" + terser "^4.6.2" + +rollup-pluginutils@^2.3.1, rollup-pluginutils@^2.8.2: + version "2.8.2" + resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e" + integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ== + dependencies: + estree-walker "^0.6.1" + +rollup@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.0.6.tgz#865d6bb15a28cff3429ea1dc57236013661cb9de" + integrity sha512-P42IlI6a/bxh52ed8hEXXe44LcHfep2f26OZybMJPN1TTQftibvQEl3CWeOmJrzqGbFxOA000QXDWO9WJaOQpA== + optionalDependencies: + fsevents "~2.1.2" + +run-queue@^1.0.0, run-queue@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" + integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= + dependencies: + aproba "^1.1.1" + +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" + integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== + 
+safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + dependencies: + ret "~0.1.10" + +schema-utils@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" + integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== + dependencies: + ajv "^6.1.0" + ajv-errors "^1.0.0" + ajv-keywords "^3.1.0" + +semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +serialize-javascript@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-2.1.2.tgz#ecec53b0e0317bdc95ef76ab7074b7384785fa61" + integrity sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ== + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setimmediate@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= + +sha.js@^2.4.0, sha.js@^2.4.8: + version "2.4.11" + resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + +signal-exit@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + 
dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +source-list-map@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-resolve@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-support@~0.5.12: + version "0.5.16" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" + integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= + +source-map@^0.5.0, source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sourcemap-codec@^1.4.4: + version "1.4.8" + resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +ssri@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" + integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== + dependencies: + 
figgy-pudding "^3.5.1" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +stream-browserify@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" + integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== + dependencies: + inherits "~2.0.1" + readable-stream "^2.0.2" + +stream-each@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" + integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== + dependencies: + end-of-stream "^1.1.0" + stream-shift "^1.0.0" + +stream-http@^2.7.2: + version "2.8.3" + resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" + integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== + dependencies: + builtin-status-codes "^3.0.0" + inherits "^2.0.1" + readable-stream "^2.3.6" + to-arraybuffer "^1.0.0" + xtend "^4.0.0" + +stream-shift@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" + integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== + +string-width@^3.0.0, string-width@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" + integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + dependencies: + emoji-regex "^7.0.1" + is-fullwidth-code-point "^2.0.0" + strip-ansi "^5.1.0" + +string.prototype.trimleft@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74" + integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag== + dependencies: + define-properties "^1.1.3" + function-bind "^1.1.1" + +string.prototype.trimright@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9" + integrity sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g== + dependencies: + define-properties "^1.1.3" + function-bind "^1.1.1" + +string_decoder@^1.0.0, string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= + +supports-color@6.1.0, supports-color@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" + integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + dependencies: + has-flag "^3.0.0" + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +tapable@^1.0.0, tapable@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +terser-webpack-plugin@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz#5ecaf2dbdc5fb99745fd06791f46fc9ddb1c9a7c" + integrity sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA== + dependencies: + cacache "^12.0.2" + find-cache-dir "^2.1.0" + is-wsl "^1.1.0" + schema-utils "^1.0.0" + serialize-javascript "^2.1.2" + source-map "^0.6.1" + terser "^4.1.2" + webpack-sources "^1.4.0" + worker-farm "^1.7.0" + +terser@^4.1.2, terser@^4.6.2: + version "4.6.6" + resolved "https://registry.yarnpkg.com/terser/-/terser-4.6.6.tgz#da2382e6cafbdf86205e82fb9a115bd664d54863" + integrity sha512-4lYPyeNmstjIIESr/ysHg2vUPRGf2tzF9z2yYwnowXVuVzLEamPN1Gfrz7f8I9uEPuHcbFlW4PLIAsJoxXyJ1g== + dependencies: + commander "^2.20.0" + source-map "~0.6.1" + source-map-support "~0.5.12" + +text-encoding@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/text-encoding/-/text-encoding-0.7.0.tgz#f895e836e45990624086601798ea98e8f36ee643" + integrity sha512-oJQ3f1hrOnbRLOcwKz0Liq2IcrvDeZRHXhd9RgLrsT+DjWY/nty1Hi7v3dtkaEYbPYe0mUoOfzRrMwfXXwgPUA== + +through2@^2.0.0: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + +timers-browserify@^2.0.4: + version "2.0.11" + resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" + integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== + dependencies: + setimmediate "^1.0.4" + +to-arraybuffer@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" + integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toposort@^1.0.0: + version "1.0.7" + resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" + integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= + +tslib@^1.9.0: + version "1.11.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.11.1.tgz#eb15d128827fbee2841549e171f45ed338ac7e35" + integrity sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA== + +tty-browserify@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" + integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= + +typescript@^3.8.3: + version "3.8.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.8.3.tgz#409eb8544ea0335711205869ec458ab109ee1061" + integrity sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w== + +uglify-js@3.4.x: + version "3.4.10" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" + integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== + dependencies: + commander "~2.19.0" + source-map "~0.6.1" + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +unique-filename@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" + integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== + dependencies: + unique-slug "^2.0.0" + +unique-slug@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" + integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== + dependencies: + imurmurhash "^0.1.4" + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +upath@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +upper-case@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" + integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= + +uri-js@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" + integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + +url@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= + dependencies: + punycode "1.3.2" + querystring "0.2.0" + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +util-deprecate@^1.0.1, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util.promisify@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" + integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== + dependencies: + define-properties "^1.1.2" + object.getownpropertydescriptors "^2.0.3" + +util@0.10.3: + version "0.10.3" + resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" + integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= + dependencies: + inherits "2.0.1" + +util@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" + integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== + dependencies: + inherits "2.0.3" + +utila@^0.4.0, utila@~0.4: + version "0.4.0" + resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= + +v8-compile-cache@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" + integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== + +vm-browserify@^1.0.1: + 
version "1.1.2" + resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" + integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== + +wasm-dce@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wasm-dce/-/wasm-dce-1.0.2.tgz#7e21e566fa17c61e8e257742a377a5bdb8f2e4f5" + integrity sha512-Fq1+nu43ybsjSnBquLrW/cULmKs61qbv9k8ep13QUe0nABBezMoNAA+j6QY66MW0/eoDVDp1rjXDqQ2VKyS/Xg== + dependencies: + "@babel/core" "^7.0.0-beta.39" + "@babel/traverse" "^7.0.0-beta.39" + "@babel/types" "^7.0.0-beta.39" + babylon "^7.0.0-beta.39" + webassembly-interpreter "0.0.30" + +wasm-loader@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/wasm-loader/-/wasm-loader-1.3.0.tgz#a123a6b6c9a9dac60de38449703be3537742e155" + integrity sha512-R4s75XH+o8qM+WaRrAU9S2rbAMDzob18/S3V8R9ZoFpZkPWLAohWWlzWAp1ybeTkOuuku/X1zJtxiV0pBYxZww== + dependencies: + loader-utils "^1.1.0" + wasm-dce "^1.0.0" + +watchpack@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" + integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== + dependencies: + chokidar "^2.0.2" + graceful-fs "^4.1.2" + neo-async "^2.5.0" + +webassembly-floating-point-hex-parser@0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/webassembly-floating-point-hex-parser/-/webassembly-floating-point-hex-parser-0.1.2.tgz#85bb01f54e68690c2645ea0cfad26c1110fdf988" + integrity sha512-TUf1H++8U10+stJbFydnvrpG5Sznz5Rilez/oZlV5zI0C/e4cSxd8rALAJ8VpTvjVWxLmL3SVSJUK6Ap9AoiNg== + +webassembly-interpreter@0.0.30: + version "0.0.30" + resolved "https://registry.yarnpkg.com/webassembly-interpreter/-/webassembly-interpreter-0.0.30.tgz#f35aaec0fff2e6fd9ca7277eb1a9059dccedcb7f" + integrity sha512-+Jdy2piEvz9T5j751mOE8+rBO12p+nNW6Fg4kJZ+zP1oUfsm+151sbAbM8AFxWTURmWCGP+r8Lxwfv3pzN1bCQ== + dependencies: + "@babel/code-frame" "^7.0.0-beta.36" + long "^3.2.0" + webassembly-floating-point-hex-parser "0.1.2" + +webpack-cli@^3.3.11: + version "3.3.11" + resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.11.tgz#3bf21889bf597b5d82c38f215135a411edfdc631" + integrity sha512-dXlfuml7xvAFwYUPsrtQAA9e4DOe58gnzSxhgrO/ZM/gyXTBowrsYeubyN4mqGhYdpXMFNyQ6emjJS9M7OBd4g== + dependencies: + chalk "2.4.2" + cross-spawn "6.0.5" + enhanced-resolve "4.1.0" + findup-sync "3.0.0" + global-modules "2.0.0" + import-local "2.0.0" + interpret "1.2.0" + loader-utils "1.2.3" + supports-color "6.1.0" + v8-compile-cache "2.0.3" + yargs "13.2.4" + +webpack-sources@^1.4.0, webpack-sources@^1.4.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack@^4.42.0: + version "4.42.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.42.0.tgz#b901635dd6179391d90740a63c93f76f39883eb8" + integrity sha512-EzJRHvwQyBiYrYqhyjW9AqM90dE4+s1/XtCfn7uWg6cS72zH+2VPFAlsnW0+W0cDi0XRjNKUMoJtpSi50+Ph6w== + dependencies: + "@webassemblyjs/ast" "1.8.5" + "@webassemblyjs/helper-module-context" "1.8.5" + "@webassemblyjs/wasm-edit" "1.8.5" + "@webassemblyjs/wasm-parser" "1.8.5" + acorn "^6.2.1" + ajv "^6.10.2" + ajv-keywords "^3.4.1" + chrome-trace-event "^1.0.2" + 
enhanced-resolve "^4.1.0" + eslint-scope "^4.0.3" + json-parse-better-errors "^1.0.2" + loader-runner "^2.4.0" + loader-utils "^1.2.3" + memory-fs "^0.4.1" + micromatch "^3.1.10" + mkdirp "^0.5.1" + neo-async "^2.6.1" + node-libs-browser "^2.2.1" + schema-utils "^1.0.0" + tapable "^1.1.3" + terser-webpack-plugin "^1.4.3" + watchpack "^1.6.0" + webpack-sources "^1.4.1" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + +which@^1.2.14, which@^1.2.9, which@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +worker-farm@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" + integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== + dependencies: + errno "~0.1.7" + +wrap-ansi@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" + integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== + dependencies: + ansi-styles "^3.2.0" + string-width "^3.0.0" + strip-ansi "^5.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +xtend@^4.0.0, xtend@~4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" + integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== + +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yargs-parser@^13.1.0: + version "13.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" + integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs@13.2.4: + version "13.2.4" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" + integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== + dependencies: + cliui "^5.0.0" + find-up "^3.0.0" + get-caller-file "^2.0.1" + os-locale "^3.1.0" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^3.0.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^13.1.0" + +"zksync-crypto@file:pkg": + version "0.1.0" diff --git a/js/zksync.js/package.json b/js/zksync.js/package.json index 74e612f0e5..149a82d6b4 100644 --- a/js/zksync.js/package.json +++ 
b/js/zksync.js/package.json @@ -10,9 +10,11 @@ "bn.js": "^5.0.0", "crypto-js": "^3.1.9-1", "elliptic": "^6.5.0", + "example-node-wasm": "../zksync-crypto", "js-sha256": "^0.9.0", "websocket": "^1.0.30", - "websocket-as-promised": "^0.10.1" + "websocket-as-promised": "^0.10.1", + "zksync-crypto": "../zksync-crypto/pkg" }, "peerDependencies": { "ethers": "^4.0.33" diff --git a/js/zksync.js/src/crypto.ts b/js/zksync.js/src/crypto.ts index 24f6bd82f2..17cb5e8f83 100644 --- a/js/zksync.js/src/crypto.ts +++ b/js/zksync.js/src/crypto.ts @@ -1,383 +1,33 @@ import BN = require("bn.js"); -import { curve } from "elliptic"; -import EdwardsPoint = curve.edwards.EdwardsPoint; -import { sha256 } from "js-sha256"; -import edwards = curve.edwards; import { Signature } from "./types"; -import { buffer2bitsBE, buffer2bitsLE } from "./utils"; - -const blake2b = require("blake2b"); -const elliptic = require("elliptic"); -const crypto = require("crypto"); - -// ! `Fr modulus = 21888242871839275222246405745257275088548364400416034343698204186575808495617` -// ! -// ! It takes the form `-x^2 + y^2 = 1 + dx^2y^2` with -// ! `d = -(168696/168700)` using the isomorphism from usual Baby Jubjub -// ! with a requirement that `a' = -1, a = 168696`, that results in -// ! ``` -// ! scaling = 1911982854305225074381251344103329931637610209014896889891168275855466657090 -// ! a' = 21888242871839275222246405745257275088548364400416034343698204186575808495616 == -1 = a*scale^2 mod P -// ! d' = 12181644023421730124874158521699555681764249180949974110617291017600649128846 == -(168696/168700) = d*scale^2 -// ! ``` -const babyJubjubParams = { - a: new BN( - "21888242871839275222246405745257275088548364400416034343698204186575808495616" - ), - d: new BN( - "12181644023421730124874158521699555681764249180949974110617291017600649128846" - ), - n: new BN( - "2736030358979909402780800718157159386076813972158567259200215660948447373041" - ), - p: new BN( - "21888242871839275222246405745257275088548364400416034343698204186575808495617" - ), - c: "1", - g: [ - "2ef3f9b423a2c8c74e9803958f6c320e854a1c1c06cd5cc8fd221dc052d76df7", - "05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7" - ] -}; - -const fsModulus = babyJubjubParams.n; -const fsOne = new BN(1); -const fsZero = new BN(0); -export const altjubjubCurve = new elliptic.curve.edwards(babyJubjubParams); -const curveZero = altjubjubCurve.point("0", "1"); -const chunksPerGenerator = 62; -export const addressLen = 20; -const PAD_MSG_BEFORE_HASH_BYTES_LEN = 92; - -const gen1 = altjubjubCurve.point( - "184570ed4909a81b2793320a26e8f956be129e4eed381acf901718dff8802135", - "1c3a9a830f61587101ef8cbbebf55063c1c6480e7e5a7441eac7f626d8f69a45" -); -const gen2 = altjubjubCurve.point( - "0afc00ffa0065f5479f53575e86f6dcd0d88d7331eefd39df037eea2d6f031e4", - "237a6734dd50e044b4f44027ee9e70fcd2e5724ded1d1c12b820a11afdc15c7a" -); -const gen3 = altjubjubCurve.point( - "00fb62ad05ee0e615f935c5a83a870f389a5ea2baccf22ad731a4929e7a75b37", - "00bc8b1c9d376ceeea2cf66a91b7e2ad20ab8cce38575ac13dbefe2be548f702" -); -const gen4 = altjubjubCurve.point( - "0675544aa0a708b0c584833fdedda8d89be14c516e0a7ef3042f378cb01f6e48", - "169025a530508ee4f1d34b73b4d32e008b97da2147f15af3c53f405cf44f89d4" -); -const gen5 = altjubjubCurve.point( - "07350a0660a05014168047155c0a0647ea2720ecb182a6cb137b29f8a5cfd37f", - "3004ad73b7abe27f17ec04b04b450955a4189dd012b4cf4b174af15bd412696a" -); -const basicGenerators = [gen1, gen2, gen3, gen4, gen5]; - -function wrapScalar(fs: BN): BN { - while (fs.ltn(0)) { - fs = 
fs.add(fsModulus); - } - if (fs.gte(fsModulus)) { - fs = fs.mod(fsModulus); - } - return fs; -} - -let generatorExpTable; - -function lookupGeneratorFromTable(generator, window, idx) { - if (!generatorExpTable) { - generatorExpTable = genPedersonHashLookupTable(); - } - return generatorExpTable[generator][window][idx]; -} - -function calulateGenerator(generator, window, idx) { - const basePower = new BN(256).pow(new BN(window)); - const power = basePower.muln(idx); - return basicGenerators[generator].mul(power).normalize(); -} - -function genPedersonHashLookupTable() { - function genTableForGenerator(g) { - const result = []; - for (let window = 0; window < 32; ++window) { - const window_table = [curveZero]; - let accum = curveZero; - for (let mul = 1; mul < 256; ++mul) { - accum = accum.add(g).normalize(); - window_table.push(accum); - } - g = g.mul(new BN(256)); - result.push(window_table); - } - return result; - } - - const table = []; - for (const g of basicGenerators) { - table.push(genTableForGenerator(g)); - } - return table; -} - -export function pedersenHash( - input: Buffer, - bit_endianness: "le" | "be" = "le" -): EdwardsPoint { - const personaizationBits = new Array(6).fill(true); - let bits; - if (bit_endianness == "le") { - bits = personaizationBits.concat(buffer2bitsLE(input)); - } else { - bits = personaizationBits.concat(buffer2bitsBE(input)); - } - - function fsToPoint(fs, generator) { - fs = wrapScalar(fs); - - let tmpPoint = curveZero; - const accStr = fs.toString("hex").padStart(64, "0"); - const accBuff = Buffer.from(accStr, "hex").reverse(); - for (let window = 0; window < 32; ++window) { - tmpPoint = tmpPoint.add( - calulateGenerator(generator, window, accBuff[window]) - ); - } - return tmpPoint; - } - - while (bits.length % 3 != 0) { - bits.push(false); - } - - let result = curveZero; - - let newChunkEncountered = false; - let currentTriple = 0; - let currentGenerator = 0; - - let generatorChunksLeft = chunksPerGenerator; - - let acc = fsZero; - let cur = fsOne; - - while (bits.length > 0) { - const triple = bits.slice(0, 3); - bits = bits.slice(3); - ++currentTriple; - generatorChunksLeft -= 1; - newChunkEncountered = true; - - let tmp = cur; - const [a, b, c] = triple; - if (a) { - tmp = tmp.add(cur); - } - cur = cur.muln(2); - if (b) { - tmp = tmp.add(cur); - } - if (c) { - tmp = tmp.neg(); - } - acc = acc.add(tmp); - - cur = cur.muln(8); - - if (generatorChunksLeft == 0) { - result = result.add(fsToPoint(acc, currentGenerator)); - ++currentGenerator; - generatorChunksLeft = chunksPerGenerator; - acc = fsZero; - cur = fsOne; - newChunkEncountered = false; - } - } - - if (newChunkEncountered) { - result = result.add(fsToPoint(acc, currentGenerator)); - } - - return result.normalize(); -} - -function to_uniform(bytes: Buffer): BN { - const bits = new Array(bytes.length * 8); - let bit_n = 0; - for (let i = bytes.length - 1; i >= 0; --i) { - const b = bytes[i]; - bits[bit_n] = (b & 0x80) != 0; - bits[bit_n + 1] = (b & 0x40) != 0; - bits[bit_n + 2] = (b & 0x20) != 0; - bits[bit_n + 3] = (b & 0x10) != 0; - bits[bit_n + 4] = (b & 0x08) != 0; - bits[bit_n + 5] = (b & 0x04) != 0; - bits[bit_n + 6] = (b & 0x02) != 0; - bits[bit_n + 7] = (b & 0x01) != 0; - bit_n += 8; - } - - let res = new BN(0); - for (let n = 0; n < bits.length; n++) { - res = res.muln(2); - if (bits[n]) { - res = res.addn(1); - } - } - - return wrapScalar(res); -} - -function balke2bHStar(a: Buffer, b: Buffer): BN { - let output = new Uint8Array(64); - const hash = blake2b(64, null, null, 
Buffer.from("Zcash_RedJubjubH")); - hash.update(a); - hash.update(b); - output = hash.digest(); - const buff = Buffer.from(output); - - return to_uniform(buff); -} - -function sha256HStart(a: Buffer, b: Buffer): BN { - const hasher = sha256.create(); - const personaization = ""; - hasher.update(personaization); - hasher.update(a); - hasher.update(b); - const hash = Buffer.from(hasher.array()); - return to_uniform(hash); -} - -function pedersenHStar(input: Buffer): BN { - const p_hash_start_res = pedersenHash(input); - const p_hash_star_fe = to_uniform( - p_hash_start_res.getX().toArrayLike(Buffer, "le", 32) - ); - return p_hash_star_fe; -} - -export function musigSHA256(priv_key: BN, msg: Buffer): Signature { - const msgToHash = Buffer.alloc(PAD_MSG_BEFORE_HASH_BYTES_LEN, 0); - msg.copy(msgToHash); - msg = pedersenHash(msgToHash, "be") - .getX() - .toArrayLike(Buffer, "le", 32); - - const t = crypto.randomBytes(80); - - const pub_key = privateKeyToPublicKey(priv_key); - const pk_bytes = pub_key.getX().toArrayLike(Buffer, "le", 32); - - const r = balke2bHStar(t, msg); - const r_g = altjubjubCurve.g.mul(r); - const r_g_bytes = r_g.getX().toArrayLike(Buffer, "le", 32); - - const concat = Buffer.concat([pk_bytes, r_g_bytes]); - - const msg_padded = Buffer.alloc(32, 0); - msg.copy(msg_padded, 0, 0, 32); - - const s = wrapScalar( - sha256HStart(concat, msg_padded) - .mul(priv_key) - .add(r) - ); - - const signature = Buffer.concat([ - serializePointPacked(r_g), - s.toArrayLike(Buffer, "le", 32) - ]).toString("hex"); - const publicKey = serializePointPacked(pub_key).toString("hex"); - return { pubKey: publicKey, signature }; -} - -export function musigPedersen(priv_key: BN, msg: Buffer): Signature { - const msgToHash = Buffer.alloc(PAD_MSG_BEFORE_HASH_BYTES_LEN, 0); - msg.copy(msgToHash); - msg = pedersenHash(msgToHash, "be") - .getX() - .toArrayLike(Buffer, "le", 32); - - const t = crypto.randomBytes(80); - - const pub_key = privateKeyToPublicKey(priv_key); - const pk_bytes = pub_key.getX().toArrayLike(Buffer, "le", 32); - - const r = balke2bHStar(t, msg); - const r_g = altjubjubCurve.g.mul(r); - const r_g_bytes = r_g.getX().toArrayLike(Buffer, "le", 32); - - const concat = Buffer.concat([pk_bytes, r_g_bytes]); - const concat_hash_bytes = pedersenHash(concat) - .getX() - .toArrayLike(Buffer, "le", 32); - - const msg_padded = Buffer.alloc(32, 0); - msg.copy(msg_padded, 0, 0, 32); - - const s = wrapScalar( - pedersenHStar(Buffer.concat([concat_hash_bytes, msg_padded])) - .mul(priv_key) - .add(r) - ); - - const signature = Buffer.concat([ - serializePointPacked(r_g), - s.toArrayLike(Buffer, "le", 32) - ]).toString("hex"); - const publicKey = serializePointPacked(pub_key).toString("hex"); - return { pubKey: publicKey, signature }; -} - -export function privateKeyToPublicKey(pk: BN): edwards.EdwardsPoint { - return altjubjubCurve.g.mul(pk); -} - -export function pubkeyToAddress(pubKey: edwards.EdwardsPoint): Buffer { - const x = pubKey.getX().toArrayLike(Buffer, "le", 32); - const y = pubKey.getY().toArrayLike(Buffer, "le", 32); - const res = pedersenHash(Buffer.concat([x, y])) - .getX() - .toArrayLike(Buffer, "le", 32) - .slice(0, addressLen) - .reverse(); - return res; -} - -export function serializePointPacked(point: edwards.EdwardsPoint): Buffer { - const y = point.getY(); - const y_buff = y.toArrayLike(Buffer, "le", 32); - - if ( - altjubjubCurve - .pointFromY(y, true) - .getX() - .eq(point.getX()) - ) { - // x is odd - y_buff[y_buff.length - 1] |= 1 << 7; - } - return y_buff; -} - -export 
function signTransactionBytes(privKey: BN, bytes: Buffer): Signature { - return musigSHA256(privKey, bytes); -} - -export function privateKeyFromSeed(seed: Buffer): BN { - if (seed.length < 32) { - throw new Error("Seed is too short"); - } - let effectiveSeed = new Uint8Array(seed); - while (true) { - const hasher = sha256.create(); - hasher.update(effectiveSeed); - const hashResult = new Uint8Array(hasher.arrayBuffer()); - const privateKey = new BN(hashResult); - if (privateKey.gte(fsModulus)) { - effectiveSeed = hashResult; - continue; - } - return privateKey; - } +import zksync_crypto from "example-node-wasm"; +// import * as zksync_crypto from '../node_modules/zksync-crypto/zksync_crypto.js'; +// console.log({zksync_crypto}); +// zksync_crypto.then(console.log); + +// init(); +// const zksync_crypto = import("zksync-crypto"); +// import * as scrscr from 'example-node-wasm'; +// console.log({scrscr}); + +export async function signTransactionBytes(privKey: BN, bytes: Buffer): Promise { + const { sign_musig_sha256 } = await zksync_crypto; + const signaturePacked = sign_musig_sha256(privKey.toBuffer(), bytes); + const pubKey = Buffer.from(signaturePacked.slice(0,32)).toString("hex"); + const signature = Buffer.from(signaturePacked.slice(32, 32 + 64)).toString("hex"); + return { + pubKey, + signature, + }; +} + +export async function privateKeyFromSeed(seed: Buffer): Promise { + console.log({zksync_crypto: await zksync_crypto}); + const { private_key_from_seed } = await zksync_crypto; + return new BN(private_key_from_seed(seed)); +} + +export async function privateKeyToPubKeyHash(privateKey: BN): Promise { + const { private_key_to_pubkey_hash } = await zksync_crypto; + return `sync:${Buffer.from(private_key_to_pubkey_hash(privateKey.toBuffer())).toString("hex")}` } diff --git a/js/zksync.js/src/signer.ts b/js/zksync.js/src/signer.ts index e1c76214a0..7fd77458e5 100644 --- a/js/zksync.js/src/signer.ts +++ b/js/zksync.js/src/signer.ts @@ -1,10 +1,8 @@ import { curve } from "elliptic"; import { privateKeyFromSeed, - privateKeyToPublicKey, - pubkeyToAddress, - serializePointPacked, - signTransactionBytes + signTransactionBytes, + privateKeyToPubKeyHash, } from "./crypto"; import {ethers, utils} from "ethers"; import { packAmountChecked, packFeeChecked } from "./utils"; @@ -16,25 +14,23 @@ const MAX_NUMBER_OF_ACCOUNTS = 1 << 24; export class Signer { readonly privateKey: BN; - readonly publicKey: curve.edwards.EdwardsPoint; private constructor(privKey: BN) { this.privateKey = privKey; - this.publicKey = privateKeyToPublicKey(this.privateKey); } - pubKeyHash(): PubKeyHash { - return `sync:${pubkeyToAddress(this.publicKey).toString("hex")}`; + async pubKeyHash(): Promise { + return await privateKeyToPubKeyHash(this.privateKey); } - signSyncTransfer(transfer: { + async signSyncTransfer(transfer: { from: Address; to: Address; tokenId: number; amount: utils.BigNumberish; fee: utils.BigNumberish; nonce: number; - }): Transfer { + }): Promise { const type = Buffer.from([5]); // tx type const from = serializeAddress(transfer.from); const to = serializeAddress(transfer.to); @@ -52,7 +48,7 @@ export class Signer { nonce ]); - const signature = signTransactionBytes(this.privateKey, msgBytes); + const signature = await signTransactionBytes(this.privateKey, msgBytes); return { type: "Transfer", @@ -66,14 +62,14 @@ export class Signer { }; } - signSyncWithdraw(withdraw: { + async signSyncWithdraw(withdraw: { from: Address; ethAddress: string; tokenId: number; amount: utils.BigNumberish; fee: 
utils.BigNumberish; nonce: number; - }): Withdraw { + }): Promise { const typeBytes = Buffer.from([3]); const accountBytes = serializeAddress(withdraw.from); const ethAddressBytes = serializeAddress(withdraw.ethAddress); @@ -90,7 +86,7 @@ export class Signer { feeBytes, nonceBytes ]); - const signature = signTransactionBytes(this.privateKey, msgBytes); + const signature = await signTransactionBytes(this.privateKey, msgBytes); return { type: "Withdraw", from: withdraw.from, @@ -107,14 +103,14 @@ export class Signer { return new Signer(pk); } - static fromSeed(seed: Buffer): Signer { - return new Signer(privateKeyFromSeed(seed)); + static async fromSeed(seed: Buffer): Promise { + return new Signer(await privateKeyFromSeed(seed)); } static async fromETHSignature(ethSigner: ethers.Signer): Promise { const sign = await ethSigner.signMessage("Access ZK Sync account.\n" + "\n" + "Only sign this message for a trusted client!"); const seed = Buffer.from(sign.substr(2), "hex"); - return Signer.fromSeed(seed); + return await Signer.fromSeed(seed); } } diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index 9a6db0dc17..98b9671962 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -96,7 +96,7 @@ export class Wallet { const txMessageEthSignature = await this.ethSigner.signMessage(humanReadableTxInfo); - const signedTransferTransaction = this.signer.signSyncTransfer( + const signedTransferTransaction = await this.signer.signSyncTransfer( transactionData ); @@ -150,7 +150,7 @@ export class Wallet { const txMessageEthSignature = await this.ethSigner.signMessage(humanReadableTxInfo); - const signedWithdrawTransaction = this.signer.signSyncWithdraw( + const signedWithdrawTransaction = await this.signer.signSyncWithdraw( transactionData ); @@ -170,7 +170,7 @@ export class Wallet { throw new Error("ZKSync signer is required for current pubkey calculation."); } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const signerPubKeyHash = this.signer.pubKeyHash(); + const signerPubKeyHash = await this.signer.pubKeyHash(); return currentPubKeyHash === signerPubKeyHash; } @@ -183,7 +183,7 @@ export class Wallet { } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const newPubKeyHash = this.signer.pubKeyHash(); + const newPubKeyHash = await this.signer.pubKeyHash(); if (currentPubKeyHash == newPubKeyHash) { throw new Error("Current signing key is set already"); @@ -197,7 +197,7 @@ export class Wallet { const txData = { type: "ChangePubKey", account: this.address(), - newPkHash: this.signer.pubKeyHash(), + newPkHash: await this.signer.pubKeyHash(), nonce: numNonce, ethSignature }; @@ -215,7 +215,7 @@ export class Wallet { } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const newPubKeyHash = this.signer.pubKeyHash(); + const newPubKeyHash = await this.signer.pubKeyHash(); if (currentPubKeyHash == newPubKeyHash) { throw new Error("Current PubKeyHash is the same as new"); diff --git a/js/zksync.js/yarn.lock b/js/zksync.js/yarn.lock index 81ec68b4fe..1676efcce7 100644 --- a/js/zksync.js/yarn.lock +++ b/js/zksync.js/yarn.lock @@ -167,6 +167,11 @@ browser-stdout@1.3.1: resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== +buffer-es6@^4.9.3: + version "4.9.3" + resolved 
"https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" + integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= + buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -418,6 +423,12 @@ ethers@4.0.33: uuid "2.0.1" xmlhttprequest "1.8.0" +example-node-wasm@../zksync-crypto: + version "0.0.0" + dependencies: + buffer-es6 "^4.9.3" + zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-2163f48b-7e4c-44ee-acc1-d990808c92ae-1584117749254/node_modules/example-node-wasm/pkg" + execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" @@ -1330,3 +1341,6 @@ yn@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.0.tgz#fcbe2db63610361afcc5eb9e0ac91e976d046114" integrity sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg== + +zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": + version "0.1.0" From 89061f34d4320b758a4f87ed450ff8671918a1f9 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 16 Mar 2020 18:56:06 +0200 Subject: [PATCH 030/186] Renamed WaitUpgrade to NoticePeriod --- bin/prepare-test-contracts.sh | 4 ++-- contracts/contracts/UpgradeGatekeeper.sol | 14 +++++++------- .../test/unit_tests/upgradeGatekeeper_test.ts | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 132aa9b8b9..704771ff0e 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -46,10 +46,10 @@ set_constant MAX_AMOUNT_OF_REGISTERED_TOKENS 4 $OUT_DIR/ConfigTest.sol set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/ConfigTest.sol set_constant MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/ConfigTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol -set_constant WAIT_UPGRADE_MODE_PERIOD 4 $OUT_DIR/UpgradeGatekeeperTest.sol +set_constant NOTICE_PERIOD 4 $OUT_DIR/UpgradeGatekeeperTest.sol create_constant_getter MAX_AMOUNT_OF_REGISTERED_TOKENS $OUT_DIR/ConfigTest.sol -create_constant_getter WAIT_UPGRADE_MODE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol +create_constant_getter NOTICE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 1d48082b05..7aea5e7b13 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -9,8 +9,8 @@ import "./Proxy.sol"; /// @author Matter Labs contract UpgradeGatekeeper is UpgradeEvents, Ownable { - /// @notice Waiting period to activate finalize status mode (in seconds) - uint256 constant WAIT_UPGRADE_MODE_PERIOD = 2 weeks; + /// @notice Notice period before activation finalize status mode (in seconds) + uint256 constant NOTICE_PERIOD = 2 weeks; /// @notice Versions of proxy contracts mapping(address => uint64) public version; @@ -21,7 +21,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Upgrade mode statuses enum UpgradeStatus { NotActive, - WaitUpgrade, + NoticePeriod, Finalize } @@ -29,7 +29,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { struct UpgradeInfo { UpgradeStatus upgradeStatus; - /// @notice Time of activating waiting upgrade mode + /// @notice Time of activating notice period /// @dev Will 
be equal to zero in case of not active mode uint256 activationTime; @@ -52,14 +52,14 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { mainContractAddress = _mainContractAddress; } - /// @notice Activates wait upgrade status + /// @notice Activates notice period /// @param proxyAddress Address of proxy to process /// @param newTarget New target function upgradeProxy(address proxyAddress, address newTarget) external { requireMaster(msg.sender); require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.NotActive, "upa11"); // upa11 - unable to activate active upgrade mode - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.WaitUpgrade; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NoticePeriod; upgradeInfo[proxyAddress].activationTime = now; upgradeInfo[proxyAddress].nextTarget = newTarget; upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; @@ -91,7 +91,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { return true; } - if (now >= upgradeInfo[proxyAddress].activationTime + WAIT_UPGRADE_MODE_PERIOD) { + if (now >= upgradeInfo[proxyAddress].activationTime + NOTICE_PERIOD) { upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Finalize; (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index eb8302b952..5878197219 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -73,7 +73,7 @@ describe("UpgradeGatekeeper unit tests", function () { let activated_time = performance.now(); // wait and activate finalize status - let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_WAIT_UPGRADE_MODE_PERIOD()); + let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_NOTICE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { From 6e430b65dd51a5cd1c7f4ff588429d548eb912f0 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 17 Mar 2020 09:30:03 +0200 Subject: [PATCH 031/186] Removed initializeTarget function from Upgradeable --- bin/deploy-contracts.sh | 6 ++ contracts/contracts/Proxy.sol | 4 +- contracts/contracts/Upgradeable.sol | 21 ++--- contracts/scripts/testnet-deploy.ts | 4 +- contracts/src.ts/deploy.ts | 90 +++++++++---------- contracts/test/unit_tests/common.js | 8 +- contracts/test/unit_tests/proxy_test.ts | 9 +- .../test/unit_tests/upgradeGatekeeper_test.ts | 7 +- core/data_restore/src/contract_functions.rs | 56 +++++------- core/models/src/params.rs | 1 + 10 files changed, 93 insertions(+), 113 deletions(-) diff --git a/bin/deploy-contracts.sh b/bin/deploy-contracts.sh index 27bb783571..972661b2f4 100755 --- a/bin/deploy-contracts.sh +++ b/bin/deploy-contracts.sh @@ -17,6 +17,9 @@ cd contracts; yarn deploy-no-build | tee ../deploy.log; cd ..; +GOVERNANCE_TARGET_ADDR_NEW_VALUE=`grep "GOVERNANCE_TARGET_ADDR" deploy.log` +VERIFIER_TARGET_ADDR_NEW_VALUE=`grep "VERIFIER_TARGET_ADDR" deploy.log` +CONTRACT_TARGET_ADDR_NEW_VALUE=`grep "CONTRACT_TARGET_ADDR" deploy.log` CONTRACT_GENESIS_TX_HASH_NEW_VALUE=`grep "CONTRACT_GENESIS_TX_HASH" deploy.log` CONTRACT_ADDR_NEW_VALUE=`grep "CONTRACT_ADDR" deploy.log` ERC20_ADDR_NEW_VALUE=`grep "TEST_ERC20" deploy.log` @@ -30,6 +33,9 @@ then cp ./$ENV_FILE 
logs/$LABEL/$ZKSYNC_ENV.bak cp deploy.log logs/$LABEL/ echo $CONTRACT_ADDR_NEW_VALUE + python3 bin/replace-env-variable.py ./$ENV_FILE $GOVERNANCE_TARGET_ADDR_NEW_VALUE + python3 bin/replace-env-variable.py ./$ENV_FILE $VERIFIER_TARGET_ADDR_NEW_VALUE + python3 bin/replace-env-variable.py ./$ENV_FILE $CONTRACT_TARGET_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $CONTRACT_GENESIS_TX_HASH_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $CONTRACT_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $ERC20_ADDR_NEW_VALUE diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol index 2b88b2b13b..fc5c7bb40f 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -9,7 +9,9 @@ contract Proxy is Upgradeable { /// @notice Contract constructor /// @dev Calls Upgradeable contract constructor - constructor() Upgradeable() public {} + /// @param target Initial implementation address + /// @param targetInitializationParameters Target initialization parameters + constructor(address target, bytes memory targetInitializationParameters) Upgradeable(target, targetInitializationParameters) public {} /// @notice Performs a delegatecall to the contract implementation /// @dev Fallback function allowing to perform a delegatecall to the given implementation diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol index 83b93a3587..cf76bbb2ac 100644 --- a/contracts/contracts/Upgradeable.sol +++ b/contracts/contracts/Upgradeable.sol @@ -11,22 +11,10 @@ contract Upgradeable is Ownable { bytes32 private constant targetPosition = keccak256("target"); /// @notice Contract constructor - /// @dev Calls Ownable contract constructor - constructor() Ownable(msg.sender) public { - - } - - /// @notice Intercepts initialization calls - function initialize(bytes calldata) external pure { - revert("ini11"); // ini11 - interception of initialization call - } - - /// @notice Upgradeable contract initialization + /// @dev Calls Ownable contract constructor and initialize target /// @param target Initial implementation address /// @param targetInitializationParameters Target initialization parameters - function initializeTarget(address target, bytes calldata targetInitializationParameters) external { - requireMaster(msg.sender); - + constructor(address target, bytes memory targetInitializationParameters) Ownable(msg.sender) public { setTarget(target); (bool initializationSuccess, ) = getTarget().delegatecall( abi.encodeWithSignature("initialize(bytes)", targetInitializationParameters) @@ -34,6 +22,11 @@ contract Upgradeable is Ownable { require(initializationSuccess, "uin11"); // uin11 - target initialization failed } + /// @notice Intercepts initialization calls + function initialize(bytes calldata) external pure { + revert("ini11"); // ini11 - interception of initialization call + } + /// @notice Returns target of contract /// @return Actual implementation address function getTarget() public view returns (address target) { diff --git a/contracts/scripts/testnet-deploy.ts b/contracts/scripts/testnet-deploy.ts index 4064c2fe80..0ca35bdc70 100644 --- a/contracts/scripts/testnet-deploy.ts +++ b/contracts/scripts/testnet-deploy.ts @@ -35,7 +35,7 @@ async function main() { let timer = Date.now(); await deployer.deployGovernance(); console.log(`GOVERNANCE_TARGET_ADDR=${await deployer.getDeployedContract('GovernanceTarget').address}`); - console.log(`GOVERNANCE_GENESIS_TX_HASH=${await 
deployer.getInitTransactionHash('Governance')}`); + console.log(`GOVERNANCE_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Governance')}`); console.log(`GOVERNANCE_ADDR=${await deployer.getDeployedContract('Governance').address}`); console.log(`Governance contract deployed, time: ${(Date.now() - timer) / 1000} secs`); @@ -48,7 +48,7 @@ async function main() { timer = Date.now(); await deployer.deployFranklin(); console.log(`CONTRACT_TARGET_ADDR=${await deployer.getDeployedContract('FranklinTarget').address}`); - console.log(`CONTRACT_GENESIS_TX_HASH=${await deployer.getInitTransactionHash('Franklin')}`); + console.log(`CONTRACT_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Franklin')}`); console.log(`CONTRACT_ADDR=${await deployer.getDeployedContract('Franklin').address}`); console.log(`Main contract deployed, time: ${(Date.now() - timer) / 1000} secs`); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 55751950d0..073f693cbf 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -61,7 +61,7 @@ async function getSolidityInput(contractPath) { export class Deployer { bytecodes: any; addresses: any; - initTxHash: any; + deployTransactionHash: any; constructor(public wallet: ethers.Wallet, isTest: boolean) { this.bytecodes = { @@ -82,14 +82,14 @@ export class Deployer { Franklin: process.env.CONTRACT_ADDR, }; - this.initTxHash = { + this.deployTransactionHash = { Governance: process.env.GOVERNANCE_GENESIS_TX_HASH, Franklin: process.env.CONTRACT_GENESIS_TX_HASH, }; } - getInitTransactionHash(name) { - return this.initTxHash[name]; + getDeployTransactionHash(name) { + return this.deployTransactionHash[name]; } getDeployedContract(name) { @@ -109,16 +109,6 @@ export class Deployer { } } - constructorArgs(contractName) { - return { - 'GovernanceTarget': [], - 'VerifierTarget': [], - 'FranklinTarget': [], - 'Governance': [], - 'Verifier': [], - 'Franklin': [], - }[contractName]; - } initializationArgs(contractName) { return { 'Governance': [["address"], [this.wallet.address]], @@ -131,6 +121,20 @@ export class Deployer { ]], }[contractName]; } + encodedInitializationArgs(contractName) { + let [initArgs, initArgsValues] = this.initializationArgs(contractName); + return abi.rawEncode(initArgs, initArgsValues); + } + constructorArgs(contractName) { + return { + 'GovernanceTarget': [], + 'VerifierTarget': [], + 'FranklinTarget': [], + 'Governance': [this.addresses.GovernanceTarget, this.encodedInitializationArgs('Governance')], + 'Verifier': [this.addresses.VerifierTarget, this.encodedInitializationArgs('Verifier')], + 'Franklin': [this.addresses.FranklinTarget, this.encodedInitializationArgs('Franklin')], + }[contractName]; + } encodedConstructorArgs(contractName) { const args = this.constructorArgs(contractName); const iface = this.bytecodes[contractName].abi.filter(i => i.type === 'constructor'); @@ -148,73 +152,61 @@ export class Deployer { } async deployGovernance() { - const proxy = await deployContract( - this.wallet, - this.bytecodes.Governance, - this.constructorArgs('Governance'), - { gasLimit: 3000000 }, - ); const target = await deployContract( this.wallet, this.bytecodes.GovernanceTarget, this.constructorArgs('GovernanceTarget'), { gasLimit: 3000000 }, ); - let [initArgs, initArgsValues] = this.initializationArgs('Governance'); - const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initializeTarget(target.address, initArgsInBytes); - await tx.wait(); - 
this.addresses.GovernanceTarget = target.address; - this.addresses.Governance = proxy.address; - this.initTxHash.Governance = tx.hash; - return new ethers.Contract(proxy.address, this.bytecodes.GovernanceTarget.interface, this.wallet); - } - async deployVerifier() { const proxy = await deployContract( this.wallet, - this.bytecodes.Verifier, - this.constructorArgs('Verifier'), + this.bytecodes.Governance, + this.constructorArgs('Governance'), { gasLimit: 3000000 }, ); + this.addresses.Governance = proxy.address; + this.deployTransactionHash.Governance = proxy.deployTransaction.hash; + return new ethers.Contract(proxy.address, this.bytecodes.GovernanceTarget.interface, this.wallet); + } + + async deployVerifier() { const target = await deployContract( this.wallet, this.bytecodes.VerifierTarget, this.constructorArgs('VerifierTarget'), { gasLimit: 3000000 }, ); - let [initArgs, initArgsValues] = this.initializationArgs('Verifier'); - const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initializeTarget(target.address, initArgsInBytes); - await tx.wait(); - this.addresses.VerifierTarget = target.address; - this.addresses.Verifier = proxy.address; - return new ethers.Contract(proxy.address, this.bytecodes.VerifierTarget.interface, this.wallet); - } - async deployFranklin() { const proxy = await deployContract( this.wallet, - this.bytecodes.Franklin, - this.constructorArgs('Franklin'), + this.bytecodes.Verifier, + this.constructorArgs('Verifier'), { gasLimit: 3000000 }, ); + this.addresses.Verifier = proxy.address; + return new ethers.Contract(proxy.address, this.bytecodes.VerifierTarget.interface, this.wallet); + } + + async deployFranklin() { const target = await deployContract( this.wallet, this.bytecodes.FranklinTarget, this.constructorArgs('FranklinTarget'), { gasLimit: 6500000 }, ); - let [initArgs, initArgsValues] = this.initializationArgs('Franklin'); - const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initializeTarget(target.address, initArgsInBytes); - await tx.wait(); - this.addresses.FranklinTarget = target.address; + + const proxy = await deployContract( + this.wallet, + this.bytecodes.Franklin, + this.constructorArgs('Franklin'), + { gasLimit: 3000000 }, + ); this.addresses.Franklin = proxy.address; - this.initTxHash.Franklin = tx.hash; + this.deployTransactionHash.Franklin = proxy.deployTransaction.hash; return new ethers.Contract(proxy.address, this.bytecodes.FranklinTarget.interface, this.wallet); } diff --git a/contracts/test/unit_tests/common.js b/contracts/test/unit_tests/common.js index f12a5e47fc..ab8f74eea7 100644 --- a/contracts/test/unit_tests/common.js +++ b/contracts/test/unit_tests/common.js @@ -37,15 +37,13 @@ async function deployProxyContract( initArgsValues, ) { try { - const proxy = await deployContract(wallet, proxyCode, [], { + const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); + const contract = await deployContract(wallet, contractCode, [], { gasLimit: 3000000, }); - const contract = await deployContract(wallet, contractCode, [], { + const proxy = await deployContract(wallet, proxyCode, [contract.address, initArgsInBytes], { gasLimit: 3000000, }); - const initArgsInBytes = await abi.rawEncode(initArgs, initArgsValues); - const tx = await proxy.initializeTarget(contract.address, initArgsInBytes); - await tx.wait(); const returnContract = new ethers.Contract(proxy.address, contractCode.interface, wallet); return [returnContract, contract.address]; diff 
--git a/contracts/test/unit_tests/proxy_test.ts b/contracts/test/unit_tests/proxy_test.ts index 8f188f8d81..fb9a0cfac9 100644 --- a/contracts/test/unit_tests/proxy_test.ts +++ b/contracts/test/unit_tests/proxy_test.ts @@ -12,21 +12,20 @@ describe("Proxy unit tests", function () { let proxyDummyInterface let DummyFirst before(async () => { - proxyTestContract = await deployTestContract('../../build/Proxy') - proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); DummyFirst = await deployTestContract('../../build/DummyFirst') - await proxyTestContract.initializeTarget(DummyFirst.address, [1, 2]); + proxyTestContract = await deployContract(wallet, require('../../build/Proxy'), [DummyFirst.address, [1, 2]], { + gasLimit: 6000000, + }) + proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); }); it("checking that requireMaster calls present", async () => { let testContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); - expect((await getCallRevertReason( () => testContract_with_wallet2_signer.initializeTarget(AddressZero, []) )).revertReason).equal("oro11") expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero, []) )).revertReason).equal("oro11") }); it("check Proxy reverts", async () => { expect((await getCallRevertReason( () => proxyTestContract.initialize([]) )).revertReason).equal("ini11") - expect((await getCallRevertReason( () => proxyTestContract.initializeTarget(proxyTestContract.address, []) )).revertReason).equal("uin11") expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(proxyTestContract.address, []) )).revertReason).equal("ufu11") }); diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 5878197219..a7d8ec47fd 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -20,11 +20,12 @@ describe("UpgradeGatekeeper unit tests", function () { let DummyFirst let DummySecond before(async () => { - proxyTestContract = await deployTestContract('../../build/Proxy') - proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); DummyFirst = await deployTestContract('../../build/DummyFirst') DummySecond = await deployTestContract('../../build/DummySecond') - await proxyTestContract.initializeTarget(DummyFirst.address, [bytes[0], bytes[1]]); + proxyTestContract = await deployContract(wallet, require('../../build/Proxy'), [DummyFirst.address, [bytes[0], bytes[1]]], { + gasLimit: 6000000, + }) + proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); UpgradeGatekeeperContract = await deployContract(wallet, require('../../build/UpgradeGatekeeperTest'), [proxyTestContract.address], { gasLimit: 6000000, }) diff --git a/core/data_restore/src/contract_functions.rs b/core/data_restore/src/contract_functions.rs index 3bf88f01ab..232732d4dd 100644 --- a/core/data_restore/src/contract_functions.rs +++ b/core/data_restore/src/contract_functions.rs @@ -1,57 +1,45 @@ extern crate ethabi; use crate::eth_tx_helpers::get_input_data_from_ethereum_transaction; use models::node::account::Account; -use models::params::INPUT_DATA_ROOT_HASH_BYTES_WIDTH; +use models::params::{INPUT_DATA_ADDRESS_BYTES_WIDTH, INPUT_DATA_ROOT_HASH_BYTES_WIDTH}; use 
web3::contract::{Contract, Options}; use web3::futures::Future; use web3::types::{Address, BlockNumber, Transaction, U256}; use web3::Transport; -/// Returns Rollup genesis (fees) account from the input of the Rollup contract initialization transaction +/// Returns Rollup genesis (fees) account from the input of the Rollup contract creation transaction /// /// # Arguments /// -/// * `transaction` - Ethereum Rollup contract initialization transaction description +/// * `transaction` - Ethereum Rollup contract creation transaction description /// pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result { + const ENCODED_INIT_PARAMETERS_WIDTH: usize = + 3 * INPUT_DATA_ADDRESS_BYTES_WIDTH + INPUT_DATA_ROOT_HASH_BYTES_WIDTH; + let input_data = get_input_data_from_ethereum_transaction(&genesis_transaction)?; - // target address and targetInitializationParameters - let input_parameters = ethabi::decode( - vec![ethabi::ParamType::Address, ethabi::ParamType::Bytes].as_slice(), - input_data.as_slice(), - ) - .map_err(|_| { - failure::Error::from_boxed_compat(Box::new(std::io::Error::new( - std::io::ErrorKind::NotFound, - "can't get input parameters from target initialization transaction", - ))) - })?; - let encoded_parameters = input_parameters[1] - .clone() - .to_bytes() - .ok_or_else(|| Err("Invalid token in parameters")) - .map_err(|_: Result, _>| { - failure::Error::from_boxed_compat(Box::new(std::io::Error::new( - std::io::ErrorKind::NotFound, - "can't get initialization parameters from target initialization transaction", - ))) - })?; + // encoded target initialization parameters + let encoded_init_parameters = + input_data[input_data.len() - ENCODED_INIT_PARAMETERS_WIDTH..].to_vec(); - let input_types = vec![ + let init_parameters_types = vec![ ethabi::ParamType::Address, ethabi::ParamType::Address, ethabi::ParamType::Address, ethabi::ParamType::FixedBytes(INPUT_DATA_ROOT_HASH_BYTES_WIDTH), ]; - let decoded_parameters = ethabi::decode(input_types.as_slice(), encoded_parameters.as_slice()) - .map_err(|_| { - failure::Error::from_boxed_compat(Box::new(std::io::Error::new( - std::io::ErrorKind::NotFound, - "can't get decoded parameters from target initialization transaction", - ))) - })?; - match &decoded_parameters[2] { + let decoded_init_parameters = ethabi::decode( + init_parameters_types.as_slice(), + encoded_init_parameters.as_slice(), + ) + .map_err(|_| { + failure::Error::from_boxed_compat(Box::new(std::io::Error::new( + std::io::ErrorKind::NotFound, + "can't get decoded init parameters from contract creation transaction", + ))) + })?; + match &decoded_init_parameters[2] { ethabi::Token::Address(genesis_operator_address) => { Some(Account::default_with_address(&genesis_operator_address)) } @@ -61,7 +49,7 @@ pub fn get_genesis_account(genesis_transaction: &Transaction) -> Result| { failure::Error::from_boxed_compat(Box::new(std::io::Error::new( std::io::ErrorKind::NotFound, - "can't get decoded parameter from target initialization transaction", + "can't get decoded init parameter from contract creation transaction", ))) }) } diff --git a/core/models/src/params.rs b/core/models/src/params.rs index ab2b798060..e70370666a 100644 --- a/core/models/src/params.rs +++ b/core/models/src/params.rs @@ -38,6 +38,7 @@ pub fn account_tree_depth() -> usize { } pub const ACCOUNT_ID_BIT_WIDTH: usize = 24; +pub const INPUT_DATA_ADDRESS_BYTES_WIDTH: usize = 32; pub const INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH: usize = 32; pub const INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET: usize = 32; pub 
const INPUT_DATA_FEE_ACC_BYTES_WIDTH: usize = 3; From af0cd2c28ca98d5b8ff8d3fe55895f83e1abe556 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 17 Mar 2020 09:42:41 +0200 Subject: [PATCH 032/186] Removed dependency on Proxy in UpgradeGatekeeper --- contracts/contracts/UpgradeGatekeeper.sol | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 7aea5e7b13..47f93b4950 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -2,7 +2,6 @@ pragma solidity 0.5.16; import "./Events.sol"; import "./Ownable.sol"; -import "./Proxy.sol"; /// @title Upgrade Gatekeeper Contract @@ -94,10 +93,10 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { if (now >= upgradeInfo[proxyAddress].activationTime + NOTICE_PERIOD) { upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Finalize; - (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( + (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("registeredPriorityOperations()") ); - require(callSuccess, "uaf12"); // uaf12 - main contract static call failed + require(mainContractCallSuccess, "uaf12"); // uaf12 - main contract static call failed uint64 registeredPriorityOperations = abi.decode(encodedResult, (uint64)); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = registeredPriorityOperations; @@ -115,15 +114,18 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { requireMaster(msg.sender); require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Finalize, "umf11"); // umf11 - unable to finish upgrade without finalize status active - (bool callSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( + (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("verifiedPriorityOperations()") ); - require(callSuccess, "umf12"); // umf12 - main contract static call failed + require(mainContractCallSuccess, "umf12"); // umf12 - main contract static call failed uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); require(verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status - Proxy(address(uint160(proxyAddress))).upgradeTarget(upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters); + (bool proxyUpgradeCallSuccess, ) = proxyAddress.call( + abi.encodeWithSignature("upgradeTarget(address,bytes)", upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters) + ); + require(proxyUpgradeCallSuccess, "umf14"); // umf14 - proxy contract call failed emit UpgradeCompleted(proxyAddress, version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); version[proxyAddress]++; From dee1fe4939164da2c6feafb991068a2af2ed6c21 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 17 Mar 2020 11:58:40 +0200 Subject: [PATCH 033/186] Use wasm in zksync.js --- js/client/src/WalletDecorator.js | 1 - js/client/src/views/Login.vue | 3 ++- js/explorer/src/Client.js | 3 ++- js/explorer/yarn.lock | 9 +++++++++ js/zksync-crypto/webpack.config.js | 15 ++++++--------- js/zksync-crypto/yarn.lock | 6 +++--- js/zksync.js/package.json | 5 +++-- js/zksync.js/src/crypto.ts | 20 
+++++++++----------- js/zksync.js/yarn.lock | 19 +++++++------------ 9 files changed, 41 insertions(+), 40 deletions(-) diff --git a/js/client/src/WalletDecorator.js b/js/client/src/WalletDecorator.js index f3777551c1..84eaa3a2f8 100644 --- a/js/client/src/WalletDecorator.js +++ b/js/client/src/WalletDecorator.js @@ -3,7 +3,6 @@ import { readableEther, sleep, isReadablyPrintable } from './utils'; import timeConstants from './timeConstants'; import { BlockExplorerClient } from './BlockExplorerClient'; import config from './env-config'; -const zksync = require('zksync'); const ethers = require('ethers'); import franklin_abi from '../../../contracts/build/Franklin.json' import { Emitter } from './Emitter'; diff --git a/js/client/src/views/Login.vue b/js/client/src/views/Login.vue index b51d69b046..c44e85c818 100644 --- a/js/client/src/views/Login.vue +++ b/js/client/src/views/Login.vue @@ -26,7 +26,7 @@ const components = { }; const ethers = require('ethers'); -const zksync = require('zksync'); +const zksync_promise = import('zksync'); import config from '../env-config'; import { WalletDecorator } from '../WalletDecorator' @@ -39,6 +39,7 @@ export default { methods: { async login() { try { + const zksync = await zksync_promise; const syncProvider = await zksync.Provider.newHttpProvider(config.HTTP_RPC_API_ADDR); // const syncProvider = await zksync.Provider.newWebsocketProvider(config.WS_API_ADDR); const tokensList = await syncProvider.getTokens() diff --git a/js/explorer/src/Client.js b/js/explorer/src/Client.js index 34741b6cd8..c2ecad1b85 100644 --- a/js/explorer/src/Client.js +++ b/js/explorer/src/Client.js @@ -2,7 +2,7 @@ import config from './env-config'; import * as constants from './constants'; import { readableEther } from './utils'; import { BlockExplorerClient } from './BlockExplorerClient'; -const zksync = require('zksync'); +const zksync_promise = import('zksync'); import axios from 'axios'; async function fetch(req) { @@ -23,6 +23,7 @@ export class Client { } static async new() { + const zksync = await zksync_promise; const syncProvider = await zksync.Provider.newHttpProvider(config.HTTP_RPC_API_ADDR); const tokensPromise = syncProvider.getTokens() .then(tokens => { diff --git a/js/explorer/yarn.lock b/js/explorer/yarn.lock index bde2c67270..af163592d1 100644 --- a/js/explorer/yarn.lock +++ b/js/explorer/yarn.lock @@ -9057,6 +9057,15 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" +zksync-crypto-node@../zksync-crypto/nodejspgk: + version "0.1.0" + +zksync-crypto-web@../zksync-crypto/webpgk: + version "0.1.0" + +zksync-crypto@../zksync-crypto/pkg: + version "0.1.0" + "zksync@link:../zksync.js": version "0.0.0" uid "" diff --git a/js/zksync-crypto/webpack.config.js b/js/zksync-crypto/webpack.config.js index 0aeb576dad..e47cf96508 100644 --- a/js/zksync-crypto/webpack.config.js +++ b/js/zksync-crypto/webpack.config.js @@ -5,6 +5,7 @@ const WasmPackPlugin = require("@wasm-tool/wasm-pack-plugin"); const config = target => ({ entry: './indexx.js', + // entry: `./index_${target}.js`, output: { path: path.resolve(__dirname, 'dist'), filename: `index.${target}.js`, @@ -13,7 +14,11 @@ const config = target => ({ plugins: [ new HtmlWebpackPlugin(), new WasmPackPlugin({ - crateDirectory: path.resolve(__dirname, ".") + crateDirectory: path.resolve(__dirname, "."), + extraArgs + : target == 'web' ? '' + : target == 'node' ? '--target=nodejs' + : null, }), // Have this example work in Edge which doesn't ship `TextEncoder` or // `TextDecoder` at this time. 
@@ -23,14 +28,6 @@ const config = target => ({ }) ], mode: 'development', - // module: { - // rules: [ - // { test: /\.wasm$/, type: "webassembly/experimental" }, - // ], - // }, - // devServer: { - // mimeTypes: { 'text/html': ['wasm'] } - // }, }); module.exports = ['web', 'node'].map(target => ({...config(target), target})); diff --git a/js/zksync-crypto/yarn.lock b/js/zksync-crypto/yarn.lock index 6648b18598..e26368102b 100644 --- a/js/zksync-crypto/yarn.lock +++ b/js/zksync-crypto/yarn.lock @@ -3518,9 +3518,9 @@ yallist@^3.0.2: integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yargs-parser@^13.1.0: - version "13.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" - integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== + version "13.1.2" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" + integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== dependencies: camelcase "^5.0.0" decamelize "^1.2.0" diff --git a/js/zksync.js/package.json b/js/zksync.js/package.json index 149a82d6b4..d891d8ede4 100644 --- a/js/zksync.js/package.json +++ b/js/zksync.js/package.json @@ -10,11 +10,12 @@ "bn.js": "^5.0.0", "crypto-js": "^3.1.9-1", "elliptic": "^6.5.0", - "example-node-wasm": "../zksync-crypto", "js-sha256": "^0.9.0", "websocket": "^1.0.30", "websocket-as-promised": "^0.10.1", - "zksync-crypto": "../zksync-crypto/pkg" + "zksync-crypto": "../zksync-crypto/pkg", + "zksync-crypto-node": "../zksync-crypto/nodejspgk", + "zksync-crypto-web": "../zksync-crypto/webpgk" }, "peerDependencies": { "ethers": "^4.0.33" diff --git a/js/zksync.js/src/crypto.ts b/js/zksync.js/src/crypto.ts index 17cb5e8f83..2b878b8ede 100644 --- a/js/zksync.js/src/crypto.ts +++ b/js/zksync.js/src/crypto.ts @@ -1,18 +1,17 @@ import BN = require("bn.js"); import { Signature } from "./types"; -import zksync_crypto from "example-node-wasm"; -// import * as zksync_crypto from '../node_modules/zksync-crypto/zksync_crypto.js'; -// console.log({zksync_crypto}); -// zksync_crypto.then(console.log); -// init(); -// const zksync_crypto = import("zksync-crypto"); -// import * as scrscr from 'example-node-wasm'; -// console.log({scrscr}); +const zksync_crypto = (async () => { + if (typeof window !== "undefined" && window.window === window) { + return await import("zksync-crypto"); + } else { + return await import("zksync-crypto-node"); + } +})(); export async function signTransactionBytes(privKey: BN, bytes: Buffer): Promise { const { sign_musig_sha256 } = await zksync_crypto; - const signaturePacked = sign_musig_sha256(privKey.toBuffer(), bytes); + const signaturePacked = sign_musig_sha256(privKey.toArrayLike(Buffer), bytes); const pubKey = Buffer.from(signaturePacked.slice(0,32)).toString("hex"); const signature = Buffer.from(signaturePacked.slice(32, 32 + 64)).toString("hex"); return { @@ -22,12 +21,11 @@ export async function signTransactionBytes(privKey: BN, bytes: Buffer): Promise< } export async function privateKeyFromSeed(seed: Buffer): Promise { - console.log({zksync_crypto: await zksync_crypto}); const { private_key_from_seed } = await zksync_crypto; return new BN(private_key_from_seed(seed)); } export async function privateKeyToPubKeyHash(privateKey: BN): Promise { const { private_key_to_pubkey_hash } = await zksync_crypto; - 
return `sync:${Buffer.from(private_key_to_pubkey_hash(privateKey.toBuffer())).toString("hex")}` + return `sync:${Buffer.from(private_key_to_pubkey_hash(privateKey.toArrayLike(Buffer))).toString("hex")}` } diff --git a/js/zksync.js/yarn.lock b/js/zksync.js/yarn.lock index 1676efcce7..0f0e788f3f 100644 --- a/js/zksync.js/yarn.lock +++ b/js/zksync.js/yarn.lock @@ -167,11 +167,6 @@ browser-stdout@1.3.1: resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== -buffer-es6@^4.9.3: - version "4.9.3" - resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" - integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= - buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -423,12 +418,6 @@ ethers@4.0.33: uuid "2.0.1" xmlhttprequest "1.8.0" -example-node-wasm@../zksync-crypto: - version "0.0.0" - dependencies: - buffer-es6 "^4.9.3" - zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-2163f48b-7e4c-44ee-acc1-d990808c92ae-1584117749254/node_modules/example-node-wasm/pkg" - execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" @@ -1342,5 +1331,11 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.0.tgz#fcbe2db63610361afcc5eb9e0ac91e976d046114" integrity sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg== -zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": +zksync-crypto-node@../zksync-crypto/nodejspgk: + version "0.1.0" + +zksync-crypto-web@../zksync-crypto/webpgk: + version "0.1.0" + +zksync-crypto@../zksync-crypto/pkg: version "0.1.0" From 144a104c20a2020b3352f40f7a89d2a30aea9d8b Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 17 Mar 2020 15:12:12 +0200 Subject: [PATCH 034/186] Added using proxy contracts in zksync_test.ts --- contracts/contracts/test/ZKSyncUnitTest.sol | 16 +--- contracts/test/unit_tests/zksync_test.ts | 81 +++++++++------------ 2 files changed, 34 insertions(+), 63 deletions(-) diff --git a/contracts/contracts/test/ZKSyncUnitTest.sol b/contracts/contracts/test/ZKSyncUnitTest.sol index 99631112d8..56f6934512 100644 --- a/contracts/contracts/test/ZKSyncUnitTest.sol +++ b/contracts/contracts/test/ZKSyncUnitTest.sol @@ -5,20 +5,6 @@ import "../generated/FranklinTest.sol"; contract ZKSyncUnitTest is FranklinTest { - constructor( - address _governanceAddress, - address _verifierAddress, - address _genesisAccAddress, - bytes32 _genesisRoot - ) FranklinTest() public{ - /// initialization - - verifier = VerifierTest(_verifierAddress); - governance = GovernanceTest(_governanceAddress); - - blocks[0].stateRoot = _genesisRoot; - } - function changePubkeySignatureCheck(bytes calldata _signature, bytes calldata _newPkHash, uint32 _nonce, address _ethAddress) external pure returns (bool) { return verifyChangePubkeySignature(_signature, _newPkHash, _nonce, _ethAddress); } @@ -27,7 +13,7 @@ contract ZKSyncUnitTest is FranklinTest { balancesToWithdraw[_owner][_token] = _amount; } - function () payable external{} + function receiveETH() payable external{} function addPendingWithdrawal(address _to, uint16 _tokenId, uint128 _amount) external { storeWithdrawalAsPending(_to, _tokenId, 
_amount); diff --git a/contracts/test/unit_tests/zksync_test.ts b/contracts/test/unit_tests/zksync_test.ts index 2c787e4126..18f94b4b4e 100644 --- a/contracts/test/unit_tests/zksync_test.ts +++ b/contracts/test/unit_tests/zksync_test.ts @@ -1,30 +1,36 @@ import { addTestERC20Token, addTestNotApprovedERC20Token, - mintTestERC20Token, - Deployer, + franklinTestContractCode, + governanceTestContractCode, mintTestERC20Token, + verifierTestContractCode, Deployer } from "../../src.ts/deploy"; import {BigNumber, bigNumberify, BigNumberish, parseEther} from "ethers/utils"; import {ETHProxy} from "zksync"; import {Address, TokenAddress} from "zksync/build/types"; import {AddressZero} from "ethers/constants"; import {Contract, ethers} from "ethers"; -import {deployContract} from "ethereum-waffle"; +const abi = require('ethereumjs-abi') const { expect } = require("chai") +const { deployContract } = require("ethereum-waffle"); const { wallet, exitWallet, deployTestContract, getCallRevertReason, IERC20_INTERFACE} = require("./common"); import * as zksync from "zksync"; const TEST_PRIORITY_EXPIRATION = 16; + describe("ZK Sync signature verification unit tests", function () { this.timeout(50000); let testContract; let randomWallet = ethers.Wallet.createRandom(); before(async () => { - testContract = await deployContract(wallet, require('../../build/ZKSyncUnitTest'), [AddressZero, AddressZero, AddressZero, Buffer.alloc(32, 0)], { - gasLimit: 6500000, - }); + const deployer = new Deployer(wallet, true); + await deployer.deployGovernance(); + await deployer.deployVerifier(); + process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address; + deployer.bytecodes.FranklinTarget = require("../../build/ZKSyncUnitTest"); + testContract = await deployer.deployFranklin(); }); it("pubkey hash signature verification success", async () => { @@ -85,16 +91,10 @@ describe("ZK priority queue ops unit tests", function () { let operationTestContract; before(async () => { const deployer = new Deployer(wallet, true); - let verifierDeployedContract = await deployer.deployVerifier(); - let governanceDeployedContract = await deployer.deployGovernance(); + const governanceDeployedContract = await deployer.deployGovernance(); + await deployer.deployVerifier(); process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address; - zksyncContract = await deployContract( - wallet, - require('../../build/ZKSyncUnitTest'), - [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], - { - gasLimit: 6500000, - }); + zksyncContract = await deployer.deployFranklin(); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); @@ -222,16 +222,11 @@ describe("ZK Sync withdraw unit tests", function () { let ethProxy; before(async () => { const deployer = new Deployer(wallet, true); - let verifierDeployedContract = await deployer.deployVerifier(); - let governanceDeployedContract = await deployer.deployGovernance(); + const governanceDeployedContract = await deployer.deployGovernance(); + await deployer.deployVerifier(); process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address; - zksyncContract = await deployContract( - wallet, - require('../../build/ZKSyncUnitTest'), - [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], - { - gasLimit: 6500000, - }); + deployer.bytecodes.FranklinTarget = 
require("../../build/ZKSyncUnitTest"); + zksyncContract = await deployer.deployFranklin(); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); @@ -246,11 +241,11 @@ describe("ZK Sync withdraw unit tests", function () { const balanceBefore = await onchainBalance(ethWallet, token); const contractBalanceBefore = bigNumberify(await zksyncContract.balancesToWithdraw(ethWallet.address, tokenId)); if (token === ethers.constants.AddressZero) { - const tx = await zksyncContract.withdrawETH(amount); + const tx = await zksyncContract.withdrawETH(amount, {gasLimit: 100000}); const receipt = await tx.wait(); gasFee = receipt.gasUsed.mul(await ethWallet.provider.getGasPrice()); } else { - await zksyncContract.withdrawERC20(token, amount); + await zksyncContract.withdrawERC20(token, amount, {gasLimit: 100000}); } const balanceAfter = await onchainBalance(ethWallet, token); @@ -266,7 +261,7 @@ describe("ZK Sync withdraw unit tests", function () { zksyncContract.connect(wallet); const withdrawAmount = parseEther("1.0"); - const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount.mul(2)}); + const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount.mul(2), data: abi.simpleEncode("receiveETH()")}); await sendETH.wait(); await zksyncContract.setBalanceToWithdraw(wallet.address, 0, withdrawAmount); @@ -281,7 +276,7 @@ describe("ZK Sync withdraw unit tests", function () { zksyncContract.connect(wallet); const withdrawAmount = parseEther("1.0"); - const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount}); + const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount, data: abi.simpleEncode("receiveETH()")}); await sendETH.wait(); await zksyncContract.setBalanceToWithdraw(wallet.address, 0, withdrawAmount); @@ -332,7 +327,7 @@ describe("ZK Sync withdraw unit tests", function () { const withdrawAmount = parseEther("1.0"); const withdrawsToCancel = 5; - await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount}); + await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount, data: abi.simpleEncode("receiveETH()")}); await tokenContract.transfer(zksyncContract.address, withdrawAmount); @@ -362,16 +357,11 @@ describe("ZK Sync auth pubkey onchain unit tests", function () { let ethProxy; before(async () => { const deployer = new Deployer(wallet, true); - let verifierDeployedContract = await deployer.deployVerifier(); - let governanceDeployedContract = await deployer.deployGovernance(); + const governanceDeployedContract = await deployer.deployGovernance(); + await deployer.deployVerifier(); process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address; - zksyncContract = await deployContract( - wallet, - require('../../build/ZKSyncUnitTest'), - [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], - { - gasLimit: 6500000, - }); + deployer.bytecodes.FranklinTarget = require("../../build/ZKSyncUnitTest"); + zksyncContract = await deployer.deployFranklin(); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); @@ -434,16 +424,11 @@ describe("ZK Sync test process next operation", function () { let 
ethProxy; before(async () => { const deployer = new Deployer(wallet, true); - let verifierDeployedContract = await deployer.deployVerifier(); - let governanceDeployedContract = await deployer.deployGovernance(); + const governanceDeployedContract = await deployer.deployGovernance(); + await deployer.deployVerifier(); process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address; - zksyncContract = await deployContract( - wallet, - require('../../build/ZKSyncUnitTest'), - [governanceDeployedContract.address, verifierDeployedContract.address, wallet.address, ethers.constants.HashZero], - { - gasLimit: 6500000, - }); + deployer.bytecodes.FranklinTarget = require("../../build/ZKSyncUnitTest"); + zksyncContract = await deployer.deployFranklin(); await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); @@ -620,4 +605,4 @@ describe("ZK Sync test process next operation", function () { expect(committedPriorityRequestsAfter, "priority request number").eq(committedPriorityRequestsBefore); expect(totalOnchainOpsAfter, "committed onchain ops number").eq(totalOnchainOpsBefore); }); -}); +}); \ No newline at end of file From 28b0d9900f757c1d5ef25823091acdf57f2f2a01 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 17 Mar 2020 15:29:45 +0200 Subject: [PATCH 035/186] gas limit, format code --- contracts/test/unit_tests/zksync_test.ts | 60 +++++++++++++++++------- 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/contracts/test/unit_tests/zksync_test.ts b/contracts/test/unit_tests/zksync_test.ts index 18f94b4b4e..fd3d38fec1 100644 --- a/contracts/test/unit_tests/zksync_test.ts +++ b/contracts/test/unit_tests/zksync_test.ts @@ -11,9 +11,9 @@ import {AddressZero} from "ethers/constants"; import {Contract, ethers} from "ethers"; const abi = require('ethereumjs-abi') -const { expect } = require("chai") -const { deployContract } = require("ethereum-waffle"); -const { wallet, exitWallet, deployTestContract, getCallRevertReason, IERC20_INTERFACE} = require("./common"); +const {expect} = require("chai") +const {deployContract} = require("ethereum-waffle"); +const {wallet, exitWallet, deployTestContract, getCallRevertReason, IERC20_INTERFACE} = require("./common"); import * as zksync from "zksync"; const TEST_PRIORITY_EXPIRATION = 16; @@ -73,7 +73,7 @@ describe("ZK Sync signature verification unit tests", function () { }); it("signature verification success", async () => { - for(const message of [Buffer.from("msg", "ascii"), Buffer.alloc(0), Buffer.alloc(10, 1)]) { + for (const message of [Buffer.from("msg", "ascii"), Buffer.alloc(0), Buffer.alloc(10, 1)]) { const signature = await wallet.signMessage(message); const sinedMessage = Buffer.concat([Buffer.from(`\x19Ethereum Signed Message:\n${message.length}`, "ascii"), message]); const address = await testContract.testVerifyEthereumSignature(signature, sinedMessage); @@ -98,7 +98,10 @@ describe("ZK priority queue ops unit tests", function () { await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); - ethProxy = new ETHProxy(wallet.provider, {mainContract: zksyncContract.address, govContract: governanceDeployedContract.address}); + ethProxy = new ETHProxy(wallet.provider, { + mainContract: zksyncContract.address, + govContract: governanceDeployedContract.address + }); 
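// The deploy-and-configure sequence above is repeated in each `describe` block of this
// file. A minimal sketch of how it could be shared, using only calls that already
// appear in these tests; the helper name `deployZkSyncTestEnv` is hypothetical and not
// part of this patch series.
async function deployZkSyncTestEnv(wallet) {
    const deployer = new Deployer(wallet, true);
    const governance = await deployer.deployGovernance();
    await deployer.deployVerifier();
    process.env.OPERATOR_FRANKLIN_ADDRESS = wallet.address;
    // Deploy the proxied Franklin contract with the unit-test target bytecode.
    deployer.bytecodes.FranklinTarget = require("../../build/ZKSyncUnitTest");
    const zksyncContract = await deployer.deployFranklin();
    await governance.setValidator(wallet.address, true);
    const tokenContract = await addTestERC20Token(wallet, governance);
    await mintTestERC20Token(wallet, tokenContract);
    const ethProxy = new ETHProxy(wallet.provider, {
        mainContract: zksyncContract.address,
        govContract: governance.address,
    });
    return {zksyncContract, governance, tokenContract, ethProxy};
}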
operationTestContract = await deployTestContract('../../build/OperationsTest'); }); @@ -230,7 +233,10 @@ describe("ZK Sync withdraw unit tests", function () { await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); - ethProxy = new ETHProxy(wallet.provider, {mainContract: zksyncContract.address, govContract: governanceDeployedContract.address}); + ethProxy = new ETHProxy(wallet.provider, { + mainContract: zksyncContract.address, + govContract: governanceDeployedContract.address + }); incorrectTokenContract = await addTestNotApprovedERC20Token(wallet); await mintTestERC20Token(wallet, tokenContract); @@ -241,11 +247,11 @@ describe("ZK Sync withdraw unit tests", function () { const balanceBefore = await onchainBalance(ethWallet, token); const contractBalanceBefore = bigNumberify(await zksyncContract.balancesToWithdraw(ethWallet.address, tokenId)); if (token === ethers.constants.AddressZero) { - const tx = await zksyncContract.withdrawETH(amount, {gasLimit: 100000}); + const tx = await zksyncContract.withdrawETH(amount, {gasLimit: 70000}); const receipt = await tx.wait(); gasFee = receipt.gasUsed.mul(await ethWallet.provider.getGasPrice()); } else { - await zksyncContract.withdrawERC20(token, amount, {gasLimit: 100000}); + await zksyncContract.withdrawERC20(token, amount, {gasLimit: 70000}); } const balanceAfter = await onchainBalance(ethWallet, token); @@ -261,7 +267,11 @@ describe("ZK Sync withdraw unit tests", function () { zksyncContract.connect(wallet); const withdrawAmount = parseEther("1.0"); - const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount.mul(2), data: abi.simpleEncode("receiveETH()")}); + const sendETH = await wallet.sendTransaction({ + to: zksyncContract.address, + value: withdrawAmount.mul(2), + data: abi.simpleEncode("receiveETH()") + }); await sendETH.wait(); await zksyncContract.setBalanceToWithdraw(wallet.address, 0, withdrawAmount); @@ -276,11 +286,15 @@ describe("ZK Sync withdraw unit tests", function () { zksyncContract.connect(wallet); const withdrawAmount = parseEther("1.0"); - const sendETH = await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount, data: abi.simpleEncode("receiveETH()")}); + const sendETH = await wallet.sendTransaction({ + to: zksyncContract.address, + value: withdrawAmount, + data: abi.simpleEncode("receiveETH()") + }); await sendETH.wait(); await zksyncContract.setBalanceToWithdraw(wallet.address, 0, withdrawAmount); - const {revertReason} = await getCallRevertReason( async () => await performWithdraw(wallet, AddressZero, 0, withdrawAmount.add(1))); + const {revertReason} = await getCallRevertReason(async () => await performWithdraw(wallet, AddressZero, 0, withdrawAmount.add(1))); expect(revertReason, "wrong revert reason").eq("frw11"); }); @@ -310,7 +324,7 @@ describe("ZK Sync withdraw unit tests", function () { await zksyncContract.setBalanceToWithdraw(wallet.address, tokenId, withdrawAmount); - const {revertReason} = await getCallRevertReason( async () => await performWithdraw(wallet, tokenContract.address, tokenId, withdrawAmount.add(1))); + const {revertReason} = await getCallRevertReason(async () => await performWithdraw(wallet, tokenContract.address, tokenId, withdrawAmount.add(1))); expect(revertReason, "wrong revert reason").eq("frw11"); }); @@ -318,7 +332,7 @@ describe("ZK Sync withdraw unit tests", function () { 
zksyncContract.connect(wallet); const withdrawAmount = parseEther("1.0"); - const {revertReason} = await getCallRevertReason( async () => await performWithdraw(wallet, incorrectTokenContract.address, 1, withdrawAmount.add(1))); + const {revertReason} = await getCallRevertReason(async () => await performWithdraw(wallet, incorrectTokenContract.address, 1, withdrawAmount.add(1))); expect(revertReason, "wrong revert reason").eq("gvs12"); }); @@ -327,7 +341,11 @@ describe("ZK Sync withdraw unit tests", function () { const withdrawAmount = parseEther("1.0"); const withdrawsToCancel = 5; - await wallet.sendTransaction({to: zksyncContract.address, value: withdrawAmount, data: abi.simpleEncode("receiveETH()")}); + await wallet.sendTransaction({ + to: zksyncContract.address, + value: withdrawAmount, + data: abi.simpleEncode("receiveETH()") + }); await tokenContract.transfer(zksyncContract.address, withdrawAmount); @@ -365,7 +383,10 @@ describe("ZK Sync auth pubkey onchain unit tests", function () { await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); - ethProxy = new ETHProxy(wallet.provider, {mainContract: zksyncContract.address, govContract: governanceDeployedContract.address}); + ethProxy = new ETHProxy(wallet.provider, { + mainContract: zksyncContract.address, + govContract: governanceDeployedContract.address + }); }); it("Auth pubkey success", async () => { @@ -398,7 +419,7 @@ describe("ZK Sync auth pubkey onchain unit tests", function () { await zksyncContract.authPubkeyHash(pubkeyHash, nonce); // const otherPubkeyHash = "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - const {revertReason} = await getCallRevertReason(async () => await zksyncContract.authPubkeyHash(otherPubkeyHash, nonce) ); + const {revertReason} = await getCallRevertReason(async () => await zksyncContract.authPubkeyHash(otherPubkeyHash, nonce)); expect(revertReason, "revert reason incorrect").eq("ahf11"); }); @@ -409,7 +430,7 @@ describe("ZK Sync auth pubkey onchain unit tests", function () { const longPubkeyHash = "0xfefefefefefefefefefefefefefefefefefefefefe"; for (const pkHash of [shortPubkeyHash, longPubkeyHash]) { - const {revertReason} = await getCallRevertReason(async () => await zksyncContract.authPubkeyHash(shortPubkeyHash, nonce) ); + const {revertReason} = await getCallRevertReason(async () => await zksyncContract.authPubkeyHash(shortPubkeyHash, nonce)); expect(revertReason, "revert reason incorrect").eq("ahf10"); } }); @@ -432,7 +453,10 @@ describe("ZK Sync test process next operation", function () { await governanceDeployedContract.setValidator(wallet.address, true); tokenContract = await addTestERC20Token(wallet, governanceDeployedContract); await mintTestERC20Token(wallet, tokenContract); - ethProxy = new ETHProxy(wallet.provider, {mainContract: zksyncContract.address, govContract: governanceDeployedContract.address}); + ethProxy = new ETHProxy(wallet.provider, { + mainContract: zksyncContract.address, + govContract: governanceDeployedContract.address + }); }); it("Process noop", async () => { From b0ffaa1b691cb9cf20901e45e923c450968de845 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Wed, 18 Mar 2020 11:00:01 +0200 Subject: [PATCH 036/186] ode style small fix --- contracts/src.ts/deploy.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 073f693cbf..477795f719 100644 --- 
a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -93,7 +93,7 @@ export class Deployer { } getDeployedContract(name) { - if (name == "Governance" || name == "Verifier" || name == "Franklin") { + if (["Governance", "Verifier", "Franklin"].includes(name)) { return new ethers.Contract( this.addresses[name], this.bytecodes[name+"Target"].interface, From f3e7992ee0887954b8c95e58a3f7909a4b932fb7 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 18 Mar 2020 15:13:22 +0300 Subject: [PATCH 037/186] Add basic queue structures --- core/server/src/eth_sender/mod.rs | 1 + .../src/eth_sender/tx_queue/counter_queue.rs | 54 ++++++ core/server/src/eth_sender/tx_queue/mod.rs | 168 ++++++++++++++++++ .../src/eth_sender/tx_queue/sparse_queue.rs | 103 +++++++++++ 4 files changed, 326 insertions(+) create mode 100644 core/server/src/eth_sender/tx_queue/counter_queue.rs create mode 100644 core/server/src/eth_sender/tx_queue/mod.rs create mode 100644 core/server/src/eth_sender/tx_queue/sparse_queue.rs diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 7101e275eb..c8b89ef7dd 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -26,6 +26,7 @@ use self::transactions::*; mod database; mod ethereum_interface; mod transactions; +mod tx_queue; #[cfg(test)] mod tests; diff --git a/core/server/src/eth_sender/tx_queue/counter_queue.rs b/core/server/src/eth_sender/tx_queue/counter_queue.rs new file mode 100644 index 0000000000..ecd79300b4 --- /dev/null +++ b/core/server/src/eth_sender/tx_queue/counter_queue.rs @@ -0,0 +1,54 @@ +// TODO: Remove me +#![allow(dead_code)] + +use std::{collections::VecDeque, fmt}; + +/// Counter queue is basically a queue which +/// tracks the amount of the processed elements. +#[derive(Debug)] +pub struct CounterQueue { + elements: VecDeque, + counter: usize, +} + +impl Default for CounterQueue { + fn default() -> Self { + Self { + counter: 0, + elements: VecDeque::new(), + } + } +} + +impl CounterQueue { + /// Creates a new empty counter queue. + pub fn new() -> Self { + Self::default() + } + + /// Creates a new empty counter queue with the custom number of processed elements. + /// This method is used to restore the state of the queue. + pub fn new_with_count(counter: usize) -> Self { + Self { + counter, + ..Default::default() + } + } + + /// Inserts an element to the end of the queue. + pub fn push_back(&mut self, element: T) { + self.elements.push_back(element); + self.counter += 1; + } + + /// Attempts to take the next element from the queue. Returns `None` + /// if the queue is empty. + pub fn pop_front(&mut self) -> Option { + self.elements.pop_front() + } + + /// Returns the value of the counter. 
+ pub fn get_count(&self) -> usize { + self.counter + } +} diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs new file mode 100644 index 0000000000..18ca1a5f31 --- /dev/null +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -0,0 +1,168 @@ +// TODO: Remove me +#![allow(dead_code)] + +use eth_client::SignedCallResult; + +use self::{counter_queue::CounterQueue, sparse_queue::SparseQueue}; + +mod counter_queue; +mod sparse_queue; + +#[derive(Debug)] +pub struct TxQueueBuilder { + max_pending_txs: usize, + sent_pending_txs: usize, + + commit_operations_count: usize, + verify_operations_count: usize, + withdraw_operations_count: usize, +} + +impl TxQueueBuilder { + pub fn new(max_pending_txs: usize) -> Self { + Self { + max_pending_txs, + sent_pending_txs: 0, + commit_operations_count: 0, + verify_operations_count: 0, + withdraw_operations_count: 0, + } + } + + pub fn with_sent_pending_txs(self, sent_pending_txs: usize) -> Self { + Self { + sent_pending_txs, + ..self + } + } + + pub fn with_commit_operations_count(self, commit_operations_count: usize) -> Self { + Self { + commit_operations_count, + ..self + } + } + + pub fn with_verify_operations_count(self, verify_operations_count: usize) -> Self { + Self { + verify_operations_count, + ..self + } + } + + pub fn with_withdraw_operations_count(self, withdraw_operations_count: usize) -> Self { + Self { + withdraw_operations_count, + ..self + } + } + + pub fn build(self) -> TxQueue { + TxQueue { + max_pending_txs: self.max_pending_txs, + sent_pending_txs: self.sent_pending_txs, + + commit_operations: CounterQueue::new_with_count(self.commit_operations_count), + verify_operations: SparseQueue::new_from(self.verify_operations_count), + withdraw_operations: CounterQueue::new_with_count(self.withdraw_operations_count), + } + } +} + +/// Transaction queue combines the underlying operations queues and determines +/// the transaction sending policy. It chooses the next operation to send out of +/// these queues, using the following rules: +/// +/// 1. If the amount of sent transactions is equal to the `MAX_PENDING_TXS` value, +/// no transaction is yielded until some of already sent ones are committed. +/// 2. If `commit` queue contains elements, and `verify` queue does not, the `commit` +/// operation is yielded. +/// 3. If `verify` queue contains elements, and `commit` operation with corresponding +/// ID is committed, the `verify` operation is yielded (meaning that `verify` operations +/// are prioritized unless the amount of sent `commit` and `verify` operations is equal: +/// if so, we should send the `commit` operation first). +/// 4. If both queues are empty, no operation is yielded. +/// +/// TODO: explain `withdraw` operations policy/priority. +#[derive(Debug)] +pub struct TxQueue { + max_pending_txs: usize, + sent_pending_txs: usize, + + commit_operations: CounterQueue, + verify_operations: SparseQueue, + withdraw_operations: CounterQueue, +} + +impl TxQueue { + /// Creates a new empty transactions queue. + pub fn new(max_pending_txs: usize) -> Self { + Self { + max_pending_txs, + sent_pending_txs: 0, + + commit_operations: CounterQueue::new(), + verify_operations: SparseQueue::new(), + withdraw_operations: CounterQueue::new(), + } + } + + /// Creates a new empty transactions queue with the custom expected next ID + /// for the `Verify` operations queue. + /// This method is used to restore the state of the queue. 
+ pub fn new_from(max_pending_txs: usize, idx: usize) -> Self { + Self { + verify_operations: SparseQueue::new_from(idx), + ..Self::new(max_pending_txs) + } + } + + pub fn set_pending_txs_count(&mut self, sent_pending_txs: usize) { + self.sent_pending_txs = sent_pending_txs; + } + + pub fn add_commit_operation(&mut self, commit_operation: SignedCallResult) { + self.commit_operations.push_back(commit_operation); + } + + pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: SignedCallResult) { + self.verify_operations.insert(block_idx, verify_operation); + } + + pub fn add_withdraw_operation(&mut self, withdraw_operation: SignedCallResult) { + self.withdraw_operations.push_back(withdraw_operation); + } + + pub fn pop_front(&mut self) -> Option { + if self.sent_pending_txs >= self.max_pending_txs { + return None; + } + + // 1. Highest priority: verify operations. + + // If we've committed a corresponding `Commit` operation, and + // there is a pending `verify` operation, chose it. + let next_verify_op_id = self.verify_operations.next_id(); + if next_verify_op_id < self.commit_operations.get_count() + && self.verify_operations.has_next() + { + return Some(self.verify_operations.pop_front().unwrap()); + } + + // 2. After verify operations we should process withdraw operation. + + if let Some(withdraw_operation) = self.withdraw_operations.pop_front() { + return Some(withdraw_operation); + } + + // 3. Finally, check the commit queue. + + if let Some(commit_operation) = self.commit_operations.pop_front() { + return Some(commit_operation); + } + + // 4. There are no operations to process, return `None`. + + None + } +} diff --git a/core/server/src/eth_sender/tx_queue/sparse_queue.rs b/core/server/src/eth_sender/tx_queue/sparse_queue.rs new file mode 100644 index 0000000000..6746e19229 --- /dev/null +++ b/core/server/src/eth_sender/tx_queue/sparse_queue.rs @@ -0,0 +1,103 @@ +// TODO: Remove me +#![allow(dead_code)] + +use std::{collections::HashMap, fmt}; + +/// Sparse queue is a sparse queue which allows inserting an element +/// with index (N + 1) when element with index N is not yet inserted. +/// +/// Operation `pop_front` for this queue will not yield (N + 1) element +/// until the gap is filled, but once it's filled it will yield both +/// N and (N + 1) elements. +#[derive(Debug)] +pub struct SparseQueue { + current_idx: usize, + elements: HashMap, +} + +impl Default for SparseQueue { + fn default() -> Self { + Self { + current_idx: 0, + elements: HashMap::new(), + } + } +} + +impl SparseQueue { + /// Creates a new empty sparse queue. + pub fn new() -> Self { + Self::default() + } + + /// Creates a new empty sparse queue with the custom expected next ID. + /// This method is used to restore the state of the queue. + pub fn new_from(idx: usize) -> Self { + Self { + current_idx: idx, + ..Default::default() + } + } + + /// Inserts an element to the queue given its index. + pub fn insert(&mut self, idx: usize, element: T) { + assert!( + idx >= self.current_idx, + "Can't insert the element with index lower than the next expected one" + ); + self.elements.insert(idx, element); + } + + /// Attempts to take the next element from the queue. Returns `None` + /// if either the queue is empty, or the next expected element is yet + /// missing in the queue. 
+ pub fn pop_front(&mut self) -> Option { + match self.elements.remove(&self.current_idx) { + Some(value) => { + self.current_idx += 1; + Some(value) + } + None => None, + } + } + + /// Checks whether `pop_front` operation will return an element or not. + /// Returns `true` if the next expected element exists in the queue, + /// and returns `false` otherwise. + pub fn has_next(&self) -> bool { + self.elements.contains_key(&self.current_idx) + } + + /// Returns the next expected element ID. + pub fn next_id(&self) -> usize { + self.current_idx + } +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Checks the main operations of the queue: `insert`, `pop_front` and `has_next`. + #[test] + fn basic_operations() { + let mut queue: SparseQueue = SparseQueue::new(); + + // Insert the next element and obtain it. + queue.insert(0, "zero".into()); + assert!(queue.has_next()); + assert_eq!(queue.pop_front().unwrap(), "zero"); + + // Now insert an element with a gap, and check that it won't be yielded. + queue.insert(2, "two".into()); + assert!(!queue.has_next()); + assert!(queue.pop_front().is_none()); + + // Now fill the gap and obtain both elements. + queue.insert(1, "one".into()); + assert!(queue.has_next()); + assert_eq!(queue.pop_front().unwrap(), "one"); + assert!(queue.has_next()); + assert_eq!(queue.pop_front().unwrap(), "two"); + } +} From 946b7410894732f5d8c829732935683d603908d4 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 18 Mar 2020 16:29:07 +0300 Subject: [PATCH 038/186] Add more tests for queues --- .../src/eth_sender/tx_queue/counter_queue.rs | 43 ++++++++++++++++++- core/server/src/eth_sender/tx_queue/mod.rs | 28 ++++++------ .../src/eth_sender/tx_queue/sparse_queue.rs | 25 +++++++++++ 3 files changed, 81 insertions(+), 15 deletions(-) diff --git a/core/server/src/eth_sender/tx_queue/counter_queue.rs b/core/server/src/eth_sender/tx_queue/counter_queue.rs index ecd79300b4..afb447da3d 100644 --- a/core/server/src/eth_sender/tx_queue/counter_queue.rs +++ b/core/server/src/eth_sender/tx_queue/counter_queue.rs @@ -38,13 +38,20 @@ impl CounterQueue { /// Inserts an element to the end of the queue. pub fn push_back(&mut self, element: T) { self.elements.push_back(element); - self.counter += 1; } /// Attempts to take the next element from the queue. Returns `None` /// if the queue is empty. + /// + /// Taking the actual value updates the counter. pub fn pop_front(&mut self) -> Option { - self.elements.pop_front() + match self.elements.pop_front() { + Some(element) => { + self.counter += 1; + Some(element) + } + None => None, + } } /// Returns the value of the counter. @@ -52,3 +59,35 @@ impl CounterQueue { self.counter } } + +#[cfg(test)] +mod tests { + use super::*; + + /// Checks the main operations of the queue: `push_back`, `pop_front` and `get_count`. + #[test] + fn basic_operations() { + let mut queue: CounterQueue = CounterQueue::new(); + + // Check that by default the current count is 0. + assert_eq!(queue.get_count(), 0); + + // Insert the next element and obtain it. + queue.push_back("one".into()); + // Inserting the element should NOT update the counter. + assert_eq!(queue.get_count(), 0); + assert_eq!(queue.pop_front().unwrap(), "one"); + // After taking the element, the counter should be updated. + assert_eq!(queue.get_count(), 1); + + // Perform the same check again and check that overall counter will become 2. 
+ queue.push_back("two".into()); + assert_eq!(queue.get_count(), 1); + assert_eq!(queue.pop_front().unwrap(), "two"); + assert_eq!(queue.get_count(), 2); + + // Now attempt take no value, and check that counter is not increased. + assert_eq!(queue.pop_front(), None); + assert_eq!(queue.get_count(), 2); + } +} diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 18ca1a5f31..61290b1395 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -75,20 +75,21 @@ impl TxQueueBuilder { /// /// 1. If the amount of sent transactions is equal to the `MAX_PENDING_TXS` value, /// no transaction is yielded until some of already sent ones are committed. -/// 2. If `commit` queue contains elements, and `verify` queue does not, the `commit` -/// operation is yielded. -/// 3. If `verify` queue contains elements, and `commit` operation with corresponding -/// ID is committed, the `verify` operation is yielded (meaning that `verify` operations -/// are prioritized unless the amount of sent `commit` and `verify` operations is equal: -/// if so, we should send the `commit` operation first). -/// 4. If both queues are empty, no operation is yielded. -/// -/// TODO: explain `withdraw` operations policy/priority. +/// 2. Otherwise, transactions are yielded according to the following policy: +/// - If `verify` queue contains elements, and `commit` operation with corresponding +/// ID is committed, the `verify` operation is yielded (meaning that `verify` operations +/// are prioritized unless the amount of sent `commit` and `verify` operations is equal: +/// if so, we should send the `commit` operation first). +/// - Otherwise, if `withdraw` queue contains elements, a `withdraw` operation is yielded. +/// - Otherwise, if `commit` queue is not empty, a `commit` operation is yielded. +/// 3. If all the queues are empty, no operation is returned. #[derive(Debug)] pub struct TxQueue { max_pending_txs: usize, sent_pending_txs: usize, + // TODO: SignedCallResult isn't appropriate, since it means an assigned nonce. We don't want + // to assign nonce until the actual tx send. commit_operations: CounterQueue, verify_operations: SparseQueue, withdraw_operations: CounterQueue, @@ -117,22 +118,23 @@ impl TxQueue { } } - pub fn set_pending_txs_count(&mut self, sent_pending_txs: usize) { - self.sent_pending_txs = sent_pending_txs; - } - + /// Adds the `commit` operation to the queue. pub fn add_commit_operation(&mut self, commit_operation: SignedCallResult) { self.commit_operations.push_back(commit_operation); } + /// Adds the `verify` operation to the queue. pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: SignedCallResult) { self.verify_operations.insert(block_idx, verify_operation); } + /// Adds the `withdraw` operation to the queue. pub fn add_withdraw_operation(&mut self, withdraw_operation: SignedCallResult) { self.withdraw_operations.push_back(withdraw_operation); } + /// Gets the next transaction to send, according to the transaction sending policy. + /// For details, see the structure doc-comment. 
pub fn pop_front(&mut self) -> Option { if self.sent_pending_txs >= self.max_pending_txs { return None; diff --git a/core/server/src/eth_sender/tx_queue/sparse_queue.rs b/core/server/src/eth_sender/tx_queue/sparse_queue.rs index 6746e19229..4bb82c1fe8 100644 --- a/core/server/src/eth_sender/tx_queue/sparse_queue.rs +++ b/core/server/src/eth_sender/tx_queue/sparse_queue.rs @@ -100,4 +100,29 @@ mod tests { assert!(queue.has_next()); assert_eq!(queue.pop_front().unwrap(), "two"); } + + /// Checks that we can use the difference `current_idx` as the custom + /// queue start point. + #[test] + fn different_start_point() { + let mut queue: SparseQueue = SparseQueue::new_from(10); + + // Check that by default the queue is empty. + assert!(!queue.has_next()); + + // Insert the next element and obtain it. + queue.insert(10, "ten".into()); + assert!(queue.has_next()); + assert_eq!(queue.pop_front().unwrap(), "ten"); + } + + /// Checks that attempt to add the element with the index lower than + /// the current expected ID results in panic. + #[test] + #[should_panic] + fn add_out_of_order_element() { + let mut queue: SparseQueue = SparseQueue::new_from(10); + // Insert the element with too low index. + queue.insert(0, "zero".into()); + } } From 403d4c77574fde8ae8dcfc016799a8854711e587 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 08:10:32 +0300 Subject: [PATCH 039/186] Add a nonce table to the storage --- .../2020-03-19-042712_add_eth_nonce/down.sql | 2 ++ .../2020-03-19-042712_add_eth_nonce/up.sql | 6 ++++ core/storage/src/ethereum/mod.rs | 33 ++++++++++++++++++- core/storage/src/ethereum/records.rs | 13 ++++++++ core/storage/src/schema.rs | 8 +++++ 5 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql create mode 100644 core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql diff --git a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql new file mode 100644 index 0000000000..c932806e59 --- /dev/null +++ b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS eth_nonce CASCADE; \ No newline at end of file diff --git a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql new file mode 100644 index 0000000000..c50f650707 --- /dev/null +++ b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql @@ -0,0 +1,6 @@ +-- Your SQL goes here +CREATE TABLE eth_nonce ( + -- enforce single record + id bool PRIMARY KEY NOT NULL DEFAULT true, + nonce BIGINT NOT NULL +); diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 405069fa09..850ae5534d 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -8,7 +8,7 @@ use web3::types::H256; // Workspace imports use models::Operation; // Local imports -use self::records::{NewETHOperation, StorageETHOperation}; +use self::records::{ETHNonce, NewETHNonce, NewETHOperation, StorageETHOperation}; use crate::chain::operations::records::StoredOperation; use crate::schema::*; use crate::StorageProcessor; @@ -127,4 +127,35 @@ impl<'a> EthereumSchema<'a> { .map(drop) }) } + + pub fn get_next_nonce(&self) -> QueryResult { + let nonce: Option = eth_nonce::table.first(self.0.conn()).optional()?; + + let old_nonce_value = if let Some(old_nonce) = nonce { + // 
There is a stored nonce. We take its value and update the entry with a new nonce. + let new_nonce_value = old_nonce.nonce + 1; + + update(eth_nonce::table.filter(eth_nonce::id.eq(true))) + .set(eth_nonce::nonce.eq(new_nonce_value)) + .execute(self.0.conn())?; + + old_nonce.nonce + } else { + // There is no stored value. We start with 0, and store the incremented nonce (1). + let old_nonce_value = 0; + let new_nonce_value = old_nonce_value + 1; + let new_nonce = NewETHNonce { + nonce: new_nonce_value, + }; + + insert_into(eth_nonce::table) + .values(new_nonce) + .execute(self.0.conn()) + .map(drop)?; + + old_nonce_value + }; + + Ok(old_nonce_value) + } } diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index 71e67af275..2ff2543285 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -27,3 +27,16 @@ pub struct NewETHOperation { pub tx_hash: Vec, pub raw_tx: Vec, } + +#[derive(Debug, Insertable, PartialEq)] +#[table_name = "eth_nonce"] +pub struct NewETHNonce { + pub nonce: i64, +} + +#[derive(Debug, Queryable, QueryableByName, PartialEq)] +#[table_name = "eth_nonce"] +pub struct ETHNonce { + pub id: bool, + pub nonce: i64, +} diff --git a/core/storage/src/schema.rs b/core/storage/src/schema.rs index 74f8b16cce..7b8fb28eeb 100644 --- a/core/storage/src/schema.rs +++ b/core/storage/src/schema.rs @@ -82,6 +82,13 @@ table! { } } +table! { + eth_nonce (id) { + id -> Bool, + nonce -> Int8, + } +} + table! { eth_operations (id) { id -> Int8, @@ -221,6 +228,7 @@ allow_tables_to_appear_in_same_query!( balances, blocks, data_restore_last_watched_eth_block, + eth_nonce, eth_operations, events_state, executed_priority_operations, From fa9d57b0194ae76cdcf26a2b3ff325dbccc8a81f Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 08:13:40 +0300 Subject: [PATCH 040/186] Add a test for the new eth_nonce table --- core/storage/src/tests/ethereum.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 7ac642d755..5aa7c23629 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -152,3 +152,19 @@ fn ethereum_storage() { Ok(()) }); } + +/// Check that stored nonce starts with 0 and is incremented after every getting. +#[test] +#[cfg_attr(not(feature = "db_test"), ignore)] +fn eth_nonce() { + let conn = StorageProcessor::establish_connection().unwrap(); + db_test(conn.conn(), || { + for expected_next_nonce in 0..5 { + let actual_next_nonce = EthereumSchema(&conn).get_next_nonce()?; + + assert_eq!(actual_next_nonce, expected_next_nonce); + } + + Ok(()) + }); +} From 35342eefcb24f24e935c650ee8a1fbeb914e5d53 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 09:02:47 +0300 Subject: [PATCH 041/186] eth_client: Add a method to encode tx data without signing --- core/eth_client/src/lib.rs | 70 ++++++++++++++++++++++++++++---------- 1 file changed, 52 insertions(+), 18 deletions(-) diff --git a/core/eth_client/src/lib.rs b/core/eth_client/src/lib.rs index 072c270b26..0359fc92cb 100644 --- a/core/eth_client/src/lib.rs +++ b/core/eth_client/src/lib.rs @@ -49,47 +49,60 @@ impl ETHClient { } } - pub async fn current_nonce(&self) -> Result { + /// Returns the next *expected* nonce with respect to the transactions + /// in the mempool. + /// + /// Note that this method may be inconsistent if used with a cluster of nodes + /// (e.g. 
`infura`), since the consecutive tx send and attempt to get a pending + /// nonce may be routed to the different nodes in cluster, and the latter node + /// may not know about the send tx yet. Thus it is not recommended to rely on this + /// method as on the trusted source of the latest nonce. + pub async fn pending_nonce(&self) -> Result { self.web3 .eth() - .transaction_count(self.sender_account, Some(BlockNumber::Latest)) + .transaction_count(self.sender_account, Some(BlockNumber::Pending)) .compat() .await } - pub async fn block_number(&self) -> Result { - self.web3.eth().block_number().compat().await - } - - pub async fn pending_nonce(&self) -> Result { + /// Returns the account nonce based on the last *mined* block. Not mined transactions + /// (which are in mempool yet) are not taken into account by this method. + pub async fn current_nonce(&self) -> Result { self.web3 .eth() - .transaction_count(self.sender_account, Some(BlockNumber::Pending)) + .transaction_count(self.sender_account, Some(BlockNumber::Latest)) .compat() .await } + pub async fn block_number(&self) -> Result { + self.web3.eth().block_number().compat().await + } + pub async fn get_gas_price(&self) -> Result { let mut network_gas_price = self.web3.eth().gas_price().compat().await?; network_gas_price *= U256::from(self.gas_price_factor); Ok(network_gas_price) } - /// Fills in gas/nonce if not supplied inside options. - pub async fn sign_call_tx( - &self, - func: &str, - params: P, - options: Options, - ) -> Result { + /// Encodes the transaction data (smart contract method and its input) to the bytes + /// without creating an actual transaction. + pub fn encode_tx_data(&self, func: &str, params: P) -> Vec { let f = self .contract .function(func) .expect("failed to get function parameters"); - let data = f - .encode_input(¶ms.into_tokens()) - .expect("failed to encode parameters"); + f.encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters") + } + /// Signs the transaction given the previously encoded data. + /// Fills in gas/nonce if not supplied inside options. + pub async fn sign_prepared_tx( + &self, + data: Vec, + options: Options, + ) -> Result { // fetch current gas_price let gas_price = match options.gas_price { Some(gas_price) => gas_price, @@ -128,6 +141,27 @@ impl ETHClient { }) } + /// Encodes the transaction data and signs the transaction. + /// Fills in gas/nonce if not supplied inside options. + pub async fn sign_call_tx( + &self, + func: &str, + params: P, + options: Options, + ) -> Result { + let f = self + .contract + .function(func) + .expect("failed to get function parameters"); + let data = f + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + self.sign_prepared_tx(data, options).await + } + + /// Sends the transaction to the Ethereum blockchain. + /// Transaction is expected to be encoded as the byte sequence. 
pub async fn send_raw_tx(&self, tx: Vec) -> Result { Ok(self .web3 From 6c1d96fdf5a43f06a69de68cc9ef0497199aa8cd Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 09:28:28 +0300 Subject: [PATCH 042/186] Fix some bugs & add a basic test for the TxQueue --- core/server/src/eth_sender/tx_queue/mod.rs | 131 +++++++++++++++++++-- 1 file changed, 118 insertions(+), 13 deletions(-) diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 61290b1395..e0c90d894d 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -1,13 +1,17 @@ // TODO: Remove me #![allow(dead_code)] -use eth_client::SignedCallResult; - use self::{counter_queue::CounterQueue, sparse_queue::SparseQueue}; mod counter_queue; mod sparse_queue; +pub type RawTxData = Vec; + +/// `TxQueueBuilder` is a structure aiming to simplify the process +/// of restoring of the `TxQueue` state after restart. +/// This structure allows to configure the sub-queues state (amount of processed +/// operations). #[derive(Debug)] pub struct TxQueueBuilder { max_pending_txs: usize, @@ -19,6 +23,7 @@ pub struct TxQueueBuilder { } impl TxQueueBuilder { + /// Initializes queue building process. pub fn new(max_pending_txs: usize) -> Self { Self { max_pending_txs, @@ -29,6 +34,7 @@ impl TxQueueBuilder { } } + /// Sets the amount of transactions sent to the Ethereum blockchain, but not confirmed yet. pub fn with_sent_pending_txs(self, sent_pending_txs: usize) -> Self { Self { sent_pending_txs, @@ -36,6 +42,7 @@ impl TxQueueBuilder { } } + /// Sets the amount of operations sent for the `commit` queue. pub fn with_commit_operations_count(self, commit_operations_count: usize) -> Self { Self { commit_operations_count, @@ -43,6 +50,7 @@ impl TxQueueBuilder { } } + /// Sets the amount of operations sent for the `pending` queue. pub fn with_verify_operations_count(self, verify_operations_count: usize) -> Self { Self { verify_operations_count, @@ -50,6 +58,7 @@ impl TxQueueBuilder { } } + /// Sets the amount of operations sent for the `withdraw` queue. pub fn with_withdraw_operations_count(self, withdraw_operations_count: usize) -> Self { Self { withdraw_operations_count, @@ -57,6 +66,7 @@ impl TxQueueBuilder { } } + /// Finishes the queue building process. pub fn build(self) -> TxQueue { TxQueue { max_pending_txs: self.max_pending_txs, @@ -88,11 +98,9 @@ pub struct TxQueue { max_pending_txs: usize, sent_pending_txs: usize, - // TODO: SignedCallResult isn't appropriate, since it means an assigned nonce. We don't want - // to assign nonce until the actual tx send. - commit_operations: CounterQueue, - verify_operations: SparseQueue, - withdraw_operations: CounterQueue, + commit_operations: CounterQueue, + verify_operations: SparseQueue, + withdraw_operations: CounterQueue, } impl TxQueue { @@ -119,27 +127,40 @@ impl TxQueue { } /// Adds the `commit` operation to the queue. - pub fn add_commit_operation(&mut self, commit_operation: SignedCallResult) { + pub fn add_commit_operation(&mut self, commit_operation: RawTxData) { self.commit_operations.push_back(commit_operation); } /// Adds the `verify` operation to the queue. - pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: SignedCallResult) { + pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: RawTxData) { self.verify_operations.insert(block_idx, verify_operation); } /// Adds the `withdraw` operation to the queue. 
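// The change just below switches the queued payload from `SignedCallResult` to
// `RawTxData`: the queue now stores only raw transaction bytes (such as the calldata
// produced by `ETHClient::encode_tx_data`), and the nonce is assigned only when the
// transaction is actually signed, using the value persisted in the `eth_nonce` table.
// A minimal illustration of that flow, using the types introduced in these patches;
// the function name `prepare_next_tx` is an assumption, and the in-crate imports
// (`TxQueue`, `RawTxData`, `DatabaseAccess`) are elided.
use web3::contract::Options;

fn prepare_next_tx<DB: DatabaseAccess>(
    db: &DB,
    queue: &mut TxQueue,
) -> Result<Option<(RawTxData, Options)>, failure::Error> {
    Ok(match queue.pop_front() {
        Some(raw_tx) => {
            // Take the nonce from the database rather than from the node, so that it
            // stays consistent across restarts and pending (not yet mined) transactions.
            let nonce = db.next_nonce()?;
            let mut options = Options::default();
            options.nonce = Some(nonce.into());
            // `raw_tx` and `options` would then go to `ETHClient::sign_prepared_tx`,
            // followed by `send_raw_tx`.
            Some((raw_tx, options))
        }
        None => None,
    })
}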
- pub fn add_withdraw_operation(&mut self, withdraw_operation: SignedCallResult) { + pub fn add_withdraw_operation(&mut self, withdraw_operation: RawTxData) { self.withdraw_operations.push_back(withdraw_operation); } /// Gets the next transaction to send, according to the transaction sending policy. /// For details, see the structure doc-comment. - pub fn pop_front(&mut self) -> Option { + pub fn pop_front(&mut self) -> Option { if self.sent_pending_txs >= self.max_pending_txs { return None; } + // Get the next operation and increment the sent counter if needed. + match self.get_next_operation() { + Some(op) => { + self.sent_pending_txs += 1; + Some(op) + } + None => None, + } + } + + /// Obtains the next operation from the underlying queues. + /// This method does not use/affect `sent_pending_tx` counter. + fn get_next_operation(&mut self) -> Option { // 1. Highest priority: verify operations. // If we've committed a corresponding `Commit` operation, and @@ -153,8 +174,11 @@ impl TxQueue { // 2. After verify operations we should process withdraw operation. - if let Some(withdraw_operation) = self.withdraw_operations.pop_front() { - return Some(withdraw_operation); + // We don't want to be ahead of the last verify operation. + if self.withdraw_operations.get_count() < next_verify_op_id { + if let Some(withdraw_operation) = self.withdraw_operations.pop_front() { + return Some(withdraw_operation); + } } // 3. Finally, check the commit queue. @@ -167,4 +191,85 @@ impl TxQueue { None } + + /// Notifies the queue about the transaction being confirmed on the Ethereum blockchain. + /// Decrements the amount of transactions "in the fly". + pub fn report_commitment(&mut self) { + assert!( + self.sent_pending_txs > 0, + "No transactions are expected to be confirmed" + ); + + self.sent_pending_txs -= 1; + } +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Checks the basic workflow of the queue including adding several operations + /// and retrieving them later. + #[test] + fn basic_operations() { + const MAX_IN_FLY: usize = 3; + const COMMIT_MARK: u8 = 0; + const VERIFY_MARK: u8 = 1; + const WITHDRAW_MARK: u8 = 2; + + let mut queue = TxQueue::new(MAX_IN_FLY); + + // Add 2 commit, 2 verify and 2 withdraw operations. + queue.add_commit_operation(vec![COMMIT_MARK, 0]); + queue.add_commit_operation(vec![COMMIT_MARK, 1]); + queue.add_verify_operation(0, vec![VERIFY_MARK, 0]); + queue.add_verify_operation(1, vec![VERIFY_MARK, 1]); + queue.add_withdraw_operation(vec![WITHDRAW_MARK, 0]); + queue.add_withdraw_operation(vec![WITHDRAW_MARK, 1]); + + // Retrieve the next {MAX_IN_FLY} operations. + + // The first operation should be `commit`, since we can't send `verify` before the commitment. + let op_1 = queue.pop_front().unwrap(); + assert_eq!(op_1, vec![COMMIT_MARK, 0]); + + // The second operation should be `verify`, since it has the highest priority. + let op_2 = queue.pop_front().unwrap(); + assert_eq!(op_2, vec![VERIFY_MARK, 0]); + + // The third operation should be `withdraw`, since it has higher priority than `commit`, and we can't + // send the `verify` before the corresponding `commit` operation. + let op_3 = queue.pop_front().unwrap(); + assert_eq!(op_3, vec![WITHDRAW_MARK, 0]); + + // After that we have {MAX_IN_FLY} operations, and `pop_front` should yield nothing. + assert_eq!(queue.pop_front(), None); + + // Report that one operation is completed. + queue.report_commitment(); + + // Now we should obtain the next commit operation. 
+ let op_4 = queue.pop_front().unwrap(); + assert_eq!(op_4, vec![COMMIT_MARK, 1]); + + // The limit should be met again, and nothing more should be yielded. + assert_eq!(queue.pop_front(), None); + + // Report the remaining three operations as completed. + assert_eq!(queue.sent_pending_txs, MAX_IN_FLY); + for _ in 0..MAX_IN_FLY { + queue.report_commitment(); + } + assert_eq!(queue.sent_pending_txs, 0); + + // Pop remaining operations. + let op_5 = queue.pop_front().unwrap(); + assert_eq!(op_5, vec![VERIFY_MARK, 1]); + + let op_6 = queue.pop_front().unwrap(); + assert_eq!(op_6, vec![WITHDRAW_MARK, 1]); + + // Though the limit is not met (2 txs in fly, and limit is 3), there should be no txs in the queue. + assert_eq!(queue.pop_front(), None); + } } From 7ac77380550d58e74e79a536d1bd271418e33431 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 10:35:09 +0300 Subject: [PATCH 043/186] Use the database nonce in eth_sender --- core/server/src/eth_sender/database.rs | 8 ++ .../src/eth_sender/ethereum_interface.rs | 7 -- core/server/src/eth_sender/mod.rs | 23 +++-- core/server/src/eth_sender/tests/mock.rs | 51 +++++++--- core/server/src/eth_sender/tests/mod.rs | 99 +++++++++---------- 5 files changed, 107 insertions(+), 81 deletions(-) diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index 81d2e0bad9..8a7fcf56ed 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -24,6 +24,9 @@ pub(super) trait DatabaseAccess { /// Marks an operation as completed in the database. fn confirm_operation(&self, hash: &H256) -> Result<(), failure::Error>; + + /// Gets the next nonce to use from the database. + fn next_nonce(&self) -> Result; } /// The actual database wrapper. @@ -74,4 +77,9 @@ impl DatabaseAccess for Database { let storage = self.db_pool.access_storage()?; Ok(storage.ethereum_schema().confirm_eth_tx(hash)?) } + + fn next_nonce(&self) -> Result { + let storage = self.db_pool.access_storage()?; + Ok(storage.ethereum_schema().get_next_nonce()?) + } } diff --git a/core/server/src/eth_sender/ethereum_interface.rs b/core/server/src/eth_sender/ethereum_interface.rs index 5160333979..a1236cf845 100644 --- a/core/server/src/eth_sender/ethereum_interface.rs +++ b/core/server/src/eth_sender/ethereum_interface.rs @@ -36,9 +36,6 @@ pub(super) trait EthereumInterface { /// Gets the current gas price. fn gas_price(&self) -> Result; - /// Gets the current nonce to be used in the transactions. - fn current_nonce(&self) -> Result; - /// Sends a signed transaction to the Ethereum blockchain. fn send_tx(&self, signed_tx: &SignedCallResult) -> Result<(), failure::Error>; @@ -135,10 +132,6 @@ impl EthereumInterface for EthereumHttpClient { block_on(self.eth_client.get_gas_price()) } - fn current_nonce(&self) -> Result { - block_on(self.eth_client.current_nonce()).map_err(From::from) - } - fn sign_call_tx( &self, func: &str, diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index c8b89ef7dd..4788e6344a 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -338,7 +338,10 @@ impl ETHSender { let tx_options = if let Some(stuck_tx) = stuck_tx { self.tx_options_from_stuck_tx(stuck_tx)? 
} else { - Options::default() + let mut options = Options::default(); + let nonce = self.db.next_nonce()?; + options.nonce = Some(nonce.into()); + options }; let signed_tx = self.sign_operation_tx(op, tx_options)?; @@ -367,20 +370,16 @@ impl ETHSender { U256::from_dec_str(&stuck_tx.signed_tx.gas_price.to_string()).unwrap(); let new_gas_price = self.scale_gas(old_tx_gas_price)?; - let new_nonce = self.ethereum.current_nonce()?; + let nonce = stuck_tx.signed_tx.nonce; info!( - "Replacing tx: hash: {:#x}, old_gas: {}, new_gas: {}, old_nonce: {}, new_nonce: {}", - stuck_tx.signed_tx.hash, - old_tx_gas_price, - new_gas_price, - stuck_tx.signed_tx.nonce, - new_nonce + "Replacing tx: hash: {:#x}, old_gas: {}, new_gas: {}, used nonce: {}", + stuck_tx.signed_tx.hash, old_tx_gas_price, new_gas_price, nonce ); Ok(Options::with(move |opt| { opt.gas_price = Some(new_gas_price); - opt.nonce = Some(new_nonce); + opt.nonce = Some(nonce); })) } @@ -436,12 +435,16 @@ impl ETHSender { fn call_complete_withdrawals(&self) -> Result<(), failure::Error> { // function completeWithdrawals(uint32 _n) external { + let mut options = Options::default(); + let nonce = self.db.next_nonce()?; + options.nonce = Some(nonce.into()); + let tx = self .ethereum .sign_call_tx( "completeWithdrawals", config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - Options::default(), + options, ) .map_err(|e| failure::format_err!("completeWithdrawals: {}", e))?; info!("Sending completeWithdrawals tx with hash: {:#?}", tx.hash); diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 9a7db1d695..39b2806f8e 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -1,7 +1,7 @@ //! Mocking utilities for tests. // Built-in deps -use std::cell::RefCell; +use std::cell::{Cell, RefCell}; use std::collections::{HashMap, VecDeque}; // External uses use futures::channel::mpsc; @@ -24,6 +24,7 @@ pub(super) struct MockDatabase { restore_state: VecDeque, unconfirmed_operations: RefCell>, confirmed_operations: RefCell>, + nonce: Cell, } impl MockDatabase { @@ -108,13 +109,20 @@ impl DatabaseAccess for MockDatabase { Ok(()) } + + fn next_nonce(&self) -> Result { + let old_value = self.nonce.get(); + let new_value = old_value + 1; + self.nonce.set(new_value); + + Ok(old_value) + } } /// Mock Ethereum client is capable of recording all the incoming requests for the further analysis. #[derive(Debug)] pub(super) struct MockEthereum { pub block_number: u64, - pub nonce: U256, pub gas_price: U256, pub tx_statuses: RefCell>, pub sent_txs: RefCell>, @@ -124,7 +132,6 @@ impl Default for MockEthereum { fn default() -> Self { Self { block_number: 1, - nonce: Default::default(), gas_price: 100.into(), tx_statuses: Default::default(), sent_txs: Default::default(), @@ -174,7 +181,6 @@ impl MockEthereum { /// as a success. pub fn add_successfull_execution(&mut self, tx: &TransactionETHState, confirmations: u64) { self.block_number += confirmations; - self.nonce += 1.into(); let status = ExecutedTxStatus { confirmations, @@ -189,7 +195,6 @@ impl MockEthereum { /// Same as `add_successfull_execution`, but marks the transaction as a failure. pub fn add_failed_execution(&mut self, tx: &TransactionETHState, confirmations: u64) { self.block_number += confirmations; - self.nonce += 1.into(); let status = ExecutedTxStatus { confirmations, @@ -202,7 +207,10 @@ impl MockEthereum { } /// Replicates the `ETHCLient::sign_operation_tx` method for testing. 
- pub fn create_signed_tx_replica(&self, op: &Operation) -> SignedCallResult { + pub fn create_signed_tx_replica(&self, op: &Operation, nonce: i64) -> SignedCallResult { + let mut options = Options::default(); + options.nonce = Some(nonce.into()); + match &op.action { Action::Commit => { let root = op.block.get_eth_encoded_root(); @@ -218,7 +226,7 @@ impl MockEthereum { witness_data.0, witness_data.1, ), - Options::default(), + options, ) .unwrap() } @@ -226,7 +234,7 @@ impl MockEthereum { .sign_call_tx( "verifyBlock", (u64::from(op.block.block_number), *proof.clone()), - Options::default(), + options, ) .unwrap(), } @@ -246,10 +254,6 @@ impl EthereumInterface for MockEthereum { Ok(self.gas_price) } - fn current_nonce(&self) -> Result { - Ok(self.nonce) - } - fn send_tx(&self, signed_tx: &SignedCallResult) -> Result<(), failure::Error> { self.sent_txs .borrow_mut() @@ -265,7 +269,7 @@ impl EthereumInterface for MockEthereum { options: Options, ) -> Result { let gas_price = options.gas_price.unwrap_or(self.gas_price); - let nonce = options.nonce.unwrap_or(self.nonce); + let nonce = options.nonce.expect("Nonce must be set for every tx"); // Nonce and gas_price are appended to distinguish the same transactions // with different gas by their hash in tests. @@ -314,3 +318,24 @@ pub(super) fn restored_eth_sender( notify_receiver, ) } + +/// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. +/// This method should be used to create expected tx copies which won't affect +/// the internal `ETHSender` state. +pub(super) fn create_signed_tx( + eth_sender: ÐSender, + operation: &Operation, + deadline_block: u64, + nonce: i64, +) -> TransactionETHState { + let mut options = Options::default(); + options.nonce = Some(nonce.into()); + + let signed_tx = eth_sender.sign_operation_tx(operation, options).unwrap(); + + TransactionETHState { + op_id: operation.id.unwrap(), + deadline_block, + signed_tx, + } +} diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 3060dba82c..9c9e3b84af 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -1,5 +1,7 @@ +// External uses +use web3::contract::Options; // Local uses -use self::mock::{default_eth_sender, restored_eth_sender}; +use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; use super::{ ethereum_interface::EthereumInterface, transactions::{ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome}, @@ -62,7 +64,7 @@ fn tx_creation() { test_data::verify_operation(0), ]; - for operation in operations { + for (nonce, operation) in operations.iter().enumerate() { let actual_tx = eth_sender .sign_new_tx( &operation, @@ -70,7 +72,9 @@ fn tx_creation() { None, ) .unwrap(); - let expected_tx = eth_sender.ethereum.create_signed_tx_replica(&operation); + let expected_tx = eth_sender + .ethereum + .create_signed_tx_replica(&operation, nonce as i64); assert_eq!(actual_tx.signed_tx, expected_tx); } } @@ -201,7 +205,7 @@ fn operation_commitment_workflow() { let verify_operation_id = operations[1].id; - for operation in operations { + for (nonce, operation) in operations.iter().enumerate() { // Send an operation to `ETHSender`. sender.try_send(operation.clone()).unwrap(); @@ -210,13 +214,8 @@ fn operation_commitment_workflow() { eth_sender.proceed_next_operation(); // Now we should see that transaction is stored in the database and sent to the Ethereum. 
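        // (Editor's note: `create_signed_tx` used below is the test helper added in this
        // patch; unlike `sign_new_tx` it takes an explicit nonce and does not pull one from
        // the mock database, so building the expected copy leaves the sender's internal
        // state untouched.)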
- let expected_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); eth_sender.db.assert_stored(&expected_tx); eth_sender.ethereum.assert_sent(&expected_tx); @@ -235,12 +234,15 @@ fn operation_commitment_workflow() { } // Check that `completeWithdrawals` transaction is sent. + let mut options = Options::default(); + let nonce = operations.len().into(); + options.nonce = Some(nonce); let tx = eth_sender .ethereum .sign_call_tx( "completeWithdrawals", models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - Default::default(), + options, ) .unwrap(); eth_sender.ethereum.assert_sent_by_hash(&tx.hash); @@ -268,13 +270,9 @@ fn stuck_transaction() { eth_sender.retrieve_operations(); eth_sender.proceed_next_operation(); - let stuck_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); // Skip some blocks and expect sender to send a new tx. eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; @@ -312,27 +310,39 @@ fn operations_order() { // We send multiple the operations at once to the channel. let operations_count = 3; let mut operations = Vec::new(); - operations.extend_from_slice(&test_data::COMMIT_OPERATIONS[..operations_count]); - operations.extend_from_slice(&test_data::VERIFY_OPERATIONS[..operations_count]); + let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; + let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; + operations.extend_from_slice(commit_operations); + operations.extend_from_slice(verify_operations); // Also we create the list of expected transactions. let mut expected_txs = Vec::new(); - for (idx, operation) in operations.iter().enumerate() { + // Create expected txs from the commit operations. + for (idx, operation) in commit_operations.iter().enumerate() { // We start from the 1 block, and step logic is: // N blocks to confirm, repeated `idx` times. let start_block = 1 + super::WAIT_CONFIRMATIONS * idx as u64; - let expected_tx = eth_sender - .sign_new_tx(operation, eth_sender.get_deadline_block(start_block), None) - .unwrap(); + let deadline_block = eth_sender.get_deadline_block(start_block); + let nonce = idx; - // Update nonce as well (it will be reset below). - eth_sender.ethereum.nonce += 1.into(); + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); expected_txs.push(expected_tx); } - // Reset nonce (it was affected by creating expected transactions). - eth_sender.ethereum.nonce = 0.into(); + // Create expected txs from the verify operations. + for (idx, operation) in verify_operations.iter().enumerate() { + let start_block = 1 + super::WAIT_CONFIRMATIONS * (commit_operations.len() + idx) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + // For verify operations the logic for nonce is slightly different: + // After each verify operation we send the withdraw operation as well, + // thus every verify operation increases the nonce by two. 
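            // Editor's worked example (illustrative): with 3 commit operations occupying
            // nonces 0..=2, each verify/withdraw pair then takes 3/4, 5/6 and 7/8, so the
            // verify at position `idx` lands on nonce 3 + idx * 2, i.e.
            // commit_operations.len() + idx * 2.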
+ let nonce = commit_operations.len() + idx * 2; + + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + + expected_txs.push(expected_tx); + } for operation in operations.iter() { sender.try_send(operation.clone()).unwrap(); @@ -374,13 +384,9 @@ fn transaction_failure() { let operation = test_data::commit_operation(0); sender.try_send(operation.clone()).unwrap(); - let failing_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); eth_sender.retrieve_operations(); eth_sender.proceed_next_operation(); @@ -408,20 +414,15 @@ fn restore_state() { let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations); - for operation in operations { + for (nonce, operation) in operations.iter().enumerate() { // Note that we DO NOT send an operation to `ETHSender` and neither receive it. // We do process operations restored from the DB though. // The rest of this test is the same as in `operation_commitment_workflow`. eth_sender.proceed_next_operation(); - let expected_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); eth_sender .ethereum @@ -449,13 +450,9 @@ fn confirmations_independence() { eth_sender.retrieve_operations(); eth_sender.proceed_next_operation(); - let stuck_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operation(); From 3456ea65fbb2450caae6205af261c001868720f8 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Wed, 18 Mar 2020 14:53:55 +0200 Subject: [PATCH 044/186] Improved errors --- js/zksync.js/src/wallet.ts | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index 9a6db0dc17..fa8b12a57a 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -16,6 +16,12 @@ import { SYNC_MAIN_CONTRACT_INTERFACE, } from "./utils"; +class ZKSyncTxError extends Error { + constructor(message: string, public value: PriorityOperationReceipt | TransactionReceipt) { + super(message); + } +} + export class Wallet { public provider: Provider; @@ -468,11 +474,12 @@ class ETHOperation { "COMMIT" ); + this.state = "Committed"; + if (receipt.executed == false) { - throw receipt; + throw new ZKSyncTxError("Priority operation failed", receipt); } - this.state = "Committed"; return receipt; } @@ -485,11 +492,12 @@ class ETHOperation { "VERIFY" ); + this.state = "Verified"; + if (receipt.executed == false) { - throw receipt; + throw new ZKSyncTxError("Priority operation failed", receipt); } - this.state = "Verified"; return receipt; } } @@ -514,8 +522,8 @@ class Transaction { ); this.state = "Committed"; - if (!receipt.success) { - throw receipt; + if 
(receipt.success == false) { + throw new ZKSyncTxError(`ZKSync transaction failed: ${receipt.failReason}`, receipt); } return receipt; @@ -529,8 +537,8 @@ class Transaction { ); this.state = "Verified"; - if (!receipt.success) { - throw receipt; + if (receipt.success == false) { + throw new ZKSyncTxError(`ZKSync transaction failed: ${receipt.failReason}`, receipt); } return receipt; From c1fd78ffd080e6c8a8fc71a73182c3486e4f70f1 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 14:48:29 +0300 Subject: [PATCH 045/186] Initialize eth_nonce table using scripts on init --- Makefile | 5 ++++- bin/db-insert-eth-nonce.sh | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100755 bin/db-insert-eth-nonce.sh diff --git a/Makefile b/Makefile index bb918f8c60..de23989bb5 100644 --- a/Makefile +++ b/Makefile @@ -49,7 +49,10 @@ db-setup: db-insert-contract: @bin/db-insert-contract.sh -db-reset: confirm_action db-wait db-drop db-setup db-insert-contract +db-insert-eth-nonce: + @bin/db-insert-eth-nonce.sh + +db-reset: confirm_action db-wait db-drop db-setup db-insert-contract db-insert-eth-nonce @echo database is ready db-migrate: confirm_action diff --git a/bin/db-insert-eth-nonce.sh b/bin/db-insert-eth-nonce.sh new file mode 100755 index 0000000000..2511ebe844 --- /dev/null +++ b/bin/db-insert-eth-nonce.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# Force read env -- this is important, sp that we re-ready the new contract value after redeploy!!! +ZKSYNC_ENV= +. .setup_env + +# Retrieve pending nonce from the node and obtain the value via `jq`. +# NONCE variable will have the value like `"0x123"`. +# Log output is redirected to the `/dev/null` to avoid garbage in the overall command output. +NONCE=`curl \ + -H "Accept: application/json" \ + -H "Content-Type: application/json" \ + -X POST \ + --data '{"jsonrpc":"2.0","method":"eth_getTransactionCount","params":['"\"$OPERATOR_ETH_ADDRESS\""',"pending"],"id":1}' \ + $WEB3_URL 2> /dev/null \ + | jq '.result'` + +# Strip quotes around the nonce value. Result will be like `0x123`. +eval NONCE=$NONCE + +# Convert the number from the hexadecimal form to the decimal. The result will be like `291`. 
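# Editor's example (illustrative): `printf "%d\n" 0x123` prints `291`.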
+NONCE=`printf "%d\n" $NONCE` + + +psql "$DATABASE_URL" -c "INSERT INTO eth_nonce (nonce) \ + VALUES ('$NONCE') \ + ON CONFLICT (id) DO UPDATE \ + SET nonce = '$NONCE'" || exit 1 +echo "successfully inserted the Ethereum nonce ($NONCE) into the database" \ No newline at end of file From fe9d27d706e7939ff99f516dbc401654545bbe96 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 14:48:50 +0300 Subject: [PATCH 046/186] Adapt the code to expect nonce to be initialized from outside --- core/server/src/eth_sender/mod.rs | 6 +-- core/storage/src/ethereum/mod.rs | 60 +++++++++++++++++----------- core/storage/src/ethereum/records.rs | 6 --- core/storage/src/tests/ethereum.rs | 2 + 4 files changed, 42 insertions(+), 32 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 4788e6344a..a9f8ab695b 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -265,8 +265,8 @@ impl ETHSender { op.txs.push(new_tx.clone()); info!( - "Sending tx for op, op_id: {} tx_hash: {:#x}", - new_tx.op_id, new_tx.signed_tx.hash + "Sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", + new_tx.op_id, new_tx.signed_tx.hash, new_tx.signed_tx.nonce, ); self.ethereum.send_tx(&new_tx.signed_tx)?; @@ -276,7 +276,7 @@ impl ETHSender { /// Handles a transaction execution failure by reporting the issue to the log /// and terminating the node. fn failure_handler(&self, receipt: &TransactionReceipt) -> ! { - info!( + error!( "Ethereum transaction unexpectedly failed. Receipt: {:#?}", receipt ); diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 850ae5534d..2fbd9ac9a9 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -8,7 +8,7 @@ use web3::types::H256; // Workspace imports use models::Operation; // Local imports -use self::records::{ETHNonce, NewETHNonce, NewETHOperation, StorageETHOperation}; +use self::records::{ETHNonce, NewETHOperation, StorageETHOperation}; use crate::chain::operations::records::StoredOperation; use crate::schema::*; use crate::StorageProcessor; @@ -128,34 +128,48 @@ impl<'a> EthereumSchema<'a> { }) } + /// Obtains the next nonce to use and updates the corresponding entry in the database + /// for the next invocation. + /// + /// This method expects the database to be initially prepared with inserting the actual + /// nonce value. Currently the script `db-insert-eth-nonce.sh` is responsible for that + /// and it's invoked within `db-reset` subcommand. pub fn get_next_nonce(&self) -> QueryResult { - let nonce: Option = eth_nonce::table.first(self.0.conn()).optional()?; + let old_nonce: ETHNonce = eth_nonce::table.first(self.0.conn())?; - let old_nonce_value = if let Some(old_nonce) = nonce { - // There is a stored nonce. We take its value and update the entry with a new nonce. - let new_nonce_value = old_nonce.nonce + 1; + let new_nonce_value = old_nonce.nonce + 1; - update(eth_nonce::table.filter(eth_nonce::id.eq(true))) - .set(eth_nonce::nonce.eq(new_nonce_value)) - .execute(self.0.conn())?; + update(eth_nonce::table.filter(eth_nonce::id.eq(true))) + .set(eth_nonce::nonce.eq(new_nonce_value)) + .execute(self.0.conn())?; - old_nonce.nonce - } else { - // There is no stored value. We start with 0, and store the incremented nonce (1). 
- let old_nonce_value = 0; - let new_nonce_value = old_nonce_value + 1; - let new_nonce = NewETHNonce { - nonce: new_nonce_value, - }; + let old_nonce_value = old_nonce.nonce; - insert_into(eth_nonce::table) - .values(new_nonce) - .execute(self.0.conn()) - .map(drop)?; + Ok(old_nonce_value) + } - old_nonce_value - }; + /// Method that internally initializes the `eth_nonce` table. + /// Since in db tests the database is empty, we must provide a possibility + /// to initialize required db fields. + #[cfg(test)] + pub fn initialize_eth_nonce(&self) -> QueryResult<()> { + #[derive(Debug, Insertable)] + #[table_name = "eth_nonce"] + pub struct NewETHNonce { + pub nonce: i64, + } - Ok(old_nonce_value) + let old_nonce: Option = eth_nonce::table.first(self.0.conn()).optional()?; + + if old_nonce.is_none() { + // There is no nonce, we have to insert it manually. + let nonce = NewETHNonce { nonce: 0 }; + + insert_into(eth_nonce::table) + .values(&nonce) + .execute(self.0.conn())?; + } + + Ok(()) } } diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index 2ff2543285..f1333f0fa0 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -28,12 +28,6 @@ pub struct NewETHOperation { pub raw_tx: Vec, } -#[derive(Debug, Insertable, PartialEq)] -#[table_name = "eth_nonce"] -pub struct NewETHNonce { - pub nonce: i64, -} - #[derive(Debug, Queryable, QueryableByName, PartialEq)] #[table_name = "eth_nonce"] pub struct ETHNonce { diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 5aa7c23629..98c0c72c80 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -159,6 +159,8 @@ fn ethereum_storage() { fn eth_nonce() { let conn = StorageProcessor::establish_connection().unwrap(); db_test(conn.conn(), || { + EthereumSchema(&conn).initialize_eth_nonce()?; + for expected_next_nonce in 0..5 { let actual_next_nonce = EthereumSchema(&conn).get_next_nonce()?; From b06951d760d514f31a06952e25f089942dcf9a0e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 19 Mar 2020 15:03:11 +0300 Subject: [PATCH 047/186] Fix the nonce calculation for mock database --- core/server/src/eth_sender/tests/mock.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 39b2806f8e..ed7b5b3d51 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -32,8 +32,11 @@ impl MockDatabase { pub fn with_restorable_state( restore_state: impl IntoIterator, ) -> Self { + let restore_state: VecDeque<_> = restore_state.into_iter().collect(); + let nonce = restore_state.iter().fold(0, |acc, op| acc + op.txs.len()); Self { - restore_state: restore_state.into_iter().collect(), + restore_state, + nonce: Cell::new(nonce as i64), ..Default::default() } } From 504563aee01dd7df5d24938e385ca8586c00a57c Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Thu, 19 Mar 2020 12:34:56 +0200 Subject: [PATCH 048/186] Fix typo --- js/tests/WalletDecorator.ts | 1 - js/tests/simple-integration-test.ts | 2 +- js/zksync.js/src/signer.ts | 8 ++++---- js/zksync.js/src/wallet.ts | 6 +++--- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/js/tests/WalletDecorator.ts b/js/tests/WalletDecorator.ts index 1a8dd5d041..8d2f98d9fc 100644 --- a/js/tests/WalletDecorator.ts +++ b/js/tests/WalletDecorator.ts @@ -261,7 +261,6 @@ export class WalletDecorator { depositTo: 
this.syncWallet.address(), token, amount, - // maxFeeInETHToken: bigNumberify(0), ethTxOptions: { nonce, }, diff --git a/js/tests/simple-integration-test.ts b/js/tests/simple-integration-test.ts index 1c46111d16..54cab5cf13 100644 --- a/js/tests/simple-integration-test.ts +++ b/js/tests/simple-integration-test.ts @@ -53,7 +53,7 @@ async function testDeposit(depositWallet: Wallet, syncWallet: Wallet, token: typ if (await depositWallet.isERC20DepositsApproved(token)){ throw new Error("Token should not be approved"); } - const approveERC20 = await depositWallet.apporveERC20TokenDeposits(token); + const approveERC20 = await depositWallet.approveERC20TokenDeposits(token); await approveERC20.wait(); console.log(`Deposit approved: ${(new Date().getTime()) - startTime} ms`); if (!await depositWallet.isERC20DepositsApproved(token)){ diff --git a/js/zksync.js/src/signer.ts b/js/zksync.js/src/signer.ts index ae5d08b470..055431d103 100644 --- a/js/zksync.js/src/signer.ts +++ b/js/zksync.js/src/signer.ts @@ -124,10 +124,10 @@ export class Signer { // PubKeyHash or eth address export function serializeAddress(address: Address | PubKeyHash): Buffer { - const prefixlessAddress = address.startsWith("0x") - ? address.substr(2) - : address.startsWith("sync:") - ? address.substr(5) + // prettier-ignore + const prefixlessAddress + = address.startsWith("0x") ? address.substr(2) + : address.startsWith("sync:") ? address.substr(5) : null; if (prefixlessAddress === null) { diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index f12813ee75..c882a72891 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -344,7 +344,7 @@ export class Wallet { .eq(MAX_ERC20_APPROVE_AMOUNT); } - async apporveERC20TokenDeposits( + async approveERC20TokenDeposits( token: TokenLike ): Promise { if (isTokenETH(token)) { @@ -553,7 +553,7 @@ class ETHOperation { this.state = "Committed"; - if (receipt.executed == false) { + if (!receipt.executed) { throw new ZKSyncTxError("Priority operation failed", receipt); } @@ -571,7 +571,7 @@ class ETHOperation { this.state = "Verified"; - if (receipt.executed == false) { + if (!receipt.executed) { throw new ZKSyncTxError("Priority operation failed", receipt); } From 59b7de5fbc4ae28438b9cb4d5131dc897817f0d8 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 20 Mar 2020 08:34:21 +0300 Subject: [PATCH 049/186] Split sign_call_tx into two smaller methods --- .../src/eth_sender/ethereum_interface.rs | 23 +++++++----- core/server/src/eth_sender/mod.rs | 23 ++++++------ core/server/src/eth_sender/tests/mock.rs | 36 ++++++++++--------- core/server/src/eth_sender/tests/mod.rs | 10 +++--- 4 files changed, 52 insertions(+), 40 deletions(-) diff --git a/core/server/src/eth_sender/ethereum_interface.rs b/core/server/src/eth_sender/ethereum_interface.rs index a1236cf845..997ee390c2 100644 --- a/core/server/src/eth_sender/ethereum_interface.rs +++ b/core/server/src/eth_sender/ethereum_interface.rs @@ -39,11 +39,15 @@ pub(super) trait EthereumInterface { /// Sends a signed transaction to the Ethereum blockchain. fn send_tx(&self, signed_tx: &SignedCallResult) -> Result<(), failure::Error>; - /// Creates a transaction based on the provided parameters and signs it. - fn sign_call_tx( + /// Encodes the transaction data (smart contract method and its input) to the bytes + /// without creating an actual transaction. + fn encode_tx_data(&self, func: &str, params: P) -> Vec; + + /// Signs the transaction given the previously encoded data. 
+ /// Fills in gas/nonce if not supplied inside options. + fn sign_prepared_tx( &self, - func: &str, - params: P, + data: Vec, options: Options, ) -> Result; } @@ -132,12 +136,15 @@ impl EthereumInterface for EthereumHttpClient { block_on(self.eth_client.get_gas_price()) } - fn sign_call_tx( + fn encode_tx_data(&self, func: &str, params: P) -> Vec { + self.eth_client.encode_tx_data(func, params) + } + + fn sign_prepared_tx( &self, - func: &str, - params: P, + data: Vec, options: Options, ) -> Result { - block_on(self.eth_client.sign_call_tx(func, params, options)) + block_on(self.eth_client.sign_prepared_tx(data, options)) } } diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index a9f8ab695b..cb99ee5b67 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -389,7 +389,7 @@ impl ETHSender { op: &Operation, tx_options: Options, ) -> Result { - match &op.action { + let raw_tx = match &op.action { Action::Commit => { let root = op.block.get_eth_encoded_root(); @@ -409,7 +409,7 @@ impl ETHSender { ); // function commitBlock(uint32 _blockNumber, uint24 _feeAccount, bytes32 _newRoot, bytes calldata _publicData) - self.ethereum.sign_call_tx( + self.ethereum.encode_tx_data( "commitBlock", ( u64::from(op.block.block_number), @@ -419,18 +419,18 @@ impl ETHSender { witness_data.0, witness_data.1, ), - tx_options, ) } Action::Verify { proof } => { // function verifyBlock(uint32 _blockNumber, uint256[8] calldata proof) external { - self.ethereum.sign_call_tx( + self.ethereum.encode_tx_data( "verifyBlock", (u64::from(op.block.block_number), *proof.clone()), - tx_options, ) } - } + }; + + self.ethereum.sign_prepared_tx(raw_tx, tx_options) } fn call_complete_withdrawals(&self) -> Result<(), failure::Error> { @@ -439,13 +439,14 @@ impl ETHSender { let nonce = self.db.next_nonce()?; options.nonce = Some(nonce.into()); + let raw_tx = self.ethereum.encode_tx_data( + "completeWithdrawals", + config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, + ); + let tx = self .ethereum - .sign_call_tx( - "completeWithdrawals", - config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - options, - ) + .sign_prepared_tx(raw_tx, options) .map_err(|e| failure::format_err!("completeWithdrawals: {}", e))?; info!("Sending completeWithdrawals tx with hash: {:#?}", tx.hash); self.ethereum.send_tx(&tx) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index ed7b5b3d51..4b8b5fdeff 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -219,7 +219,7 @@ impl MockEthereum { let root = op.block.get_eth_encoded_root(); let public_data = op.block.get_eth_public_data(); let witness_data = op.block.get_eth_witness_data(); - self.sign_call_tx( + let raw_tx = self.encode_tx_data( "commitBlock", ( u64::from(op.block.block_number), @@ -229,17 +229,18 @@ impl MockEthereum { witness_data.0, witness_data.1, ), - options, - ) - .unwrap() + ); + + self.sign_prepared_tx(raw_tx, options).unwrap() } - Action::Verify { proof } => self - .sign_call_tx( + Action::Verify { proof } => { + let raw_tx = self.encode_tx_data( "verifyBlock", (u64::from(op.block.block_number), *proof.clone()), - options, - ) - .unwrap(), + ); + + self.sign_prepared_tx(raw_tx, options).unwrap() + } } } } @@ -265,10 +266,13 @@ impl EthereumInterface for MockEthereum { Ok(()) } - fn sign_call_tx( + fn encode_tx_data(&self, _func: &str, params: P) -> Vec { + ethabi::encode(params.into_tokens().as_ref()) + } + + fn sign_prepared_tx( 
&self, - _func: &str, - params: P, + raw_tx: Vec, options: Options, ) -> Result { let gas_price = options.gas_price.unwrap_or(self.gas_price); @@ -276,10 +280,10 @@ impl EthereumInterface for MockEthereum { // Nonce and gas_price are appended to distinguish the same transactions // with different gas by their hash in tests. - let mut raw_tx = ethabi::encode(params.into_tokens().as_ref()); - raw_tx.append(&mut ethabi::encode(gas_price.into_tokens().as_ref())); - raw_tx.append(&mut ethabi::encode(nonce.into_tokens().as_ref())); - let hash = Self::fake_sha256(raw_tx.as_ref()); // Okay for test purposes. + let mut data_for_hash = raw_tx.clone(); + data_for_hash.append(&mut ethabi::encode(gas_price.into_tokens().as_ref())); + data_for_hash.append(&mut ethabi::encode(nonce.into_tokens().as_ref())); + let hash = Self::fake_sha256(data_for_hash.as_ref()); // Okay for test purposes. Ok(SignedCallResult { raw_tx, diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 9c9e3b84af..ae3416789a 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -237,13 +237,13 @@ fn operation_commitment_workflow() { let mut options = Options::default(); let nonce = operations.len().into(); options.nonce = Some(nonce); + let raw_tx = eth_sender.ethereum.encode_tx_data( + "completeWithdrawals", + models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, + ); let tx = eth_sender .ethereum - .sign_call_tx( - "completeWithdrawals", - models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - options, - ) + .sign_prepared_tx(raw_tx, options) .unwrap(); eth_sender.ethereum.assert_sent_by_hash(&tx.hash); From 80a2cce55066cf744408fb3a75bb608cdcd73ba0 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 20 Mar 2020 09:23:02 +0300 Subject: [PATCH 050/186] Add a table for eth operation stats --- Makefile | 6 +- ...ert-eth-nonce.sh => db-insert-eth-data.sh} | 9 ++- core/server/src/eth_sender/mod.rs | 8 ++ .../2020-03-20-055334_add_eth_stats/down.sql | 2 + .../2020-03-20-055334_add_eth_stats/up.sql | 8 ++ core/storage/src/ethereum/mod.rs | 80 ++++++++++++++++++- core/storage/src/ethereum/records.rs | 9 +++ core/storage/src/schema.rs | 10 +++ core/storage/src/tests/ethereum.rs | 40 +++++++++- 9 files changed, 162 insertions(+), 10 deletions(-) rename bin/{db-insert-eth-nonce.sh => db-insert-eth-data.sh} (76%) create mode 100644 core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql create mode 100644 core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql diff --git a/Makefile b/Makefile index de23989bb5..fab13cc77c 100644 --- a/Makefile +++ b/Makefile @@ -49,10 +49,10 @@ db-setup: db-insert-contract: @bin/db-insert-contract.sh -db-insert-eth-nonce: - @bin/db-insert-eth-nonce.sh +db-insert-eth-data: + @bin/db-insert-eth-data.sh -db-reset: confirm_action db-wait db-drop db-setup db-insert-contract db-insert-eth-nonce +db-reset: confirm_action db-wait db-drop db-setup db-insert-contract db-insert-eth-data @echo database is ready db-migrate: confirm_action diff --git a/bin/db-insert-eth-nonce.sh b/bin/db-insert-eth-data.sh similarity index 76% rename from bin/db-insert-eth-nonce.sh rename to bin/db-insert-eth-data.sh index 2511ebe844..2ef6622f12 100755 --- a/bin/db-insert-eth-nonce.sh +++ b/bin/db-insert-eth-data.sh @@ -26,4 +26,11 @@ psql "$DATABASE_URL" -c "INSERT INTO eth_nonce (nonce) \ VALUES ('$NONCE') \ ON CONFLICT (id) DO UPDATE \ SET nonce = '$NONCE'" || exit 1 -echo "successfully inserted the Ethereum 
nonce ($NONCE) into the database" \ No newline at end of file +echo "successfully inserted the Ethereum nonce ($NONCE) into the database" + +psql "$DATABASE_URL" -c "INSERT INTO eth_stats (commit_ops, verify_ops, withdraw_ops) \ + VALUES (0, 0, 0) \ + ON CONFLICT (id) DO UPDATE \ + SET (commit_ops, verify_ops, withdraw_ops) = (0, 0, 0)" || exit 1 + +echo "successfully initialized the Ethereum stats" diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index cb99ee5b67..6c75410ff2 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -22,6 +22,7 @@ use storage::ConnectionPool; use self::database::{Database, DatabaseAccess}; use self::ethereum_interface::{EthereumHttpClient, EthereumInterface}; use self::transactions::*; +use self::tx_queue::TxQueue; mod database; mod ethereum_interface; @@ -80,6 +81,8 @@ struct ETHSender { rx_for_eth: mpsc::Receiver, /// Channel to notify about committed operations. op_notify: mpsc::Sender, + /// Queue for ordered transaction processing. + tx_queue: TxQueue, } impl ETHSender { @@ -89,16 +92,21 @@ impl ETHSender { rx_for_eth: mpsc::Receiver, op_notify: mpsc::Sender, ) -> Self { + const MAX_TXS_IN_FLIGHT: usize = 5; // TODO: Should be configurable. + let unconfirmed_ops = db .restore_state() .expect("Failed loading unconfirmed operations from the storage"); + let tx_queue = TxQueue::new(MAX_TXS_IN_FLIGHT); + Self { ethereum, unconfirmed_ops, db, rx_for_eth, op_notify, + tx_queue, } } diff --git a/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql b/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql new file mode 100644 index 0000000000..b3544a593c --- /dev/null +++ b/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS eth_stats CASCADE; \ No newline at end of file diff --git a/core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql b/core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql new file mode 100644 index 0000000000..8b924130a2 --- /dev/null +++ b/core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql @@ -0,0 +1,8 @@ +-- Your SQL goes here +CREATE TABLE eth_stats ( + -- enforce single record + id bool PRIMARY KEY NOT NULL DEFAULT true, + commit_ops BIGINT NOT NULL, + verify_ops BIGINT NOT NULL, + withdraw_ops BIGINT NOT NULL +); diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 2fbd9ac9a9..4d3723e66d 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -8,13 +8,20 @@ use web3::types::H256; // Workspace imports use models::Operation; // Local imports -use self::records::{ETHNonce, NewETHOperation, StorageETHOperation}; +use self::records::{ETHNonce, ETHStats, NewETHOperation, StorageETHOperation}; use crate::chain::operations::records::StoredOperation; use crate::schema::*; use crate::StorageProcessor; pub mod records; +#[derive(Debug, Clone, Copy)] +pub enum OperationType { + Commit, + Verify, + Withdraw, +} + /// Ethereum schema is capable of storing the information about the /// interaction with the Ethereum blockchain (mainly the list of sent /// Ethereum transactions). @@ -108,6 +115,49 @@ impl<'a> EthereumSchema<'a> { .map(drop) } + /// Updates the stats counter with the new operation reported. + /// This method should be called once **per operation**. 
It means that if transaction + /// for some operation was stuck, and another transaction was created for it, this method + /// **should not** be invoked. + /// + /// This method expects the database to be initially prepared with inserting the actual + /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that + /// and it's invoked within `db-reset` subcommand. + pub fn report_operation_creates(&self, operation_type: OperationType) -> QueryResult<()> { + self.0.conn().transaction(|| { + let mut current_stats: ETHStats = eth_stats::table.first(self.0.conn())?; + + // Increase the only one type of operations. + match operation_type { + OperationType::Commit => { + current_stats.commit_ops += 1; + } + OperationType::Verify => { + current_stats.verify_ops += 1; + } + OperationType::Withdraw => { + current_stats.withdraw_ops += 1; + } + }; + + // Update the stored stats. + update(eth_stats::table.filter(eth_stats::id.eq(true))) + .set(( + eth_stats::commit_ops.eq(current_stats.commit_ops), + eth_stats::verify_ops.eq(current_stats.verify_ops), + eth_stats::withdraw_ops.eq(current_stats.withdraw_ops), + )) + .execute(self.0.conn())?; + + Ok(()) + }) + } + + /// Loads the stored Ethereum operations stats. + pub fn load_stats(&self) -> QueryResult { + eth_stats::table.first(self.0.conn()) + } + /// Marks the stored Ethereum transaction as confirmed (and thus the associated `Operation` /// is marked as confirmed as well). pub fn confirm_eth_tx(&self, hash: &H256) -> QueryResult<()> { @@ -132,7 +182,7 @@ impl<'a> EthereumSchema<'a> { /// for the next invocation. /// /// This method expects the database to be initially prepared with inserting the actual - /// nonce value. Currently the script `db-insert-eth-nonce.sh` is responsible for that + /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that /// and it's invoked within `db-reset` subcommand. pub fn get_next_nonce(&self) -> QueryResult { let old_nonce: ETHNonce = eth_nonce::table.first(self.0.conn())?; @@ -148,17 +198,25 @@ impl<'a> EthereumSchema<'a> { Ok(old_nonce_value) } - /// Method that internally initializes the `eth_nonce` table. + /// Method that internally initializes the `eth_nonce` and `eth_stats` tables. /// Since in db tests the database is empty, we must provide a possibility /// to initialize required db fields. 
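    // Editor's sketch (illustrative usage, assuming an established `conn: StorageProcessor`;
    // it mirrors the db test added later in this patch):
    //
    //     EthereumSchema(&conn).initialize_eth_data()?;
    //     EthereumSchema(&conn).report_operation_creates(OperationType::Commit)?;
    //     assert_eq!(EthereumSchema(&conn).load_stats()?.commit_ops, 1);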
#[cfg(test)] - pub fn initialize_eth_nonce(&self) -> QueryResult<()> { + pub fn initialize_eth_data(&self) -> QueryResult<()> { #[derive(Debug, Insertable)] #[table_name = "eth_nonce"] pub struct NewETHNonce { pub nonce: i64, } + #[derive(Debug, Insertable)] + #[table_name = "eth_stats"] + pub struct NewETHStats { + pub commit_ops: i64, + pub verify_ops: i64, + pub withdraw_ops: i64, + } + let old_nonce: Option = eth_nonce::table.first(self.0.conn()).optional()?; if old_nonce.is_none() { @@ -170,6 +228,20 @@ impl<'a> EthereumSchema<'a> { .execute(self.0.conn())?; } + let old_stats: Option = eth_stats::table.first(self.0.conn()).optional()?; + + if old_stats.is_none() { + let stats = NewETHStats { + commit_ops: 0, + verify_ops: 0, + withdraw_ops: 0, + }; + + insert_into(eth_stats::table) + .values(&stats) + .execute(self.0.conn())?; + } + Ok(()) } } diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index f1333f0fa0..0c29c3a627 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -34,3 +34,12 @@ pub struct ETHNonce { pub id: bool, pub nonce: i64, } + +#[derive(Debug, Queryable, QueryableByName, PartialEq)] +#[table_name = "eth_stats"] +pub struct ETHStats { + pub id: bool, + pub commit_ops: i64, + pub verify_ops: i64, + pub withdraw_ops: i64, +} diff --git a/core/storage/src/schema.rs b/core/storage/src/schema.rs index 7b8fb28eeb..96560e89ed 100644 --- a/core/storage/src/schema.rs +++ b/core/storage/src/schema.rs @@ -102,6 +102,15 @@ table! { } } +table! { + eth_stats (id) { + id -> Bool, + commit_ops -> Int8, + verify_ops -> Int8, + withdraw_ops -> Int8, + } +} + table! { events_state (id) { id -> Int4, @@ -230,6 +239,7 @@ allow_tables_to_appear_in_same_query!( data_restore_last_watched_eth_block, eth_nonce, eth_operations, + eth_stats, events_state, executed_priority_operations, executed_transactions, diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 98c0c72c80..00111e8fac 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -10,7 +10,7 @@ use models::{ use crate::tests::db_test; use crate::{ chain::block::BlockSchema, - ethereum::{records::StorageETHOperation, EthereumSchema}, + ethereum::{records::StorageETHOperation, EthereumSchema, OperationType}, StorageProcessor, }; @@ -159,7 +159,7 @@ fn ethereum_storage() { fn eth_nonce() { let conn = StorageProcessor::establish_connection().unwrap(); db_test(conn.conn(), || { - EthereumSchema(&conn).initialize_eth_nonce()?; + EthereumSchema(&conn).initialize_eth_data()?; for expected_next_nonce in 0..5 { let actual_next_nonce = EthereumSchema(&conn).get_next_nonce()?; @@ -170,3 +170,39 @@ fn eth_nonce() { Ok(()) }); } + +/// Checks that Ethereum stats are incremented as expected. 
+#[test] +#[cfg_attr(not(feature = "db_test"), ignore)] +fn eth_stats() { + let conn = StorageProcessor::establish_connection().unwrap(); + db_test(conn.conn(), || { + EthereumSchema(&conn).initialize_eth_data()?; + + let initial_stats = EthereumSchema(&conn).load_stats()?; + + assert_eq!(initial_stats.commit_ops, 0); + assert_eq!(initial_stats.verify_ops, 0); + assert_eq!(initial_stats.withdraw_ops, 0); + + let ops_to_add = vec![ + (OperationType::Commit, 5), + (OperationType::Verify, 3), + (OperationType::Withdraw, 2), + ]; + + for (op, count) in ops_to_add.iter() { + for _ in 0..*count { + EthereumSchema(&conn).report_operation_creates(*op)?; + } + } + + let updated_stats = EthereumSchema(&conn).load_stats()?; + + assert_eq!(updated_stats.commit_ops, ops_to_add[0].1); + assert_eq!(updated_stats.verify_ops, ops_to_add[1].1); + assert_eq!(updated_stats.withdraw_ops, ops_to_add[2].1); + + Ok(()) + }); +} From 4b02ff2b7c32b858c0cbd1efd45f64e5dca3d70e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 20 Mar 2020 09:23:39 +0300 Subject: [PATCH 051/186] Rename ethereum schema method --- core/storage/src/ethereum/mod.rs | 2 +- core/storage/src/tests/ethereum.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 4d3723e66d..1f4010ab7a 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -123,7 +123,7 @@ impl<'a> EthereumSchema<'a> { /// This method expects the database to be initially prepared with inserting the actual /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that /// and it's invoked within `db-reset` subcommand. - pub fn report_operation_creates(&self, operation_type: OperationType) -> QueryResult<()> { + pub fn report_created_operation(&self, operation_type: OperationType) -> QueryResult<()> { self.0.conn().transaction(|| { let mut current_stats: ETHStats = eth_stats::table.first(self.0.conn())?; diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 00111e8fac..41d5a4eef8 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -193,7 +193,7 @@ fn eth_stats() { for (op, count) in ops_to_add.iter() { for _ in 0..*count { - EthereumSchema(&conn).report_operation_creates(*op)?; + EthereumSchema(&conn).report_created_operation(*op)?; } } From 28c64efaf8d936c931e8ffb9af2088b6d00c1563 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 10:30:30 +0200 Subject: [PATCH 052/186] Add failed state to TransactionReceipt --- js/zksync.js/src/signer.ts | 22 +++++++------- js/zksync.js/src/wallet.ts | 60 +++++++++++++++++++++++++------------- 2 files changed, 50 insertions(+), 32 deletions(-) diff --git a/js/zksync.js/src/signer.ts b/js/zksync.js/src/signer.ts index 055431d103..6f2548f58c 100644 --- a/js/zksync.js/src/signer.ts +++ b/js/zksync.js/src/signer.ts @@ -122,19 +122,19 @@ export class Signer { } } +function removeAddressPrefix(address: Address | PubKeyHash): string { + if (address.startsWith("0x")) return address.substr(2); + + if (address.startsWith("sync:")) return address.substr(5); + + throw new Error( + "ETH address must start with '0x' and PubKeyHash must start with 'sync:'" + ); +} + // PubKeyHash or eth address export function serializeAddress(address: Address | PubKeyHash): Buffer { - // prettier-ignore - const prefixlessAddress - = address.startsWith("0x") ? address.substr(2) - : address.startsWith("sync:") ? 
address.substr(5) - : null; - - if (prefixlessAddress === null) { - throw new Error( - "ETH address must start with '0x' and PubKeyHash must start with 'sync:'" - ); - } + const prefixlessAddress = removeAddressPrefix(address); const addressBytes = Buffer.from(prefixlessAddress, "hex"); if (addressBytes.length != 20) { diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index c882a72891..6e6303a19a 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -515,7 +515,8 @@ export class Wallet { } class ETHOperation { - state: "Sent" | "Mined" | "Committed" | "Verified"; + state: "Sent" | "Mined" | "Committed" | "Verified" | "Failed"; + error?: ZKSyncTxError; priorityOpId?: utils.BigNumber; constructor( @@ -544,6 +545,8 @@ class ETHOperation { } async awaitReceipt(): Promise { + this.throwErrorIfFailedState(); + await this.awaitEthereumTxCommit(); if (this.state != "Mined") return; const receipt = await this.zkSyncProvider.notifyPriorityOp( @@ -551,12 +554,14 @@ class ETHOperation { "COMMIT" ); - this.state = "Committed"; - if (!receipt.executed) { - throw new ZKSyncTxError("Priority operation failed", receipt); + this.setErrorState( + new ZKSyncTxError("Priority operation failed", receipt) + ); + this.throwErrorIfFailedState(); } + this.state = "Committed"; return receipt; } @@ -571,16 +576,22 @@ class ETHOperation { this.state = "Verified"; - if (!receipt.executed) { - throw new ZKSyncTxError("Priority operation failed", receipt); - } - return receipt; } + + private setErrorState(error: ZKSyncTxError) { + this.state = "Failed"; + this.error = error; + } + + private throwErrorIfFailedState() { + if (this.state == "Failed") throw this.error; + } } class Transaction { - state: "Sent" | "Committed" | "Verified"; + state: "Sent" | "Committed" | "Verified" | "Failed"; + error?: ZKSyncTxError; constructor( public txData, @@ -591,21 +602,26 @@ class Transaction { } async awaitReceipt(): Promise { + this.throwErrorIfFailedState(); + if (this.state !== "Sent") return; const receipt = await this.sidechainProvider.notifyTransaction( this.txHash, "COMMIT" ); - this.state = "Committed"; - if (receipt.success == false) { - throw new ZKSyncTxError( - `ZKSync transaction failed: ${receipt.failReason}`, - receipt + if (!receipt.success) { + this.setErrorState( + new ZKSyncTxError( + `ZKSync transaction failed: ${receipt.failReason}`, + receipt + ) ); + this.throwErrorIfFailedState(); } + this.state = "Committed"; return receipt; } @@ -615,15 +631,17 @@ class Transaction { this.txHash, "VERIFY" ); + this.state = "Verified"; + return receipt; + } - if (receipt.success == false) { - throw new ZKSyncTxError( - `ZKSync transaction failed: ${receipt.failReason}`, - receipt - ); - } + private setErrorState(error: ZKSyncTxError) { + this.state = "Failed"; + this.error = error; + } - return receipt; + private throwErrorIfFailedState() { + if (this.state == "Failed") throw this.error; } } From 87b2e67cc4a157832b8b1bfca237de1fe7c5485f Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 10:31:01 +0200 Subject: [PATCH 053/186] Add test for TransactionHandlle throwing --- js/tests/simple-integration-test.ts | 61 ++++++++++++++++++++++++----- 1 file changed, 52 insertions(+), 9 deletions(-) diff --git a/js/tests/simple-integration-test.ts b/js/tests/simple-integration-test.ts index 54cab5cf13..00d2a475e8 100644 --- a/js/tests/simple-integration-test.ts +++ b/js/tests/simple-integration-test.ts @@ -9,6 +9,15 @@ import { ethers, utils, Contract } from "ethers"; import 
{parseEther} from "ethers/utils"; +const WEB3_URL = process.env.WEB3_URL; +// Mnemonic for eth wallet. +const MNEMONIC = process.env.TEST_MNEMONIC; +const ERC_20TOKEN = process.env.TEST_ERC20; + +const network = process.env.ETH_NETWORK == "localhost" ? "localhost" : "testnet"; +console.log("Running integration test on the ", network, " network"); +const ethersProvider = new ethers.providers.JsonRpcProvider(WEB3_URL); + let syncProvider: Provider; async function getOperatorBalance(token: types.TokenLike, type: "committed" | "verified" = "committed") { @@ -165,6 +174,47 @@ async function testChangePubkeyOffchain(syncWallet: Wallet) { } } +async function testThrowingErrorOnTxFail(syncWalletRich: Wallet) { + let testPassed = true; + + const ethWallet = ethers.Wallet.createRandom().connect(ethersProvider); + const syncWallet = await Wallet.fromEthSigner( + ethWallet, + syncProvider, + ); + + try { + const tx = await syncWalletRich.depositToSyncFromEthereum({ + depositTo: syncWallet.address(), + token: "ETH", + amount: utils.parseEther('0.01'), + maxFeeInETHToken: utils.parseEther('0'), + approveDepositAmountForERC20: true, + }); + await tx.awaitVerifyReceipt(); + testPassed = false; + } catch (e) { + console.log(`Error (expected) on priorityop fail:${e}`.slice(0,50)); + } + + try { + const tx = await syncWallet.syncTransfer({ + to: syncWalletRich.address(), + token: "ETH", + amount: utils.parseEther('0.01'), + fee: utils.parseEther('10'), + }); + await tx.awaitVerifyReceipt(); + testPassed = false; + } catch (e) { + console.log('Error (expected) on sync tx fail:', e); + } + + if (!testPassed) { + throw new Error("testThrowingErrorOnTxFail failed"); + } +} + async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: Wallet, syncWallet1: Wallet, syncWallet2: Wallet, token: types.TokenLike, depositAmountETH: string) { const depositAmount = utils.parseEther(depositAmountETH); @@ -193,17 +243,8 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: (async () => { try { - const WEB3_URL = process.env.WEB3_URL; - // Mnemonic for eth wallet. - const MNEMONIC = process.env.TEST_MNEMONIC; - const ERC_20TOKEN = process.env.TEST_ERC20; - - const network = process.env.ETH_NETWORK == "localhost" ? 
"localhost" : "testnet"; - console.log("Running integration test on the ", network, " network"); - syncProvider = await Provider.newWebsocketProvider(process.env.WS_API_ADDR); - const ethersProvider = new ethers.providers.JsonRpcProvider(WEB3_URL); const ethProxy = new ETHProxy(ethersProvider, syncProvider.contractAddress); const ethWallet = ethers.Wallet.fromMnemonic( @@ -239,6 +280,8 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: syncProvider, ); + await testThrowingErrorOnTxFail(syncWalletRich); + await moveFunds(contract, ethProxy, syncWalletRich, syncWallet, syncWallet2, ERC_20TOKEN, "0.018"); await moveFunds(contract, ethProxy, syncWalletRich, syncWallet, syncWallet3, "ETH", "0.018"); From 5f82d43ae2e928241e2da74e5174357b313170aa Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 20 Mar 2020 13:08:26 +0300 Subject: [PATCH 054/186] Merge eth-related migrations into one --- .../migrations/2020-03-19-042712_add_eth_nonce/down.sql | 2 -- .../migrations/2020-03-19-042712_add_eth_nonce/up.sql | 6 ------ .../migrations/2020-03-19-042712_more_eth_data/down.sql | 3 +++ .../up.sql | 8 ++++++++ .../migrations/2020-03-20-055334_add_eth_stats/down.sql | 2 -- 5 files changed, 11 insertions(+), 10 deletions(-) delete mode 100644 core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql delete mode 100644 core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql create mode 100644 core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql rename core/storage/migrations/{2020-03-20-055334_add_eth_stats => 2020-03-19-042712_more_eth_data}/up.sql (52%) delete mode 100644 core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql diff --git a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql deleted file mode 100644 index c932806e59..0000000000 --- a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP TABLE IF EXISTS eth_nonce CASCADE; \ No newline at end of file diff --git a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql b/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql deleted file mode 100644 index c50f650707..0000000000 --- a/core/storage/migrations/2020-03-19-042712_add_eth_nonce/up.sql +++ /dev/null @@ -1,6 +0,0 @@ --- Your SQL goes here -CREATE TABLE eth_nonce ( - -- enforce single record - id bool PRIMARY KEY NOT NULL DEFAULT true, - nonce BIGINT NOT NULL -); diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql new file mode 100644 index 0000000000..f686943a1a --- /dev/null +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS eth_nonce CASCADE; +DROP TABLE IF EXISTS eth_stats CASCADE; diff --git a/core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql similarity index 52% rename from core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql rename to core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql index 8b924130a2..0046e957e2 100644 --- a/core/storage/migrations/2020-03-20-055334_add_eth_stats/up.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql @@ -1,4 +1,12 @@ -- Your SQL goes here +-- Locally stored Ethereum nonce 
+CREATE TABLE eth_nonce ( + -- enforce single record + id bool PRIMARY KEY NOT NULL DEFAULT true, + nonce BIGINT NOT NULL +); + +-- Gathered operations statistics CREATE TABLE eth_stats ( -- enforce single record id bool PRIMARY KEY NOT NULL DEFAULT true, diff --git a/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql b/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql deleted file mode 100644 index b3544a593c..0000000000 --- a/core/storage/migrations/2020-03-20-055334_add_eth_stats/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- This file should undo anything in `up.sql` -DROP TABLE IF EXISTS eth_stats CASCADE; \ No newline at end of file From e05e16876b2c6914c46e20bd5428e986a6876817 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 20 Mar 2020 14:03:02 +0300 Subject: [PATCH 055/186] Make eth_sender use tx queue and update most of the tests --- core/server/src/eth_sender/database.rs | 32 +- core/server/src/eth_sender/mod.rs | 225 ++++++++----- core/server/src/eth_sender/tests/mock.rs | 86 +++-- core/server/src/eth_sender/tests/mod.rs | 306 ++++++++++-------- core/server/src/eth_sender/transactions.rs | 28 +- .../src/eth_sender/tx_queue/counter_queue.rs | 4 +- core/server/src/eth_sender/tx_queue/mod.rs | 84 +++-- .../src/eth_sender/tx_queue/sparse_queue.rs | 4 +- 8 files changed, 466 insertions(+), 303 deletions(-) diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index 8a7fcf56ed..e82e558895 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -12,7 +12,7 @@ use web3::types::H256; // Workspace uses use storage::ConnectionPool; // Local uses -use super::transactions::{OperationETHState, TransactionETHState}; +use super::transactions::{ETHStats, OperationETHState, OperationType, TransactionETHState}; /// Abstract database access trait, optimized for the needs of `ETHSender`. pub(super) trait DatabaseAccess { @@ -27,6 +27,20 @@ pub(super) trait DatabaseAccess { /// Gets the next nonce to use from the database. fn next_nonce(&self) -> Result; + + /// Loads the stored Ethereum operations stats. + fn load_stats(&self) -> Result; + + /// Updates the stats counter with the new operation reported. + /// This method should be called once **per operation**. It means that if transaction + /// for some operation was stuck, and another transaction was created for it, this method + /// **should not** be invoked. + /// + /// This method expects the database to be initially prepared with inserting the actual + /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that + /// and it's invoked within `db-reset` subcommand. + fn report_created_operation(&self, operation_type: OperationType) + -> Result<(), failure::Error>; } /// The actual database wrapper. @@ -82,4 +96,20 @@ impl DatabaseAccess for Database { let storage = self.db_pool.access_storage()?; Ok(storage.ethereum_schema().get_next_nonce()?) } + + fn load_stats(&self) -> Result { + let storage = self.db_pool.access_storage()?; + let stats = storage.ethereum_schema().load_stats()?; + Ok(stats.into()) + } + + fn report_created_operation( + &self, + operation_type: OperationType, + ) -> Result<(), failure::Error> { + let storage = self.db_pool.access_storage()?; + Ok(storage + .ethereum_schema() + .report_created_operation(operation_type)?) 
+ } } diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 6c75410ff2..1fa8588578 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -13,7 +13,6 @@ use tokio::time; use web3::contract::Options; use web3::types::{TransactionReceipt, U256}; // Workspace uses -use eth_client::SignedCallResult; use models::config_options::{ConfigurationOptions, ThreadPanicNotify}; use models::node::config; use models::{Action, ActionType, Operation}; @@ -22,7 +21,7 @@ use storage::ConnectionPool; use self::database::{Database, DatabaseAccess}; use self::ethereum_interface::{EthereumHttpClient, EthereumInterface}; use self::transactions::*; -use self::tx_queue::TxQueue; +use self::tx_queue::{TxData, TxQueue, TxQueueBuilder}; mod database; mod ethereum_interface; @@ -71,8 +70,8 @@ const WAIT_CONFIRMATIONS: u64 = 1; /// erroneous conditions. Failure handling policy is determined by a corresponding callback, /// which can be changed if needed. struct ETHSender { - /// Unconfirmed operations queue. - unconfirmed_ops: VecDeque, + /// Ongoing operations queue. + ongoing_ops: VecDeque, /// Connection to the database. db: DB, /// Ethereum intermediator. @@ -92,17 +91,26 @@ impl ETHSender { rx_for_eth: mpsc::Receiver, op_notify: mpsc::Sender, ) -> Self { - const MAX_TXS_IN_FLIGHT: usize = 5; // TODO: Should be configurable. + const MAX_TXS_IN_FLIGHT: usize = 1; // TODO: Should be configurable. - let unconfirmed_ops = db + let ongoing_ops = db .restore_state() .expect("Failed loading unconfirmed operations from the storage"); - let tx_queue = TxQueue::new(MAX_TXS_IN_FLIGHT); + let stats = db + .load_stats() + .expect("Failed loading ETH operations stats"); + + let tx_queue = TxQueueBuilder::new(MAX_TXS_IN_FLIGHT) + .with_sent_pending_txs(ongoing_ops.len()) + .with_commit_operations_count(stats.commit_ops) + .with_verify_operations_count(stats.verify_ops) + .with_withdraw_operations_count(stats.withdraw_ops) + .build(); Self { ethereum, - unconfirmed_ops, + ongoing_ops, db, rx_for_eth, op_notify, @@ -120,47 +128,77 @@ impl ETHSender { timer.tick().await; // ...and proceed them. - self.proceed_next_operation(); + self.proceed_next_operations(); } } - /// Obtains all the available operations to commit through the channel - /// and stores them within self for further processing. fn retrieve_operations(&mut self) { while let Ok(Some(operation)) = self.rx_for_eth.try_next() { - self.unconfirmed_ops.push_back(OperationETHState { - operation, - txs: Vec::new(), - }); + self.add_operation_to_queue(operation); } } - fn proceed_next_operation(&mut self) { + fn proceed_next_operations(&mut self) { + while let Some(tx) = self.tx_queue.pop_front() { + self.initialize_operation(tx).unwrap_or_else(|e| { + warn!("Error while trying to complete uncommitted op: {}", e); + }); + } + // Commit the next operation (if any). - if let Some(current_op) = self.unconfirmed_ops.pop_front() { + // TODO: should not be `if let`, but rather `while let`. + if let Some(current_op) = self.ongoing_ops.pop_front() { self.try_commit(current_op); } } - /// Attempts to commit the provided operation to the Ethereum blockchain. - /// - /// The strategy is the following: - /// - First we check the transactions associated with the operation. - /// If there are none, we create and send one, storing it locally. No more - /// processing at this step; we need to wait. - /// If there are some transactions, we check their state. 
If one of them - /// is committed and has enough approvals, we're all good. - /// Otherwise, we check if the last pending transaction is "stuck", meaning - /// that it is not being included in a block for a decent amount of time. If - /// so, we create a new transaction (with increased gas) and send it. - /// - If there was no confirmation of a transaction in a previous step, we return - /// the operation to the beginning of the unprocessed operations queue. We will - /// check it again after some time. - /// - If transaction was confirmed, there may be two possible outcomes: - /// 1. Transaction is executed successfully. Desirable outcome, in which we - /// consider the commitment completed and notify about it through the channel. - /// 2. Transaction erred. This should never happen, but if so, such an incident is - /// reported according to the chosen failure report policy. + fn initialize_operation(&mut self, tx: TxData) -> Result<(), failure::Error> { + let current_block = self.ethereum.block_number()?; + let deadline_block = self.get_deadline_block(current_block); + + if let Some(operation) = tx.operation { + let mut eth_op = OperationETHState { + operation, + txs: Vec::new(), + }; + + let new_tx = + self.sign_raw_tx(eth_op.operation.id.unwrap(), tx.raw, deadline_block, None)?; + + self.db.save_unconfirmed_operation(&new_tx)?; + self.db.report_created_operation( + self.operation_type_for_action(ð_op.operation.action), + )?; + + eth_op.txs.push(new_tx.clone()); + info!( + "Sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", + new_tx.op_id, new_tx.signed_tx.hash, new_tx.signed_tx.nonce, + ); + self.ethereum.send_tx(&new_tx.signed_tx)?; + + self.ongoing_ops.push_back(eth_op); + } else { + let mut options = Options::default(); + let nonce = self.db.next_nonce()?; + options.nonce = Some(nonce.into()); + + let tx = self + .ethereum + .sign_prepared_tx(tx.raw, options) + .map_err(|e| failure::format_err!("Failed to sign a prepared tx: {}", e))?; + + // TODO: Operations w/o `Operation` field (e.g. withdrawals) should be stored to the DB as well. + // self.db + // .report_created_operation(self.operation_type_for_action(&op.operation.action))?; + + info!("Sending tx with hash: {:#?}", tx.hash); + self.ethereum.send_tx(&tx)?; + } + + Ok(()) + } + fn try_commit(&mut self, mut operation: OperationETHState) { // Check the transactions associated with the operation, and send a new one if required. @@ -185,6 +223,9 @@ impl ETHSender { operation.operation.block.block_number, ); + // Free a slot for the next tx in the queue. + self.tx_queue.report_commitment(); + if operation.operation.action.get_type() == ActionType::VERIFY { // We notify about verify only when commit is confirmed on the Ethereum. self.op_notify @@ -193,33 +234,25 @@ impl ETHSender { .unwrap_or_default(); // Complete pending withdrawals after each verify. - self.call_complete_withdrawals() - .map_err(|e| { - warn!("Error: {}", e); - }) - .unwrap_or_default(); + self.add_complete_withdrawals_to_queue(); } } OperationCommitment::Pending => { // Retry the operation again the next time. - self.unconfirmed_ops.push_front(operation); + self.ongoing_ops.push_front(operation); } } } - /// Checks the state of the operation commitment, choosing the necessary action to perform. - /// Initially this method sends the first transaction to the Ethereum blockchain. - /// Within next invocations for the same operation, state of sent transaction is checked. - /// If transaction(s) will be pending yet, this method won't do anything. 
- /// If one of transactions will be successfully confirmed on chain, the commitment will be considered - /// finished. - /// In case of stuck transaction, another transaction with increased gas limit will be sent. - /// In case of transaction failure, it will be reported and processed according to failure handling - /// policy. fn perform_commitment_step( &mut self, op: &mut OperationETHState, ) -> Result { + assert!( + !op.txs.is_empty(), + "OperationETHState should have at least one transaction" + ); + let current_block = self.ethereum.block_number()?; // Check statuses of existing transactions. @@ -262,18 +295,29 @@ impl ETHSender { } } - // Reaching this point will mean that either there were no transactions to process, - // or the latest transaction got stuck. - // Either way we should create a new transaction (the approach is the same, - // `sign_new_tx` will adapt its logic based on `last_stuck_tx`). + // Reaching this point will mean that the latest transaction got stuck. + // We should create another tx based on it, and send it. + assert!( + last_stuck_tx.is_some(), + "Loop didn't exit without a stuck tx" + ); let deadline_block = self.get_deadline_block(current_block); - let new_tx = self.sign_new_tx(&op.operation, deadline_block, last_stuck_tx)?; + // Raw tx contents are the same for every transaction, so we just + // clone them from the first tx. + let raw_tx = op.txs[0].signed_tx.raw_tx.clone(); + let new_tx = self.sign_raw_tx( + op.operation.id.unwrap(), + raw_tx, + deadline_block, + last_stuck_tx, + )?; // New transaction should be persisted in the DB *before* sending it. self.db.save_unconfirmed_operation(&new_tx)?; + // Since we're processing the stuck operation, no need to invoke `report_created_operation`. op.txs.push(new_tx.clone()); info!( - "Sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", + "Stuck tx processing: sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", new_tx.op_id, new_tx.signed_tx.hash, new_tx.signed_tx.nonce, ); self.ethereum.send_tx(&new_tx.signed_tx)?; @@ -281,6 +325,13 @@ impl ETHSender { Ok(OperationCommitment::Pending) } + fn operation_type_for_action(&self, action: &Action) -> OperationType { + match action { + Action::Commit => OperationType::Commit, + Action::Verify { .. } => OperationType::Verify, + } + } + /// Handles a transaction execution failure by reporting the issue to the log /// and terminating the node. fn failure_handler(&self, receipt: &TransactionReceipt) -> ! { @@ -337,9 +388,10 @@ impl ETHSender { /// Creates a new transaction. If stuck tx is provided, the new transaction will be /// and updated version of it; otherwise a brand new transaction will be created. - fn sign_new_tx( + fn sign_raw_tx( &self, - op: &Operation, + op_id: i64, + raw_tx: Vec, deadline_block: u64, stuck_tx: Option<&TransactionETHState>, ) -> Result { @@ -352,9 +404,9 @@ impl ETHSender { options }; - let signed_tx = self.sign_operation_tx(op, tx_options)?; + let signed_tx = self.ethereum.sign_prepared_tx(raw_tx, tx_options)?; Ok(TransactionETHState { - op_id: op.id.unwrap(), + op_id, deadline_block, signed_tx, }) @@ -391,13 +443,8 @@ impl ETHSender { })) } - /// Creates a signed transaction according to the operation action. 
- fn sign_operation_tx( - &self, - op: &Operation, - tx_options: Options, - ) -> Result { - let raw_tx = match &op.action { + fn operation_to_raw_tx(&self, op: &Operation) -> Vec { + match &op.action { Action::Commit => { let root = op.block.get_eth_encoded_root(); @@ -431,33 +478,41 @@ impl ETHSender { } Action::Verify { proof } => { // function verifyBlock(uint32 _blockNumber, uint256[8] calldata proof) external { - self.ethereum.encode_tx_data( - "verifyBlock", - (u64::from(op.block.block_number), *proof.clone()), - ) + let block_number = op.block.block_number; + self.ethereum + .encode_tx_data("verifyBlock", (u64::from(block_number), *proof.clone())) } - }; + } + } - self.ethereum.sign_prepared_tx(raw_tx, tx_options) + fn add_operation_to_queue(&mut self, op: Operation) { + let raw_tx = self.operation_to_raw_tx(&op); + + match &op.action { + Action::Commit => { + self.tx_queue + .add_commit_operation(TxData::from_operation(op, raw_tx)); + } + Action::Verify { proof } => { + let block_number = op.block.block_number; + + self.tx_queue.add_verify_operation( + block_number as usize, + TxData::from_operation(op, raw_tx), + ); + } + } } - fn call_complete_withdrawals(&self) -> Result<(), failure::Error> { + fn add_complete_withdrawals_to_queue(&mut self) { // function completeWithdrawals(uint32 _n) external { - let mut options = Options::default(); - let nonce = self.db.next_nonce()?; - options.nonce = Some(nonce.into()); - let raw_tx = self.ethereum.encode_tx_data( "completeWithdrawals", config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, ); - let tx = self - .ethereum - .sign_prepared_tx(raw_tx, options) - .map_err(|e| failure::format_err!("completeWithdrawals: {}", e))?; - info!("Sending completeWithdrawals tx with hash: {:#?}", tx.hash); - self.ethereum.send_tx(&tx) + self.tx_queue + .add_withdraw_operation(TxData::from_raw(raw_tx)); } } diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 4b8b5fdeff..85a3e9a427 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -9,12 +9,14 @@ use web3::contract::{tokens::Tokenize, Options}; use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; -use models::{Action, Operation}; +use models::Operation; // Local uses use super::ETHSender; use crate::eth_sender::database::DatabaseAccess; use crate::eth_sender::ethereum_interface::EthereumInterface; -use crate::eth_sender::transactions::{ExecutedTxStatus, OperationETHState, TransactionETHState}; +use crate::eth_sender::transactions::{ + ETHStats, ExecutedTxStatus, OperationETHState, OperationType, TransactionETHState, +}; const CHANNEL_CAPACITY: usize = 16; @@ -25,18 +27,21 @@ pub(super) struct MockDatabase { unconfirmed_operations: RefCell>, confirmed_operations: RefCell>, nonce: Cell, + stats: RefCell, } impl MockDatabase { /// Creates a database with emulation of previously stored uncommitted requests. 
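+    /// The mock nonce is initialized to the total number of transactions found in
+    /// `restore_state`, and `load_stats` initially returns the provided `stats`.
+    /// Illustrative call (the values are arbitrary):
+    /// `MockDatabase::with_restorable_state(stored_operations, ETHStats::default())`.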
pub fn with_restorable_state( restore_state: impl IntoIterator, + stats: ETHStats, ) -> Self { let restore_state: VecDeque<_> = restore_state.into_iter().collect(); let nonce = restore_state.iter().fold(0, |acc, op| acc + op.txs.len()); Self { restore_state, nonce: Cell::new(nonce as i64), + stats: RefCell::new(stats), ..Default::default() } } @@ -120,6 +125,31 @@ impl DatabaseAccess for MockDatabase { Ok(old_value) } + + fn load_stats(&self) -> Result { + Ok(self.stats.borrow().clone()) + } + + fn report_created_operation( + &self, + operation_type: OperationType, + ) -> Result<(), failure::Error> { + let mut stats = self.stats.borrow_mut(); + + match operation_type { + OperationType::Commit => { + stats.commit_ops += 1; + } + OperationType::Verify => { + stats.verify_ops += 1; + } + OperationType::Withdraw => { + stats.withdraw_ops += 1; + } + } + + Ok(()) + } } /// Mock Ethereum client is capable of recording all the incoming requests for the further analysis. @@ -182,7 +212,7 @@ impl MockEthereum { /// Increments the blocks by a provided `confirmations` and marks the sent transaction /// as a success. - pub fn add_successfull_execution(&mut self, tx: &TransactionETHState, confirmations: u64) { + pub fn add_successfull_execution(&mut self, tx_hash: H256, confirmations: u64) { self.block_number += confirmations; let status = ExecutedTxStatus { @@ -190,9 +220,7 @@ impl MockEthereum { success: true, receipt: None, }; - self.tx_statuses - .borrow_mut() - .insert(tx.signed_tx.hash, status); + self.tx_statuses.borrow_mut().insert(tx_hash, status); } /// Same as `add_successfull_execution`, but marks the transaction as a failure. @@ -208,41 +236,6 @@ impl MockEthereum { .borrow_mut() .insert(tx.signed_tx.hash, status); } - - /// Replicates the `ETHCLient::sign_operation_tx` method for testing. - pub fn create_signed_tx_replica(&self, op: &Operation, nonce: i64) -> SignedCallResult { - let mut options = Options::default(); - options.nonce = Some(nonce.into()); - - match &op.action { - Action::Commit => { - let root = op.block.get_eth_encoded_root(); - let public_data = op.block.get_eth_public_data(); - let witness_data = op.block.get_eth_witness_data(); - let raw_tx = self.encode_tx_data( - "commitBlock", - ( - u64::from(op.block.block_number), - u64::from(op.block.fee_account), - root, - public_data, - witness_data.0, - witness_data.1, - ), - ); - - self.sign_prepared_tx(raw_tx, options).unwrap() - } - Action::Verify { proof } => { - let raw_tx = self.encode_tx_data( - "verifyBlock", - (u64::from(op.block.block_number), *proof.clone()), - ); - - self.sign_prepared_tx(raw_tx, options).unwrap() - } - } - } } impl EthereumInterface for MockEthereum { @@ -301,20 +294,21 @@ pub(super) fn default_eth_sender() -> ( mpsc::Sender, mpsc::Receiver, ) { - restored_eth_sender(Vec::new()) + restored_eth_sender(Vec::new(), Default::default()) } /// Creates an `ETHSender` with mock Ethereum connection/database and restores its state "from DB". /// Returns the `ETHSender` itself along with communication channels to interact with it. 
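+/// Note that the restored operations are what `restore_state` returns, but they are
+/// not automatically saved as unconfirmed operations; tests that need that (as
+/// `restore_state` does) persist them explicitly with `save_unconfirmed_operation`.
+/// Rough usage sketch, with illustrative stats values:
+/// `restored_eth_sender(stored_operations, ETHStats { commit_ops: 1, verify_ops: 1, withdraw_ops: 1 })`.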
pub(super) fn restored_eth_sender( restore_state: impl IntoIterator, + stats: ETHStats, ) -> ( ETHSender, mpsc::Sender, mpsc::Receiver, ) { let ethereum = MockEthereum::default(); - let db = MockDatabase::with_restorable_state(restore_state); + let db = MockDatabase::with_restorable_state(restore_state, stats); let (operation_sender, operation_receiver) = mpsc::channel(CHANNEL_CAPACITY); let (notify_sender, notify_receiver) = mpsc::channel(CHANNEL_CAPACITY); @@ -338,7 +332,11 @@ pub(super) fn create_signed_tx( let mut options = Options::default(); options.nonce = Some(nonce.into()); - let signed_tx = eth_sender.sign_operation_tx(operation, options).unwrap(); + let raw_tx = eth_sender.operation_to_raw_tx(&operation); + let signed_tx = eth_sender + .ethereum + .sign_prepared_tx(raw_tx, options) + .unwrap(); TransactionETHState { op_id: operation.id.unwrap(), diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index ae3416789a..60e20ff8c4 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -3,8 +3,11 @@ use web3::contract::Options; // Local uses use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; use super::{ + database::DatabaseAccess, ethereum_interface::EthereumInterface, - transactions::{ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome}, + transactions::{ + ETHStats, ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome, + }, ETHSender, }; @@ -17,7 +20,7 @@ fn basic_test() { let (eth_sender, _, _) = default_eth_sender(); // Check that there are no unconfirmed operations by default. - assert!(eth_sender.unconfirmed_ops.is_empty()); + assert!(eth_sender.ongoing_ops.is_empty()); } /// Check for the gas scaling: gas is expected to be increased by 15% or set equal @@ -53,32 +56,6 @@ fn deadline_block() { ); } -/// Checks that `ETHSender` invokes `EthereumInterface::sign_call_tx` to obtain the -/// raw transaction contents (and does not mess with it). -#[test] -fn tx_creation() { - let (eth_sender, _, _) = default_eth_sender(); - - let operations = vec![ - test_data::commit_operation(0), - test_data::verify_operation(0), - ]; - - for (nonce, operation) in operations.iter().enumerate() { - let actual_tx = eth_sender - .sign_new_tx( - &operation, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - None, - ) - .unwrap(); - let expected_tx = eth_sender - .ethereum - .create_signed_tx_replica(&operation, nonce as i64); - assert_eq!(actual_tx.signed_tx, expected_tx); - } -} - /// Checks that received transaction response is reduced to the /// `TxCheckOutcome` correctly. /// @@ -96,7 +73,8 @@ fn transaction_state() { test_data::commit_operation(4), // Will be pending due no response. ] .iter() - .map(|op| eth_sender.sign_new_tx(op, deadline_block, None).unwrap()) + .enumerate() + .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) .collect(); // Committed operation. @@ -211,7 +189,7 @@ fn operation_commitment_workflow() { // Retrieve it there and then process. eth_sender.retrieve_operations(); - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); // Now we should see that transaction is stored in the database and sent to the Ethereum. let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); @@ -226,14 +204,15 @@ fn operation_commitment_workflow() { // operation again. 
eth_sender .ethereum - .add_successfull_execution(&expected_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); // Check that operation is confirmed. eth_sender.db.assert_confirmed(&expected_tx); } - // Check that `completeWithdrawals` transaction is sent. + // Process the next operation and check that `completeWithdrawals` transaction is sent. + eth_sender.proceed_next_operations(); let mut options = Options::default(); let nonce = operations.len().into(); options.nonce = Some(nonce); @@ -268,7 +247,7 @@ fn stuck_transaction() { sender.try_send(operation.clone()).unwrap(); eth_sender.retrieve_operations(); - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); @@ -276,12 +255,14 @@ fn stuck_transaction() { // Skip some blocks and expect sender to send a new tx. eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); // Check that new transaction is sent (and created based on the previous stuck tx). + let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); let expected_tx = eth_sender - .sign_new_tx( - &operation, + .sign_raw_tx( + stuck_tx.op_id, + raw_tx, eth_sender.get_deadline_block(eth_sender.ethereum.block_number), Some(&stuck_tx), ) @@ -293,86 +274,110 @@ fn stuck_transaction() { // operation again. eth_sender .ethereum - .add_successfull_execution(&expected_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); // Check that operation is confirmed. eth_sender.db.assert_confirmed(&expected_tx); } -/// This test verifies that with multiple operations received all-together, -/// their order is respected and no processing of the next operation is started until -/// the previous one is committed. -#[test] -fn operations_order() { - let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); - - // We send multiple the operations at once to the channel. - let operations_count = 3; - let mut operations = Vec::new(); - let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; - let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; - operations.extend_from_slice(commit_operations); - operations.extend_from_slice(verify_operations); - - // Also we create the list of expected transactions. - let mut expected_txs = Vec::new(); - // Create expected txs from the commit operations. - for (idx, operation) in commit_operations.iter().enumerate() { - // We start from the 1 block, and step logic is: - // N blocks to confirm, repeated `idx` times. - let start_block = 1 + super::WAIT_CONFIRMATIONS * idx as u64; - let deadline_block = eth_sender.get_deadline_block(start_block); - let nonce = idx; - - let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); - - expected_txs.push(expected_tx); - } - - // Create expected txs from the verify operations. 
- for (idx, operation) in verify_operations.iter().enumerate() { - let start_block = 1 + super::WAIT_CONFIRMATIONS * (commit_operations.len() + idx) as u64; - let deadline_block = eth_sender.get_deadline_block(start_block); - // For verify operations the logic for nonce is slightly different: - // After each verify operation we send the withdraw operation as well, - // thus every verify operation increases the nonce by two. - let nonce = commit_operations.len() + idx * 2; - - let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); - - expected_txs.push(expected_tx); - } - - for operation in operations.iter() { - sender.try_send(operation.clone()).unwrap(); - } - eth_sender.retrieve_operations(); - - // Then we go through the operations and check that the order of operations is preserved. - for (idx, tx) in expected_txs.iter().enumerate() { - eth_sender.proceed_next_operation(); - - // Check that current expected tx is stored, but the next ones are not. - eth_sender.db.assert_stored(tx); - eth_sender.ethereum.assert_sent(tx); - - for following_tx in expected_txs[idx + 1..].iter() { - eth_sender.db.assert_not_stored(following_tx) - } - - eth_sender - .ethereum - .add_successfull_execution(tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); - eth_sender.db.assert_confirmed(tx); - } - - // We should be notified about all the verify operations being completed. - for _ in 0..operations_count { - assert!(receiver.try_next().unwrap().is_some()); - } -} +// TODO: Restore once withdraw operations are fixed in `eth_sender`. +// Currently this test is too hard to implement, since withdraw txs are not stored in the database. +// /// This test verifies that with multiple operations received all-together, +// /// their order is respected and no processing of the next operation is started until +// /// the previous one is committed. +// #[test] +// fn operations_order() { +// let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); + +// // We send multiple the operations at once to the channel. +// let operations_count = 3; +// let mut operations = Vec::new(); +// let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; +// let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; +// operations.extend_from_slice(commit_operations); +// operations.extend_from_slice(verify_operations); + +// // Also we create the list of expected transactions. +// let mut expected_txs = Vec::new(); + +// // Create expected txs from all the operations. +// for (idx, (commit_operation, verify_operation)) in +// commit_operations.iter().zip(verify_operations).enumerate() +// { +// // Create the commit operation. +// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; +// let deadline_block = eth_sender.get_deadline_block(start_block); +// let nonce = idx * 3; + +// let commit_op_tx = +// create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); + +// expected_txs.push(commit_op_tx); + +// // Create the verify operation, as by priority it will be processed right after `commit`. 
+// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; +// let deadline_block = eth_sender.get_deadline_block(start_block); +// let nonce = idx * 3 + 1; + +// let verify_op_tx = +// create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); + +// expected_txs.push(verify_op_tx); +// } + +// for operation in operations.iter() { +// sender.try_send(operation.clone()).unwrap(); +// } +// eth_sender.retrieve_operations(); + +// // Then we go through the operations and check that the order of operations is preserved. +// for (idx, tx) in expected_txs.iter().enumerate() { +// eth_sender.proceed_next_operations(); + +// // Check that current expected tx is stored, but the next ones are not. +// eth_sender.db.assert_stored(tx); +// eth_sender.ethereum.assert_sent(tx); + +// for following_tx in expected_txs[idx + 1..].iter() { +// eth_sender.db.assert_not_stored(following_tx) +// } + +// eth_sender +// .ethereum +// .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); +// eth_sender.db.assert_confirmed(tx); + +// if idx % 2 == 1 { +// // For every verify operation, we should also add a withdraw operation and process it. +// let raw_tx = eth_sender.ethereum.encode_tx_data( +// "completeWithdrawals", +// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, +// ); + +// let nonce = (idx / 2) * 3 + 2; +// let mut options = Options::default(); +// options.nonce = Some(nonce.into()); + +// let signed_tx = eth_sender +// .ethereum +// .sign_prepared_tx(raw_tx, options) +// .unwrap(); + +// eth_sender +// .ethereum +// .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); +// eth_sender.proceed_next_operations(); +// } +// } + +// // We should be notified about all the verify operations being completed. +// for _ in 0..operations_count { +// assert!(receiver.try_next().unwrap().is_some()); +// } +// } /// Check that upon a transaction failure the incident causes a panic by default. #[test] @@ -389,45 +394,78 @@ fn transaction_failure() { let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); eth_sender.retrieve_operations(); - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); eth_sender .ethereum .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); } /// Check that after recovering state with several non-processed operations /// they will be processed normally. #[test] fn restore_state() { - let operations = vec![ - test_data::commit_operation(0), - test_data::verify_operation(0), - ]; + let (operations, stored_operations) = { + // This `eth_sender` is required to generate the input only. 
+ let (eth_sender, _, _) = default_eth_sender(); + + let commit_op = test_data::commit_operation(0); + let verify_op = test_data::verify_operation(0); + + let deadline_block = eth_sender.get_deadline_block(1); + let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); + let deadline_block = eth_sender.get_deadline_block(2); + let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); + + let operations = vec![commit_op.clone(), verify_op.clone()]; + + // Create `OperationETHState` objects from operations and restore state + let stored_operations = vec![ + OperationETHState { + operation: commit_op, + txs: vec![commit_op_tx], + }, + OperationETHState { + operation: verify_op, + txs: vec![verify_op_tx], + }, + ]; + + (operations, stored_operations) + }; - // Create `OperationETHState` objects from operations and restore state. - let stored_operations = operations.iter().map(|operation| OperationETHState { - operation: operation.clone(), - txs: Vec::new(), - }); + let stats = ETHStats { + commit_ops: 1, + verify_ops: 1, + withdraw_ops: 1, + }; + let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); - let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations); + // We have to store txs in the database, since we've used them for the data restore. + eth_sender + .db + .save_unconfirmed_operation(&stored_operations[0].txs[0]) + .unwrap(); + eth_sender + .db + .save_unconfirmed_operation(&stored_operations[1].txs[0]) + .unwrap(); for (nonce, operation) in operations.iter().enumerate() { // Note that we DO NOT send an operation to `ETHSender` and neither receive it. // We do process operations restored from the DB though. // The rest of this test is the same as in `operation_commitment_workflow`. - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); eth_sender .ethereum - .add_successfull_execution(&expected_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); eth_sender.db.assert_confirmed(&expected_tx); } @@ -448,18 +486,20 @@ fn confirmations_independence() { sender.try_send(operation.clone()).unwrap(); eth_sender.retrieve_operations(); - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; - eth_sender.proceed_next_operation(); + eth_sender.proceed_next_operations(); + let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); let next_tx = eth_sender - .sign_new_tx( - &operation, + .sign_raw_tx( + stuck_tx.op_id, + raw_tx, eth_sender.get_deadline_block(eth_sender.ethereum.block_number), Some(&stuck_tx), ) @@ -470,8 +510,8 @@ fn confirmations_independence() { // Add a confirmation for a *stuck* transaction. eth_sender .ethereum - .add_successfull_execution(&stuck_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operation(); + .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); // Check that operation is confirmed. 
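+    // Even though the resent transaction (`next_tx`) never received a confirmation,
+    // confirming any transaction that belongs to the operation is sufficient,
+    // which is what this test demonstrates.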
eth_sender.db.assert_confirmed(&stuck_tx); diff --git a/core/server/src/eth_sender/transactions.rs b/core/server/src/eth_sender/transactions.rs index 3f52d35ea1..13e4fbfe24 100644 --- a/core/server/src/eth_sender/transactions.rs +++ b/core/server/src/eth_sender/transactions.rs @@ -10,7 +10,33 @@ use web3::types::{TransactionReceipt, H256, U256}; // Workspace uses use eth_client::SignedCallResult; use models::Operation; -use storage::ethereum::records::StorageETHOperation; +use storage::ethereum::records::{ETHStats as StorageETHStats, StorageETHOperation}; + +pub use storage::ethereum::OperationType; + +/// Collected statistics of the amount of operations sent to the Ethereum. +/// This structure represents the count of **operations**, and not transactions. +/// It means that if for some operation there were N txs sent, it will be counted as +/// 1 operation anyway. +#[derive(Debug, Default, Clone, PartialEq)] +pub struct ETHStats { + /// Amount of sent commit operations. + pub commit_ops: usize, + /// Amount of sent verify operations. + pub verify_ops: usize, + /// Amount of sent withdraw operations. + pub withdraw_ops: usize, +} + +impl From for ETHStats { + fn from(stored: StorageETHStats) -> Self { + Self { + commit_ops: stored.commit_ops as usize, + verify_ops: stored.verify_ops as usize, + withdraw_ops: stored.withdraw_ops as usize, + } + } +} /// An intermediate state of the operation to be stored on /// the Ethereum chain. diff --git a/core/server/src/eth_sender/tx_queue/counter_queue.rs b/core/server/src/eth_sender/tx_queue/counter_queue.rs index afb447da3d..0a146ce761 100644 --- a/core/server/src/eth_sender/tx_queue/counter_queue.rs +++ b/core/server/src/eth_sender/tx_queue/counter_queue.rs @@ -1,6 +1,4 @@ -// TODO: Remove me -#![allow(dead_code)] - +// Built-in imports use std::{collections::VecDeque, fmt}; /// Counter queue is basically a queue which diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index e0c90d894d..5e202d9217 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -1,6 +1,6 @@ -// TODO: Remove me -#![allow(dead_code)] - +// Workspace imports +use models::Operation; +// Local imports use self::{counter_queue::CounterQueue, sparse_queue::SparseQueue}; mod counter_queue; @@ -8,6 +8,34 @@ mod sparse_queue; pub type RawTxData = Vec; +#[derive(Debug)] +pub struct TxData { + pub raw: RawTxData, + pub operation: Option, +} + +impl PartialEq for TxData { + fn eq(&self, other: &Self) -> bool { + self.raw == other.raw + } +} + +impl TxData { + pub fn from_operation(operation: Operation, raw: RawTxData) -> Self { + Self { + raw, + operation: Some(operation), + } + } + + pub fn from_raw(raw: RawTxData) -> Self { + Self { + raw, + operation: None, + } + } +} + /// `TxQueueBuilder` is a structure aiming to simplify the process /// of restoring of the `TxQueue` state after restart. /// This structure allows to configure the sub-queues state (amount of processed @@ -98,9 +126,9 @@ pub struct TxQueue { max_pending_txs: usize, sent_pending_txs: usize, - commit_operations: CounterQueue, - verify_operations: SparseQueue, - withdraw_operations: CounterQueue, + commit_operations: CounterQueue, + verify_operations: SparseQueue, + withdraw_operations: CounterQueue, } impl TxQueue { @@ -116,34 +144,24 @@ impl TxQueue { } } - /// Creates a new empty transactions queue with the custom expected next ID - /// for the `Verify` operations queue. 
- /// This method is used to restore the state of the queue. - pub fn new_from(max_pending_txs: usize, idx: usize) -> Self { - Self { - verify_operations: SparseQueue::new_from(idx), - ..Self::new(max_pending_txs) - } - } - /// Adds the `commit` operation to the queue. - pub fn add_commit_operation(&mut self, commit_operation: RawTxData) { + pub fn add_commit_operation(&mut self, commit_operation: TxData) { self.commit_operations.push_back(commit_operation); } /// Adds the `verify` operation to the queue. - pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: RawTxData) { + pub fn add_verify_operation(&mut self, block_idx: usize, verify_operation: TxData) { self.verify_operations.insert(block_idx, verify_operation); } /// Adds the `withdraw` operation to the queue. - pub fn add_withdraw_operation(&mut self, withdraw_operation: RawTxData) { + pub fn add_withdraw_operation(&mut self, withdraw_operation: TxData) { self.withdraw_operations.push_back(withdraw_operation); } /// Gets the next transaction to send, according to the transaction sending policy. /// For details, see the structure doc-comment. - pub fn pop_front(&mut self) -> Option { + pub fn pop_front(&mut self) -> Option { if self.sent_pending_txs >= self.max_pending_txs { return None; } @@ -160,7 +178,7 @@ impl TxQueue { /// Obtains the next operation from the underlying queues. /// This method does not use/affect `sent_pending_tx` counter. - fn get_next_operation(&mut self) -> Option { + fn get_next_operation(&mut self) -> Option { // 1. Highest priority: verify operations. // If we've committed a corresponding `Commit` operation, and @@ -220,27 +238,27 @@ mod tests { let mut queue = TxQueue::new(MAX_IN_FLY); // Add 2 commit, 2 verify and 2 withdraw operations. - queue.add_commit_operation(vec![COMMIT_MARK, 0]); - queue.add_commit_operation(vec![COMMIT_MARK, 1]); - queue.add_verify_operation(0, vec![VERIFY_MARK, 0]); - queue.add_verify_operation(1, vec![VERIFY_MARK, 1]); - queue.add_withdraw_operation(vec![WITHDRAW_MARK, 0]); - queue.add_withdraw_operation(vec![WITHDRAW_MARK, 1]); + queue.add_commit_operation(TxData::from_raw(vec![COMMIT_MARK, 0])); + queue.add_commit_operation(TxData::from_raw(vec![COMMIT_MARK, 1])); + queue.add_verify_operation(0, TxData::from_raw(vec![VERIFY_MARK, 0])); + queue.add_verify_operation(1, TxData::from_raw(vec![VERIFY_MARK, 1])); + queue.add_withdraw_operation(TxData::from_raw(vec![WITHDRAW_MARK, 0])); + queue.add_withdraw_operation(TxData::from_raw(vec![WITHDRAW_MARK, 1])); // Retrieve the next {MAX_IN_FLY} operations. // The first operation should be `commit`, since we can't send `verify` before the commitment. let op_1 = queue.pop_front().unwrap(); - assert_eq!(op_1, vec![COMMIT_MARK, 0]); + assert_eq!(op_1.raw, vec![COMMIT_MARK, 0]); // The second operation should be `verify`, since it has the highest priority. let op_2 = queue.pop_front().unwrap(); - assert_eq!(op_2, vec![VERIFY_MARK, 0]); + assert_eq!(op_2.raw, vec![VERIFY_MARK, 0]); // The third operation should be `withdraw`, since it has higher priority than `commit`, and we can't // send the `verify` before the corresponding `commit` operation. let op_3 = queue.pop_front().unwrap(); - assert_eq!(op_3, vec![WITHDRAW_MARK, 0]); + assert_eq!(op_3.raw, vec![WITHDRAW_MARK, 0]); // After that we have {MAX_IN_FLY} operations, and `pop_front` should yield nothing. assert_eq!(queue.pop_front(), None); @@ -250,7 +268,7 @@ mod tests { // Now we should obtain the next commit operation. 
let op_4 = queue.pop_front().unwrap(); - assert_eq!(op_4, vec![COMMIT_MARK, 1]); + assert_eq!(op_4.raw, vec![COMMIT_MARK, 1]); // The limit should be met again, and nothing more should be yielded. assert_eq!(queue.pop_front(), None); @@ -264,10 +282,10 @@ mod tests { // Pop remaining operations. let op_5 = queue.pop_front().unwrap(); - assert_eq!(op_5, vec![VERIFY_MARK, 1]); + assert_eq!(op_5.raw, vec![VERIFY_MARK, 1]); let op_6 = queue.pop_front().unwrap(); - assert_eq!(op_6, vec![WITHDRAW_MARK, 1]); + assert_eq!(op_6.raw, vec![WITHDRAW_MARK, 1]); // Though the limit is not met (2 txs in fly, and limit is 3), there should be no txs in the queue. assert_eq!(queue.pop_front(), None); diff --git a/core/server/src/eth_sender/tx_queue/sparse_queue.rs b/core/server/src/eth_sender/tx_queue/sparse_queue.rs index 4bb82c1fe8..87f72e174d 100644 --- a/core/server/src/eth_sender/tx_queue/sparse_queue.rs +++ b/core/server/src/eth_sender/tx_queue/sparse_queue.rs @@ -1,6 +1,4 @@ -// TODO: Remove me -#![allow(dead_code)] - +// Built-in imports use std::{collections::HashMap, fmt}; /// Sparse queue is a sparse queue which allows inserting an element From 5e77769ee70c5cc32ff4b661d7602ef085ee01e4 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Fri, 20 Mar 2020 14:08:56 +0200 Subject: [PATCH 056/186] wait 30 confirmations --- core/models/src/params.rs | 2 ++ core/server/src/eth_watch.rs | 61 +++++++++++++++++++++++++----------- 2 files changed, 45 insertions(+), 18 deletions(-) diff --git a/core/models/src/params.rs b/core/models/src/params.rs index ab2b798060..c2aa99e255 100644 --- a/core/models/src/params.rs +++ b/core/models/src/params.rs @@ -123,6 +123,8 @@ pub fn max_block_chunk_size() -> usize { /// Priority op should be executed for this number of eth blocks. pub const PRIORITY_EXPIRATION: u64 = 250; pub const FR_ADDRESS_LEN: usize = 20; +/// All ethereum events are accepted after sufficient confirmations to eliminate risk of block reorg. +pub const CONFIRMATIONS_FOR_ETH_EVENT: u64 = 30; pub const PAD_MSG_BEFORE_HASH_BITS_LEN: usize = 736; diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index 22bb8517a9..2cc2e28a8d 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -1,3 +1,11 @@ +//! Ethereum watcher polls ethereum node for new events +//! such as PriorityQueue events or NewToken events. +//! New events are accepted to the ZK Sync network only after sufficient confirmations. +//! +//! Poll interval is configured using `ETH_POLL_INTERVAL` constant. +//! Number of confirmations are configured using `CONFIRMATIONS_FOR_ETH_EVENT`. +//! + // Built-in deps use std::collections::HashMap; use std::convert::TryFrom; @@ -16,12 +24,14 @@ use web3::{Transport, Web3}; use models::abi::{governance_contract, zksync_contract}; use models::config_options::ConfigurationOptions; use models::node::{Nonce, PriorityOp, PubKeyHash, TokenId}; -use models::params::PRIORITY_EXPIRATION; +use models::params::{CONFIRMATIONS_FOR_ETH_EVENT, PRIORITY_EXPIRATION}; use models::TokenAddedEvent; use storage::ConnectionPool; use tokio::{runtime::Runtime, time}; use web3::transports::EventLoopHandle; +const ETH_POLL_INTERVAL: Duration = Duration::from_secs(3); + pub enum EthWatchRequest { PollETHNode, IsPubkeyChangeAuthorized { @@ -40,7 +50,8 @@ pub enum EthWatchRequest { pub struct EthWatch { gov_contract: (ethabi::Contract, Contract), zksync_contract: (ethabi::Contract, Contract), - processed_block: u64, + /// The last block of the ethereum network known to ethereum watcher. 
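+    /// Events are accepted only once they are sufficiently deep: each poll processes
+    /// events up to `last_ethereum_block - CONFIRMATIONS_FOR_ETH_EVENT`. For example,
+    /// with the current constant of 30, seeing block 1030 means events are accepted
+    /// up to block 1000.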
+ last_ethereum_block: u64, eth_state: ETHState, web3: Web3, _web3_event_loop_handle: EventLoopHandle, @@ -87,7 +98,7 @@ impl EthWatch { Self { gov_contract, zksync_contract, - processed_block: 0, + last_ethereum_block: 0, eth_state: ETHState { tokens: HashMap::new(), priority_queue: HashMap::new(), @@ -170,12 +181,17 @@ impl EthWatch { .collect() } - async fn restore_state_from_eth(&mut self, block: u64) { + async fn restore_state_from_eth(&mut self, current_ethereum_block: u64) { + let new_block_with_accepted_events = + current_ethereum_block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); + let previous_block_with_accepted_events = + new_block_with_accepted_events.saturating_sub(PRIORITY_EXPIRATION); + // restore priority queue let prior_queue_events = self .get_priority_op_events( - BlockNumber::Number(block.saturating_sub(PRIORITY_EXPIRATION)), - BlockNumber::Number(block), + BlockNumber::Number(previous_block_with_accepted_events), + BlockNumber::Number(new_block_with_accepted_events), ) .await .expect("Failed to restore priority queue events from ETH"); @@ -187,7 +203,10 @@ impl EthWatch { // restore token list from governance contract let new_tokens = self - .get_new_token_events(BlockNumber::Earliest, BlockNumber::Number(block)) + .get_new_token_events( + BlockNumber::Earliest, + BlockNumber::Number(new_block_with_accepted_events), + ) .await .expect("Failed to restore token list from ETH"); for token in new_tokens.into_iter() { @@ -198,19 +217,24 @@ impl EthWatch { trace!("ETH state: {:#?}", self.eth_state); } - async fn process_new_blocks(&mut self, last_block: u64) -> Result<(), failure::Error> { - debug_assert!(self.processed_block < last_block); + async fn process_new_blocks(&mut self, current_eth_block: u64) -> Result<(), failure::Error> { + debug_assert!(self.last_ethereum_block < current_eth_block); + + let previous_block_with_accepted_events = + (self.last_ethereum_block + 1).saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); + let new_block_with_accepted_events = + current_eth_block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); let new_tokens = self .get_new_token_events( - BlockNumber::Number(self.processed_block + 1), - BlockNumber::Number(last_block), + BlockNumber::Number(previous_block_with_accepted_events), + BlockNumber::Number(new_block_with_accepted_events), ) .await?; let priority_op_events = self .get_priority_op_events( - BlockNumber::Number(self.processed_block + 1), - BlockNumber::Number(last_block), + BlockNumber::Number(previous_block_with_accepted_events), + BlockNumber::Number(new_block_with_accepted_events), ) .await?; @@ -225,7 +249,7 @@ impl EthWatch { self.eth_state .add_new_token(token.id as TokenId, token.address); } - self.processed_block = last_block; + self.last_ethereum_block = current_eth_block; Ok(()) } @@ -297,8 +321,9 @@ impl EthWatch { .await .expect("Block number") .as_u64(); - self.processed_block = block; - self.restore_state_from_eth(block).await; + self.last_ethereum_block = block; + self.restore_state_from_eth(block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT as u64)) + .await; while let Some(request) = self.eth_watch_req.next().await { match request { @@ -310,7 +335,7 @@ impl EthWatch { continue; }; - if block > self.processed_block { + if block > self.last_ethereum_block { self.process_new_blocks(block) .await .map_err(|e| warn!("Failed to process new blocks {}", e)) @@ -365,7 +390,7 @@ pub fn start_eth_watch( runtime.spawn(eth_watch.run()); runtime.spawn(async move { - let mut timer = time::interval(Duration::from_secs(5)); + let mut timer 
= time::interval(ETH_POLL_INTERVAL); loop { timer.tick().await; From 54ac9700e658a3382d4922aae901b3dc783d8f10 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 15:15:08 +0200 Subject: [PATCH 057/186] Simplify wasm build --- js/client/yarn.lock | 15 +- js/tests/yarn.lock | 10 +- js/zksync-crypto/.gitignore | 2 +- js/zksync-crypto/indexx.js | 1 - js/zksync-crypto/package.json | 31 +- js/zksync-crypto/rollup.config.js | 27 - js/zksync-crypto/webpack.config.js | 33 - js/zksync-crypto/yarn.lock | 3542 ---------------------------- js/zksync.js/package.json | 4 +- js/zksync.js/src/crypto.ts | 8 +- js/zksync.js/yarn.lock | 10 +- 11 files changed, 19 insertions(+), 3664 deletions(-) delete mode 100644 js/zksync-crypto/indexx.js delete mode 100644 js/zksync-crypto/rollup.config.js delete mode 100644 js/zksync-crypto/webpack.config.js diff --git a/js/client/yarn.lock b/js/client/yarn.lock index 8049350250..60fb70ffca 100644 --- a/js/client/yarn.lock +++ b/js/client/yarn.lock @@ -1859,11 +1859,6 @@ browserslist@^4.3.4: electron-to-chromium "^1.3.247" node-releases "^1.1.29" -buffer-es6@^4.9.3: - version "4.9.3" - resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" - integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= - buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -3706,12 +3701,6 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" -example-node-wasm@../zksync-crypto: - version "0.0.0" - dependencies: - buffer-es6 "^4.9.3" - zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-7801641d-ec0b-4594-a519-2b9a8eeb179e-1584117788720/node_modules/example-node-wasm/pkg" - execa@^0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" @@ -9683,8 +9672,8 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" -zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": - version "0.1.0" +zksync-crypto@../zksync-crypto: + version "0.0.0" "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index 4b8f025592..4ff58f36d2 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -231,7 +231,7 @@ example-node-wasm@../zksync-crypto: version "0.0.0" dependencies: buffer-es6 "^4.9.3" - zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-9a0cf1ac-bdad-461e-97bb-b869071b95cc-1584116293040/node_modules/example-node-wasm/pkg" + zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-1da603b1-efda-4ef8-b578-586eccd35c48-1584708911723/node_modules/example-node-wasm/pkg" ext@^1.1.2: version "1.4.0" @@ -545,9 +545,15 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": +zksync-crypto-node@../zksync-crypto/nodejspgk: + version "0.1.0" + +zksync-crypto-web@../zksync-crypto/webpgk: version "0.1.0" +zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": + version "0.0.0" + "zksync@link:../zksync.js": version "0.0.0" uid "" diff --git a/js/zksync-crypto/.gitignore b/js/zksync-crypto/.gitignore index 4e301317e5..bb9a2539a7 
100644 --- a/js/zksync-crypto/.gitignore +++ b/js/zksync-crypto/.gitignore @@ -2,5 +2,5 @@ **/*.rs.bk Cargo.lock bin/ -pkg/ +pkg wasm-pack.log diff --git a/js/zksync-crypto/indexx.js b/js/zksync-crypto/indexx.js deleted file mode 100644 index 4f81fc4777..0000000000 --- a/js/zksync-crypto/indexx.js +++ /dev/null @@ -1 +0,0 @@ -export default import("zksync-crypto"); diff --git a/js/zksync-crypto/package.json b/js/zksync-crypto/package.json index 42e0cf9b50..2ff4cc03a3 100644 --- a/js/zksync-crypto/package.json +++ b/js/zksync-crypto/package.json @@ -1,33 +1,10 @@ { - "name": "example-node-wasm", + "name": "zksync-crypto", "version": "0.0.0", - "browser": "dist/index.web.js", - "main": "dist/index.node.js", + "browser": "pkg/web.js", + "main": "pkg/node.js", "scripts": { - "build": "rollup -c", + "build": "wasm-pack build --out-name=web && wasm-pack build --target=nodejs --out-name=node && rm pkg/package.json", "test": "echo \"Error: no test specified\" && exit 1" - }, - "dependencies": { - "buffer-es6": "^4.9.3", - "zksync-crypto": "file:pkg" - }, - "devDependencies": { - "@babel/core": "^7.8.7", - "@babel/plugin-transform-modules-commonjs": "^7.8.3", - "@rollup/plugin-commonjs": "^11.0.2", - "@rollup/plugin-node-resolve": "^7.1.1", - "@rollup/plugin-typescript": "^4.0.0", - "@rollup/plugin-wasm": "^3.0.0", - "@wasm-tool/wasm-pack-plugin": "^1.1.0", - "html-webpack-plugin": "^3.2.0", - "rollup": "^2.0.6", - "rollup-plugin-dts": "^1.3.0", - "rollup-plugin-rust": "^1.2.0", - "rollup-plugin-terser": "^5.3.0", - "text-encoding": "^0.7.0", - "typescript": "^3.8.3", - "wasm-loader": "^1.3.0", - "webpack": "^4.42.0", - "webpack-cli": "^3.3.11" } } diff --git a/js/zksync-crypto/rollup.config.js b/js/zksync-crypto/rollup.config.js deleted file mode 100644 index 2bcc254b09..0000000000 --- a/js/zksync-crypto/rollup.config.js +++ /dev/null @@ -1,27 +0,0 @@ -import resolve from '@rollup/plugin-node-resolve'; -import commonjs from '@rollup/plugin-commonjs'; -import { terser } from 'rollup-plugin-terser'; -import wasm from '@rollup/plugin-wasm'; -import typescript from '@rollup/plugin-typescript'; -import dts from "rollup-plugin-dts"; - -// `npm run build` -> `production` is true -// `npm run dev` -> `production` is false -const production = !process.env.ROLLUP_WATCH; - -export default { - input: [ - "pkg/zksync_crypto.js", - ], - output: { - file: 'public/bundle.js', - format: 'cjs' - }, - plugins: [ - resolve(), // tells Rollup how to find date-fns in node_modules - dts(), - commonjs(), // converts date-fns to ES modules - wasm(), - production && terser() // minify, but only in production - ] -}; diff --git a/js/zksync-crypto/webpack.config.js b/js/zksync-crypto/webpack.config.js deleted file mode 100644 index e47cf96508..0000000000 --- a/js/zksync-crypto/webpack.config.js +++ /dev/null @@ -1,33 +0,0 @@ -const HtmlWebpackPlugin = require('html-webpack-plugin'); -const path = require('path'); -const webpack = require('webpack'); -const WasmPackPlugin = require("@wasm-tool/wasm-pack-plugin"); - -const config = target => ({ - entry: './indexx.js', - // entry: `./index_${target}.js`, - output: { - path: path.resolve(__dirname, 'dist'), - filename: `index.${target}.js`, - libraryTarget: 'umd', - }, - plugins: [ - new HtmlWebpackPlugin(), - new WasmPackPlugin({ - crateDirectory: path.resolve(__dirname, "."), - extraArgs - : target == 'web' ? '' - : target == 'node' ? '--target=nodejs' - : null, - }), - // Have this example work in Edge which doesn't ship `TextEncoder` or - // `TextDecoder` at this time. 
- new webpack.ProvidePlugin({ - TextDecoder: ['text-encoding', 'TextDecoder'], - TextEncoder: ['text-encoding', 'TextEncoder'] - }) - ], - mode: 'development', -}); - -module.exports = ['web', 'node'].map(target => ({...config(target), target})); diff --git a/js/zksync-crypto/yarn.lock b/js/zksync-crypto/yarn.lock index e26368102b..fb57ccd13a 100644 --- a/js/zksync-crypto/yarn.lock +++ b/js/zksync-crypto/yarn.lock @@ -2,3545 +2,3 @@ # yarn lockfile v1 -"@babel/code-frame@^7.0.0-beta.36", "@babel/code-frame@^7.5.5", "@babel/code-frame@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e" - integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g== - dependencies: - "@babel/highlight" "^7.8.3" - -"@babel/core@^7.0.0-beta.39", "@babel/core@^7.8.7": - version "7.8.7" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.8.7.tgz#b69017d221ccdeb203145ae9da269d72cf102f3b" - integrity sha512-rBlqF3Yko9cynC5CCFy6+K/w2N+Sq/ff2BPy+Krp7rHlABIr5epbA7OxVeKoMHB39LZOp1UY5SuLjy6uWi35yA== - dependencies: - "@babel/code-frame" "^7.8.3" - "@babel/generator" "^7.8.7" - "@babel/helpers" "^7.8.4" - "@babel/parser" "^7.8.7" - "@babel/template" "^7.8.6" - "@babel/traverse" "^7.8.6" - "@babel/types" "^7.8.7" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.1" - json5 "^2.1.0" - lodash "^4.17.13" - resolve "^1.3.2" - semver "^5.4.1" - source-map "^0.5.0" - -"@babel/generator@^7.8.6", "@babel/generator@^7.8.7": - version "7.8.8" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.8.8.tgz#cdcd58caab730834cee9eeadb729e833b625da3e" - integrity sha512-HKyUVu69cZoclptr8t8U5b6sx6zoWjh8jiUhnuj3MpZuKT2dJ8zPTuiy31luq32swhI0SpwItCIlU8XW7BZeJg== - dependencies: - "@babel/types" "^7.8.7" - jsesc "^2.5.1" - lodash "^4.17.13" - source-map "^0.5.0" - -"@babel/helper-function-name@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca" - integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA== - dependencies: - "@babel/helper-get-function-arity" "^7.8.3" - "@babel/template" "^7.8.3" - "@babel/types" "^7.8.3" - -"@babel/helper-get-function-arity@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5" - integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA== - dependencies: - "@babel/types" "^7.8.3" - -"@babel/helper-member-expression-to-functions@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz#659b710498ea6c1d9907e0c73f206eee7dadc24c" - integrity sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA== - dependencies: - "@babel/types" "^7.8.3" - -"@babel/helper-module-imports@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498" - integrity sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg== - dependencies: - "@babel/types" "^7.8.3" - -"@babel/helper-module-transforms@^7.8.3": - version "7.8.6" - 
resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.8.6.tgz#6a13b5eecadc35692047073a64e42977b97654a4" - integrity sha512-RDnGJSR5EFBJjG3deY0NiL0K9TO8SXxS9n/MPsbPK/s9LbQymuLNtlzvDiNS7IpecuL45cMeLVkA+HfmlrnkRg== - dependencies: - "@babel/helper-module-imports" "^7.8.3" - "@babel/helper-replace-supers" "^7.8.6" - "@babel/helper-simple-access" "^7.8.3" - "@babel/helper-split-export-declaration" "^7.8.3" - "@babel/template" "^7.8.6" - "@babel/types" "^7.8.6" - lodash "^4.17.13" - -"@babel/helper-optimise-call-expression@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz#7ed071813d09c75298ef4f208956006b6111ecb9" - integrity sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ== - dependencies: - "@babel/types" "^7.8.3" - -"@babel/helper-plugin-utils@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.8.3.tgz#9ea293be19babc0f52ff8ca88b34c3611b208670" - integrity sha512-j+fq49Xds2smCUNYmEHF9kGNkhbet6yVIBp4e6oeQpH1RUs/Ir06xUKzDjDkGcaaokPiTNs2JBWHjaE4csUkZQ== - -"@babel/helper-replace-supers@^7.8.6": - version "7.8.6" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz#5ada744fd5ad73203bf1d67459a27dcba67effc8" - integrity sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.8.3" - "@babel/helper-optimise-call-expression" "^7.8.3" - "@babel/traverse" "^7.8.6" - "@babel/types" "^7.8.6" - -"@babel/helper-simple-access@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz#7f8109928b4dab4654076986af575231deb639ae" - integrity sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw== - dependencies: - "@babel/template" "^7.8.3" - "@babel/types" "^7.8.3" - -"@babel/helper-split-export-declaration@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9" - integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA== - dependencies: - "@babel/types" "^7.8.3" - -"@babel/helpers@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.8.4.tgz#754eb3ee727c165e0a240d6c207de7c455f36f73" - integrity sha512-VPbe7wcQ4chu4TDQjimHv/5tj73qz88o12EPkO2ValS2QiQS/1F2SsjyIGNnAD0vF/nZS6Cf9i+vW6HIlnaR8w== - dependencies: - "@babel/template" "^7.8.3" - "@babel/traverse" "^7.8.4" - "@babel/types" "^7.8.3" - -"@babel/highlight@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.8.3.tgz#28f173d04223eaaa59bc1d439a3836e6d1265797" - integrity sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg== - dependencies: - chalk "^2.0.0" - esutils "^2.0.2" - js-tokens "^4.0.0" - -"@babel/parser@^7.8.6", "@babel/parser@^7.8.7": - version "7.8.8" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.8.8.tgz#4c3b7ce36db37e0629be1f0d50a571d2f86f6cd4" - integrity sha512-mO5GWzBPsPf6865iIbzNE0AvkKF3NE+2S3eRUpE+FE07BOAkXh6G+GW/Pj01hhXjve1WScbaIO4UlY1JKeqCcA== - -"@babel/plugin-transform-modules-commonjs@^7.8.3": - 
version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.8.3.tgz#df251706ec331bd058a34bdd72613915f82928a5" - integrity sha512-JpdMEfA15HZ/1gNuB9XEDlZM1h/gF/YOH7zaZzQu2xCFRfwc01NXBMHHSTT6hRjlXJJs5x/bfODM3LiCk94Sxg== - dependencies: - "@babel/helper-module-transforms" "^7.8.3" - "@babel/helper-plugin-utils" "^7.8.3" - "@babel/helper-simple-access" "^7.8.3" - babel-plugin-dynamic-import-node "^2.3.0" - -"@babel/template@^7.8.3", "@babel/template@^7.8.6": - version "7.8.6" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.8.6.tgz#86b22af15f828dfb086474f964dcc3e39c43ce2b" - integrity sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg== - dependencies: - "@babel/code-frame" "^7.8.3" - "@babel/parser" "^7.8.6" - "@babel/types" "^7.8.6" - -"@babel/traverse@^7.0.0-beta.39", "@babel/traverse@^7.8.4", "@babel/traverse@^7.8.6": - version "7.8.6" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.8.6.tgz#acfe0c64e1cd991b3e32eae813a6eb564954b5ff" - integrity sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A== - dependencies: - "@babel/code-frame" "^7.8.3" - "@babel/generator" "^7.8.6" - "@babel/helper-function-name" "^7.8.3" - "@babel/helper-split-export-declaration" "^7.8.3" - "@babel/parser" "^7.8.6" - "@babel/types" "^7.8.6" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.13" - -"@babel/types@^7.0.0-beta.39", "@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.8.7": - version "7.8.7" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.8.7.tgz#1fc9729e1acbb2337d5b6977a63979b4819f5d1d" - integrity sha512-k2TreEHxFA4CjGkL+GYjRyx35W0Mr7DP5+9q6WMkyKXB+904bYmG40syjMFV0oLlhhFCwWl0vA0DyzTDkwAiJw== - dependencies: - esutils "^2.0.2" - lodash "^4.17.13" - to-fast-properties "^2.0.0" - -"@rollup/plugin-commonjs@^11.0.2": - version "11.0.2" - resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.2.tgz#837cc6950752327cb90177b608f0928a4e60b582" - integrity sha512-MPYGZr0qdbV5zZj8/2AuomVpnRVXRU5XKXb3HVniwRoRCreGlf5kOE081isNWeiLIi6IYkwTX9zE0/c7V8g81g== - dependencies: - "@rollup/pluginutils" "^3.0.0" - estree-walker "^1.0.1" - is-reference "^1.1.2" - magic-string "^0.25.2" - resolve "^1.11.0" - -"@rollup/plugin-node-resolve@^7.1.1": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.1.tgz#8c6e59c4b28baf9d223028d0e450e06a485bb2b7" - integrity sha512-14ddhD7TnemeHE97a4rLOhobfYvUVcaYuqTnL8Ti7Jxi9V9Jr5LY7Gko4HZ5k4h4vqQM0gBQt6tsp9xXW94WPA== - dependencies: - "@rollup/pluginutils" "^3.0.6" - "@types/resolve" "0.0.8" - builtin-modules "^3.1.0" - is-module "^1.0.0" - resolve "^1.14.2" - -"@rollup/plugin-typescript@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@rollup/plugin-typescript/-/plugin-typescript-4.0.0.tgz#7a4f7b2844d28669e58c03c880f6ed0d6e926685" - integrity sha512-qA3r4WlR8JnTm+VdBzvQSIkfXt802keGxXuE4SAjUjRMKK3nMXTUCvOGSzFkav2qf0QiGv6yijfbjuf+bhwmZQ== - dependencies: - "@rollup/pluginutils" "^3.0.1" - resolve "^1.14.1" - -"@rollup/plugin-wasm@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@rollup/plugin-wasm/-/plugin-wasm-3.0.0.tgz#a757ba29d63621bdb6f4f3dfbb08d6aa2b3fd3ab" - integrity sha512-ggiUAwvjKN3jbGbdYs0zLus4rDa8ug08tigb4vyur/miPcaDJG178cgeJw+zVV1rOQk9rtzK5V+nZg31AcgU/w== - -"@rollup/pluginutils@^3.0.0", "@rollup/pluginutils@^3.0.1", 
"@rollup/pluginutils@^3.0.6": - version "3.0.8" - resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.0.8.tgz#4e94d128d94b90699e517ef045422960d18c8fde" - integrity sha512-rYGeAc4sxcZ+kPG/Tw4/fwJODC3IXHYDH4qusdN/b6aLw5LPUbzpecYbEJh4sVQGPFJxd2dBU4kc1H3oy9/bnw== - dependencies: - estree-walker "^1.0.1" - -"@types/estree@0.0.39": - version "0.0.39" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" - integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== - -"@types/node@*": - version "13.9.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-13.9.1.tgz#96f606f8cd67fb018847d9b61e93997dabdefc72" - integrity sha512-E6M6N0blf/jiZx8Q3nb0vNaswQeEyn0XlupO+xN6DtJ6r6IT4nXrTry7zhIfYvFCl3/8Cu6WIysmUBKiqV0bqQ== - -"@types/resolve@0.0.8": - version "0.0.8" - resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" - integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ== - dependencies: - "@types/node" "*" - -"@wasm-tool/wasm-pack-plugin@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@wasm-tool/wasm-pack-plugin/-/wasm-pack-plugin-1.1.0.tgz#94016deba0f59306d1a9f0cb3b15144d8cd9ab34" - integrity sha512-44vbq7MyZzavE7g5Q7RKlnFtI35BhUkNiUANTeOivbpRfsRw0d0n9lA2ytmiVS4O+AVRsjjPLVSv35kPvL+OWg== - dependencies: - chalk "^2.4.1" - command-exists "^1.2.7" - watchpack "^1.6.0" - -"@webassemblyjs/ast@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.8.5.tgz#51b1c5fe6576a34953bf4b253df9f0d490d9e359" - integrity sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ== - dependencies: - "@webassemblyjs/helper-module-context" "1.8.5" - "@webassemblyjs/helper-wasm-bytecode" "1.8.5" - "@webassemblyjs/wast-parser" "1.8.5" - -"@webassemblyjs/floating-point-hex-parser@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz#1ba926a2923613edce496fd5b02e8ce8a5f49721" - integrity sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ== - -"@webassemblyjs/helper-api-error@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz#c49dad22f645227c5edb610bdb9697f1aab721f7" - integrity sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA== - -"@webassemblyjs/helper-buffer@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz#fea93e429863dd5e4338555f42292385a653f204" - integrity sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q== - -"@webassemblyjs/helper-code-frame@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz#9a740ff48e3faa3022b1dff54423df9aa293c25e" - integrity sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ== - dependencies: - "@webassemblyjs/wast-printer" "1.8.5" - -"@webassemblyjs/helper-fsm@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz#ba0b7d3b3f7e4733da6059c9332275d860702452" - integrity 
sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow== - -"@webassemblyjs/helper-module-context@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz#def4b9927b0101dc8cbbd8d1edb5b7b9c82eb245" - integrity sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g== - dependencies: - "@webassemblyjs/ast" "1.8.5" - mamacro "^0.0.3" - -"@webassemblyjs/helper-wasm-bytecode@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz#537a750eddf5c1e932f3744206551c91c1b93e61" - integrity sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ== - -"@webassemblyjs/helper-wasm-section@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz#74ca6a6bcbe19e50a3b6b462847e69503e6bfcbf" - integrity sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-buffer" "1.8.5" - "@webassemblyjs/helper-wasm-bytecode" "1.8.5" - "@webassemblyjs/wasm-gen" "1.8.5" - -"@webassemblyjs/ieee754@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz#712329dbef240f36bf57bd2f7b8fb9bf4154421e" - integrity sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.8.5.tgz#044edeb34ea679f3e04cd4fd9824d5e35767ae10" - integrity sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.8.5.tgz#a8bf3b5d8ffe986c7c1e373ccbdc2a0915f0cedc" - integrity sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw== - -"@webassemblyjs/wasm-edit@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz#962da12aa5acc1c131c81c4232991c82ce56e01a" - integrity sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-buffer" "1.8.5" - "@webassemblyjs/helper-wasm-bytecode" "1.8.5" - "@webassemblyjs/helper-wasm-section" "1.8.5" - "@webassemblyjs/wasm-gen" "1.8.5" - "@webassemblyjs/wasm-opt" "1.8.5" - "@webassemblyjs/wasm-parser" "1.8.5" - "@webassemblyjs/wast-printer" "1.8.5" - -"@webassemblyjs/wasm-gen@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz#54840766c2c1002eb64ed1abe720aded714f98bc" - integrity sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-wasm-bytecode" "1.8.5" - "@webassemblyjs/ieee754" "1.8.5" - "@webassemblyjs/leb128" "1.8.5" - "@webassemblyjs/utf8" "1.8.5" - -"@webassemblyjs/wasm-opt@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz#b24d9f6ba50394af1349f510afa8ffcb8a63d264" - integrity 
sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-buffer" "1.8.5" - "@webassemblyjs/wasm-gen" "1.8.5" - "@webassemblyjs/wasm-parser" "1.8.5" - -"@webassemblyjs/wasm-parser@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz#21576f0ec88b91427357b8536383668ef7c66b8d" - integrity sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-api-error" "1.8.5" - "@webassemblyjs/helper-wasm-bytecode" "1.8.5" - "@webassemblyjs/ieee754" "1.8.5" - "@webassemblyjs/leb128" "1.8.5" - "@webassemblyjs/utf8" "1.8.5" - -"@webassemblyjs/wast-parser@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz#e10eecd542d0e7bd394f6827c49f3df6d4eefb8c" - integrity sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/floating-point-hex-parser" "1.8.5" - "@webassemblyjs/helper-api-error" "1.8.5" - "@webassemblyjs/helper-code-frame" "1.8.5" - "@webassemblyjs/helper-fsm" "1.8.5" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/wast-printer@1.8.5": - version "1.8.5" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz#114bbc481fd10ca0e23b3560fa812748b0bae5bc" - integrity sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/wast-parser" "1.8.5" - "@xtuc/long" "4.2.2" - -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - -acorn@^6.2.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.1.tgz#531e58ba3f51b9dacb9a6646ca4debf5b14ca474" - integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== - -ajv-errors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" - integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== - -ajv-keywords@^3.1.0, ajv-keywords@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.4.1.tgz#ef916e271c64ac12171fd8384eaae6b2345854da" - integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ== - -ajv@^6.1.0, ajv@^6.10.2: - version "6.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.0.tgz#06d60b96d87b8454a5adaba86e7854da629db4b7" - integrity sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ansi-regex@^2.0.0: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" - integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== - -ansi-styles@^3.2.0, ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" - -aproba@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - -asn1.js@^4.0.0: - version "4.10.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-4.10.1.tgz#b9c2bf5805f1e64aadeed6df3a2bfafb5a73f5a0" - integrity sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw== - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -assert@^1.1.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" - integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== - dependencies: - object-assign "^4.1.1" - util "0.10.3" - -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity 
sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - -babel-plugin-dynamic-import-node@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" - integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== - dependencies: - object.assign "^4.1.0" - -babylon@^7.0.0-beta.39: - version "7.0.0-beta.47" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.47.tgz#6d1fa44f0abec41ab7c780481e62fd9aafbdea80" - integrity sha512-+rq2cr4GDhtToEzKFD6KZZMDBXhjFAr9JjPw9pAppZACeEWqNM294j+NdBzkSHYXwzzBmVjZ3nEVJlOhbR2gOQ== - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= - -base64-js@^1.0.2: - version "1.3.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" - integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - -big.js@^3.1.3: - version "3.2.0" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" - integrity sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q== - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - -bluebird@^3.5.5: - version "3.7.2" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0: - version "4.11.8" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" - integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== - -boolbase@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= - -brace-expansion@^1.1.7: - version "1.1.11" - 
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - -brorand@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= - -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" - integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - -browserify-des@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" - integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -browserify-rsa@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" - integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= - dependencies: - bn.js "^4.1.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.0.4.tgz#aa4eb68e5d7b658baa6bf6a57e630cbd7a93d298" - integrity sha1-qk62jl17ZYuqa/alfmMMvXqT0pg= - dependencies: - bn.js "^4.1.1" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.2" - elliptic "^6.0.0" - inherits "^2.0.1" - parse-asn1 "^5.0.0" - -browserify-zlib@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" - integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== - dependencies: - pako "~1.0.5" - -buffer-es6@^4.9.3: - version "4.9.3" - resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" - integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= - -buffer-from@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" - 
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== - -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= - -buffer@^4.3.0: - version "4.9.2" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" - integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== - dependencies: - base64-js "^1.0.2" - ieee754 "^1.1.4" - isarray "^1.0.0" - -builtin-modules@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.1.0.tgz#aad97c15131eb76b65b50ef208e7584cd76a7484" - integrity sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw== - -builtin-status-codes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" - integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= - -cacache@^12.0.2: - version "12.0.3" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.3.tgz#be99abba4e1bf5df461cd5a2c1071fc432573390" - integrity sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw== - dependencies: - bluebird "^3.5.5" - chownr "^1.1.1" - figgy-pudding "^3.5.1" - glob "^7.1.4" - graceful-fs "^4.1.15" - infer-owner "^1.0.3" - lru-cache "^5.1.1" - mississippi "^3.0.0" - mkdirp "^0.5.1" - move-concurrently "^1.0.1" - promise-inflight "^1.0.1" - rimraf "^2.6.3" - ssri "^6.0.1" - unique-filename "^1.1.1" - y18n "^4.0.0" - -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - -camel-case@3.0.x: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" - integrity sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M= - dependencies: - no-case "^2.2.0" - upper-case "^1.1.1" - -camelcase@^5.0.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chokidar@^2.0.2: - version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" - integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob 
"^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -chrome-trace-event@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" - integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== - dependencies: - tslib "^1.9.0" - -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - -clean-css@4.2.x: - version "4.2.3" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.3.tgz#507b5de7d97b48ee53d84adb0160ff6216380f78" - integrity sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA== - dependencies: - source-map "~0.6.0" - -cliui@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" - integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== - dependencies: - string-width "^3.1.0" - strip-ansi "^5.2.0" - wrap-ansi "^5.1.0" - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= - -command-exists@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.8.tgz#715acefdd1223b9c9b37110a149c6392c2852291" - integrity sha512-PM54PkseWbiiD/mMsbvW351/u+dafwTJ0ye2qB60G1aGQP9j3xK2gmMDc+R34L3nDtx4qMCitXT75mkbkGJDLw== - -commander@2.17.x: - version "2.17.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" - integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== - -commander@^2.20.0: - version "2.20.3" - resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@~2.19.0: - version "2.19.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" - integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== - -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= - -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -concat-stream@^1.5.0: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -console-browserify@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" - integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== - -constants-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" - integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= - -convert-source-map@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" - integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== - dependencies: - safe-buffer "~5.1.1" - -copy-concurrently@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" - integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== - dependencies: - aproba "^1.1.1" - fs-write-stream-atomic "^1.0.8" - iferr "^0.1.5" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.0" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - -core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= - -create-ecdh@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.3.tgz#c9111b6f33045c4697f144787f9254cdc77c45ff" - integrity sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw== - dependencies: - bn.js "^4.1.0" 
- elliptic "^6.0.0" - -create-hash@^1.1.0, create-hash@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -cross-spawn@6.0.5, cross-spawn@^6.0.0: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - -crypto-browserify@^3.11.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" - integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== - dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - randomfill "^1.0.3" - -css-select@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.2.0.tgz#2b3a110539c5355f1cd8d314623e870b121ec858" - integrity sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg= - dependencies: - boolbase "~1.0.0" - css-what "2.1" - domutils "1.5.1" - nth-check "~1.0.1" - -css-what@2.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2" - integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== - -cyclist@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" - integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= - -debug@^2.2.0, debug@^2.3.3: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - -decamelize@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" 
- integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - -define-properties@^1.1.2, define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" - -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - -des.js@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" - integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -detect-file@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" - integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc= - -diffie-hellman@^5.0.0: - version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" - integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - -dom-converter@^0.2: - version "0.2.0" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-serializer@0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - -domain-browser@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" - integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== - -domelementtype@1, domelementtype@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domelementtype@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.1.tgz#1f8bdfe91f5a78063274e803b4bdcedf6e94f94d" - integrity 
sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ== - -domhandler@^2.3.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" - integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== - dependencies: - domelementtype "1" - -domutils@1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.5.1.tgz#dcd8488a26f563d61079e48c9f7b7e32373682cf" - integrity sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8= - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^1.5.1: - version "1.7.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -duplexify@^3.4.2, duplexify@^3.6.0: - version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" - integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== - dependencies: - end-of-stream "^1.0.0" - inherits "^2.0.1" - readable-stream "^2.0.0" - stream-shift "^1.0.0" - -elliptic@^6.0.0: - version "6.5.2" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" - integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== - dependencies: - bn.js "^4.4.0" - brorand "^1.0.1" - hash.js "^1.0.0" - hmac-drbg "^1.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.0" - -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== - -emojis-list@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389" - integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k= - -emojis-list@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -end-of-stream@^1.0.0, end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -enhanced-resolve@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f" - integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng== - dependencies: - graceful-fs "^4.1.2" - memory-fs "^0.4.0" - tapable "^1.0.0" - -enhanced-resolve@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66" - integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA== - dependencies: - graceful-fs "^4.1.2" - 
memory-fs "^0.5.0" - tapable "^1.0.0" - -entities@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" - integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== - -entities@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4" - integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw== - -errno@^0.1.3, errno@~0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" - integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== - dependencies: - prr "~1.0.1" - -es-abstract@^1.17.0-next.1: - version "1.17.4" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.4.tgz#e3aedf19706b20e7c2594c35fc0d57605a79e184" - integrity sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ== - dependencies: - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - is-callable "^1.1.5" - is-regex "^1.0.5" - object-inspect "^1.7.0" - object-keys "^1.1.1" - object.assign "^4.1.0" - string.prototype.trimleft "^2.1.1" - string.prototype.trimright "^2.1.1" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= - -eslint-scope@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" - integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -esrecurse@^4.1.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" - integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ== - dependencies: - estraverse "^4.1.0" - -estraverse@^4.1.0, estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estree-walker@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362" - integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w== - -estree-walker@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" - integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== - -esutils@^2.0.2: - version "2.0.3" - resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -events@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.1.0.tgz#84279af1b34cb75aa88bf5ff291f6d0bd9b31a59" - integrity sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg== - -evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -expand-tilde@^2.0.0, expand-tilde@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" - integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI= - dependencies: - homedir-polyfill "^1.0.1" - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -fast-deep-equal@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz#545145077c501491e33b15ec408c294376e94ae4" - integrity sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA== - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - 
-figgy-pudding@^3.5.1: - version "3.5.1" - resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.1.tgz#862470112901c727a0e495a80744bd5baa1d6790" - integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w== - -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - -find-cache-dir@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" - integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== - dependencies: - commondir "^1.0.1" - make-dir "^2.0.0" - pkg-dir "^3.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -findup-sync@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" - integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== - dependencies: - detect-file "^1.0.0" - is-glob "^4.0.0" - micromatch "^3.0.4" - resolve-dir "^1.0.1" - -flush-write-stream@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" - integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== - dependencies: - inherits "^2.0.3" - readable-stream "^2.3.6" - -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - -from2@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - -fs-write-stream-atomic@^1.0.8: - version "1.0.10" - resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" - integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= - dependencies: - graceful-fs "^4.1.2" - iferr "^0.1.5" - imurmurhash "^0.1.4" - readable-stream "1 || 2" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -fsevents@^1.2.7: - version "1.2.11" - resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.11.tgz#67bf57f4758f02ede88fb2a1712fef4d15358be3" - integrity sha512-+ux3lx6peh0BpvY0JebGyZoiR4D+oYzdPZMKJwkZ+sFkNJzpL7tXc/wehS49gUAxg3tmMHPHZkA8JU2rhhgDHw== - dependencies: - bindings "^1.5.0" - nan "^2.12.1" - -fsevents@~2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805" - integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -gensync@^1.0.0-beta.1: - version "1.0.0-beta.1" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" - integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg== - -get-caller-file@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-stream@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob@^7.1.3, glob@^7.1.4: - version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-modules@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - -global-modules@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" - integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== - dependencies: - global-prefix "^1.0.1" - is-windows "^1.0.1" - resolve-dir "^1.0.0" - -global-prefix@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" - integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4= - dependencies: - expand-tilde "^2.0.2" - homedir-polyfill "^1.0.1" - ini "^1.3.4" - is-windows "^1.0.1" - which "^1.2.14" - 
-global-prefix@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" - integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== - dependencies: - ini "^1.3.5" - kind-of "^6.0.2" - which "^1.3.1" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2: - version "4.2.3" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" - integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= - -has-symbols@^1.0.0, has-symbols@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" - integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== - -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -hash-base@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918" - integrity sha1-X8hoaEfs1zSZQDMZprCj8/auSRg= - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - -he@1.2.x: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - 
-hmac-drbg@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - -homedir-polyfill@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" - integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== - dependencies: - parse-passwd "^1.0.0" - -html-minifier@^3.2.3: - version "3.5.21" - resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" - integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== - dependencies: - camel-case "3.0.x" - clean-css "4.2.x" - commander "2.17.x" - he "1.2.x" - param-case "2.1.x" - relateurl "0.2.x" - uglify-js "3.4.x" - -html-webpack-plugin@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-3.2.0.tgz#b01abbd723acaaa7b37b6af4492ebda03d9dd37b" - integrity sha1-sBq71yOsqqeze2r0SS69oD2d03s= - dependencies: - html-minifier "^3.2.3" - loader-utils "^0.2.16" - lodash "^4.17.3" - pretty-error "^2.0.2" - tapable "^1.0.0" - toposort "^1.0.0" - util.promisify "1.0.0" - -htmlparser2@^3.3.0: - version "3.10.1" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" - integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== - dependencies: - domelementtype "^1.3.1" - domhandler "^2.3.0" - domutils "^1.5.1" - entities "^1.1.1" - inherits "^2.0.1" - readable-stream "^3.1.1" - -https-browserify@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" - integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= - -ieee754@^1.1.4: - version "1.1.13" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" - integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== - -iferr@^0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" - integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= - -import-local@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" - integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== - dependencies: - pkg-dir "^3.0.0" - resolve-cwd "^2.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= - -infer-owner@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" - integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= - 
dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" - integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= - -inherits@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -ini@^1.3.4, ini@^1.3.5: - version "1.3.5" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" - integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== - -interpret@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296" - integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw== - -invert-kv@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" - integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - dependencies: - binary-extensions "^1.0.0" - -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - -is-callable@^1.1.4, is-callable@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.5.tgz#f7e46b596890456db74e7f6e976cb3273d06faab" - integrity sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q== - -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - 
-is-date-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" - integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== - -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - -is-extglob@^2.1.0, is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= - dependencies: - is-extglob "^2.1.0" - -is-glob@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" - integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= - -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - -is-plain-object@^2.0.3, is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-reference@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.1.4.tgz#3f95849886ddb70256a3e6d062b1a68c13c51427" - integrity 
sha512-uJA/CDPO3Tao3GTrxYn6AwkM4nUPJiGGYu5+cB8qbC7WGFlrKZbiRo7SFKxUAEpFUfiHofWCXBUNhvYJMh+6zw== - dependencies: - "@types/estree" "0.0.39" - -is-regex@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae" - integrity sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ== - dependencies: - has "^1.0.3" - -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - -is-symbol@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" - integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== - dependencies: - has-symbols "^1.0.1" - -is-windows@^1.0.1, is-windows@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= - -jest-worker@^24.9.0: - version "24.9.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-24.9.0.tgz#5dbfdb5b2d322e98567898238a9697bcce67b3e5" - integrity sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw== - dependencies: - merge-stream "^2.0.0" - supports-color "^6.1.0" - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -json-parse-better-errors@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" - integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved 
"https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json5@^0.5.0: - version "0.5.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= - -json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== - dependencies: - minimist "^1.2.0" - -json5@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6" - integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ== - dependencies: - minimist "^1.2.0" - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -lcid@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" - integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== - dependencies: - invert-kv "^2.0.0" - -loader-runner@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357" - integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw== - -loader-utils@1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7" - integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA== - dependencies: - big.js "^5.2.2" - emojis-list "^2.0.0" - json5 "^1.0.1" - -loader-utils@^0.2.16: - version "0.2.17" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348" - integrity sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g= - dependencies: - big.js "^3.1.3" - emojis-list "^2.0.0" - json5 "^0.5.0" - object-assign "^4.0.1" - -loader-utils@^1.1.0, loader-utils@^1.2.3: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" - integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== - dependencies: - big.js "^5.2.2" 
- emojis-list "^3.0.0" - json5 "^1.0.1" - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -lodash@^4.17.13, lodash@^4.17.3: - version "4.17.15" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" - integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== - -long@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/long/-/long-3.2.0.tgz#d821b7138ca1cb581c172990ef14db200b5c474b" - integrity sha1-2CG3E4yhy1gcFymQ7xTbIAtcR0s= - -lower-case@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" - integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw= - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - -magic-string@^0.25.2: - version "0.25.7" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" - integrity sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA== - dependencies: - sourcemap-codec "^1.4.4" - -make-dir@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" - integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== - dependencies: - pify "^4.0.1" - semver "^5.6.0" - -mamacro@^0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/mamacro/-/mamacro-0.0.3.tgz#ad2c9576197c9f1abf308d0787865bd975a3f3e4" - integrity sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA== - -map-age-cleaner@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" - integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== - dependencies: - p-defer "^1.0.0" - -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -mem@^4.0.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" - integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== - 
dependencies: - map-age-cleaner "^0.1.1" - mimic-fn "^2.0.0" - p-is-promise "^2.0.0" - -memory-fs@^0.4.0, memory-fs@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -memory-fs@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" - integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" - -miller-rabin@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" - integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== - dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" - -mimic-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= - -minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= - -minimist@^1.2.0: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== - -mississippi@^3.0.0: - version "3.0.0" - 
resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" - integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== - dependencies: - concat-stream "^1.5.0" - duplexify "^3.4.2" - end-of-stream "^1.1.0" - flush-write-stream "^1.0.0" - from2 "^2.1.0" - parallel-transform "^1.1.0" - pump "^3.0.0" - pumpify "^1.3.3" - stream-each "^1.1.0" - through2 "^2.0.0" - -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= - dependencies: - minimist "0.0.8" - -move-concurrently@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" - integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= - dependencies: - aproba "^1.1.1" - copy-concurrently "^1.0.0" - fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.1" - rimraf "^2.5.4" - run-queue "^1.0.3" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - -ms@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -nan@^2.12.1: - version "2.14.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" - integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== - -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -neo-async@^2.5.0, neo-async@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" - integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== - -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - -no-case@^2.2.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" - integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== - dependencies: - lower-case "^1.1.1" - -node-libs-browser@^2.2.1: - version "2.2.1" - resolved 
"https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" - integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== - dependencies: - assert "^1.1.1" - browserify-zlib "^0.2.0" - buffer "^4.3.0" - console-browserify "^1.1.0" - constants-browserify "^1.0.0" - crypto-browserify "^3.11.0" - domain-browser "^1.1.1" - events "^3.0.0" - https-browserify "^1.0.0" - os-browserify "^0.3.0" - path-browserify "0.0.1" - process "^0.11.10" - punycode "^1.2.4" - querystring-es3 "^0.2.0" - readable-stream "^2.3.3" - stream-browserify "^2.0.1" - stream-http "^2.7.2" - string_decoder "^1.0.0" - timers-browserify "^2.0.4" - tty-browserify "0.0.0" - url "^0.11.0" - util "^0.11.0" - vm-browserify "^1.0.1" - -normalize-path@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= - dependencies: - remove-trailing-separator "^1.0.1" - -normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" - -nth-check@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -object-assign@^4.0.1, object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= - -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - -object-inspect@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" - integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== - -object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - -object.assign@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" - integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== - dependencies: - 
define-properties "^1.1.2" - function-bind "^1.1.1" - has-symbols "^1.0.0" - object-keys "^1.0.11" - -object.getownpropertydescriptors@^2.0.3: - version "2.1.0" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649" - integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.0-next.1" - -object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= - dependencies: - isobject "^3.0.1" - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -os-browserify@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" - integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= - -os-locale@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" - integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== - dependencies: - execa "^1.0.0" - lcid "^2.0.0" - mem "^4.0.0" - -p-defer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" - integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= - -p-is-promise@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" - integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== - -p-limit@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e" - integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ== - dependencies: - p-try "^2.0.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -pako@~1.0.5: - version "1.0.11" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" - integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== - -parallel-transform@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" - integrity 
sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== - dependencies: - cyclist "^1.0.1" - inherits "^2.0.3" - readable-stream "^2.1.5" - -param-case@2.1.x: - version "2.1.1" - resolved "https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" - integrity sha1-35T9jPZTHs915r75oIWPvHK+Ikc= - dependencies: - no-case "^2.2.0" - -parse-asn1@^5.0.0: - version "5.1.5" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.5.tgz#003271343da58dc94cace494faef3d2147ecea0e" - integrity sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ== - dependencies: - asn1.js "^4.0.0" - browserify-aes "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - safe-buffer "^5.1.1" - -parse-passwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" - integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= - -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - -path-browserify@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" - integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== - -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= - -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - -path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== - -pbkdf2@^3.0.3: - version "3.0.17" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.17.tgz#976c206530617b14ebb32114239f7b09336e93a6" - integrity sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -pify@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" - integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" 
- -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - -pretty-error@^2.0.2: - version "2.1.1" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3" - integrity sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM= - dependencies: - renderkid "^2.0.1" - utila "~0.4" - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -process@^0.11.10: - version "0.11.10" - resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" - integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= - -promise-inflight@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" - integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= - -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= - -public-encrypt@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" - integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - safe-buffer "^5.1.2" - -pump@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" - integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pumpify@^1.3.3: - version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" - integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== - dependencies: - duplexify "^3.6.0" - inherits "^2.0.3" - pump "^2.0.0" - -punycode@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= - -punycode@^1.2.4: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= - -punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -querystring-es3@^0.2.0: - version "0.2.1" - resolved 
"https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" - integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= - -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -randomfill@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" - integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== - dependencies: - randombytes "^2.0.5" - safe-buffer "^5.1.0" - -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.1.1: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - -relateurl@0.2.x: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= - -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -renderkid@^2.0.1: - version "2.0.3" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.3.tgz#380179c2ff5ae1365c522bf2fcfcff01c5b74149" - integrity sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA== - dependencies: - css-select "^1.1.0" - dom-converter "^0.2" - htmlparser2 "^3.3.0" 
- strip-ansi "^3.0.0" - utila "^0.4.0" - -repeat-element@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" - integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== - -repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - -require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - -resolve-cwd@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" - integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= - dependencies: - resolve-from "^3.0.0" - -resolve-dir@^1.0.0, resolve-dir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" - integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M= - dependencies: - expand-tilde "^2.0.0" - global-modules "^1.0.0" - -resolve-from@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" - integrity sha1-six699nWiBvItuZTM17rywoYh0g= - -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - -resolve@^1.11.0, resolve@^1.14.1, resolve@^1.14.2, resolve@^1.3.2: - version "1.15.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.15.1.tgz#27bdcdeffeaf2d6244b95bb0f9f4b4653451f3e8" - integrity sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w== - dependencies: - path-parse "^1.0.6" - -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - -rimraf@^2.5.4, rimraf@^2.6.3: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - -rollup-plugin-dts@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-dts/-/rollup-plugin-dts-1.3.0.tgz#34de28ea8c9464392f2b0d4cb8cd0fe7c51d482e" - integrity sha512-G08HZvwliQdRbAOwNb1VnyKuRSp1EXpKPW5FrvRcHbxsmPP2Co443zZ0p8tSCTjuC5xNYyZ9VMzjcwtqrPn6Ew== - optionalDependencies: - 
"@babel/code-frame" "^7.8.3" - -rollup-plugin-rust@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-rust/-/rollup-plugin-rust-1.2.0.tgz#580ec7203aa0f1cf4ae370492c4635a3480caf60" - integrity sha512-LI+Thy5JrAa2eAbGAv6zkEFeR6aKgirI6Gvk7LqeFOaGpRjfSIqcOg+4t0Qe5dXixDPYFlL+qIbdW2O6IIvEEA== - dependencies: - rollup-pluginutils "^2.3.1" - -rollup-plugin-terser@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-5.3.0.tgz#9c0dd33d5771df9630cd027d6a2559187f65885e" - integrity sha512-XGMJihTIO3eIBsVGq7jiNYOdDMb3pVxuzY0uhOE/FM4x/u9nQgr3+McsjzqBn3QfHIpNSZmFnpoKAwHBEcsT7g== - dependencies: - "@babel/code-frame" "^7.5.5" - jest-worker "^24.9.0" - rollup-pluginutils "^2.8.2" - serialize-javascript "^2.1.2" - terser "^4.6.2" - -rollup-pluginutils@^2.3.1, rollup-pluginutils@^2.8.2: - version "2.8.2" - resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e" - integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ== - dependencies: - estree-walker "^0.6.1" - -rollup@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.0.6.tgz#865d6bb15a28cff3429ea1dc57236013661cb9de" - integrity sha512-P42IlI6a/bxh52ed8hEXXe44LcHfep2f26OZybMJPN1TTQftibvQEl3CWeOmJrzqGbFxOA000QXDWO9WJaOQpA== - optionalDependencies: - fsevents "~2.1.2" - -run-queue@^1.0.0, run-queue@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" - integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= - dependencies: - aproba "^1.1.1" - -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" - integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== - -safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - -schema-utils@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" - integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== - dependencies: - ajv "^6.1.0" - ajv-errors "^1.0.0" - ajv-keywords "^3.1.0" - -semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -serialize-javascript@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-2.1.2.tgz#ecec53b0e0317bdc95ef76ab7074b7384785fa61" - integrity sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ== - -set-blocking@^2.0.0: - version "2.0.0" 
- resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - -setimmediate@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= - -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - -signal-exit@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" - integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= - -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -source-list-map@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" 
- resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-support@~0.5.12: - version "0.5.16" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042" - integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-url@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" - integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= - -source-map@^0.5.0, source-map@^0.5.6: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= - -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sourcemap-codec@^1.4.4: - version "1.4.8" - resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" - integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== - -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - -ssri@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" - integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== - dependencies: - figgy-pudding "^3.5.1" - -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - -stream-browserify@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" - integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== - dependencies: - inherits "~2.0.1" - readable-stream "^2.0.2" - -stream-each@^1.1.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" - integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== - dependencies: - end-of-stream "^1.1.0" - stream-shift "^1.0.0" - -stream-http@^2.7.2: - version "2.8.3" - resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" - integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== - dependencies: - builtin-status-codes "^3.0.0" - inherits "^2.0.1" - readable-stream "^2.3.6" - to-arraybuffer "^1.0.0" - xtend "^4.0.0" - -stream-shift@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" - integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== - -string-width@^3.0.0, string-width@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string.prototype.trimleft@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74" - integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag== - dependencies: - define-properties "^1.1.3" - function-bind "^1.1.1" - -string.prototype.trimright@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9" - integrity sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g== - dependencies: - define-properties "^1.1.3" - function-bind "^1.1.1" - -string_decoder@^1.0.0, string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - -supports-color@6.1.0, supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -tapable@^1.0.0, tapable@^1.1.3: - version "1.1.3" - resolved 
"https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -terser-webpack-plugin@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz#5ecaf2dbdc5fb99745fd06791f46fc9ddb1c9a7c" - integrity sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA== - dependencies: - cacache "^12.0.2" - find-cache-dir "^2.1.0" - is-wsl "^1.1.0" - schema-utils "^1.0.0" - serialize-javascript "^2.1.2" - source-map "^0.6.1" - terser "^4.1.2" - webpack-sources "^1.4.0" - worker-farm "^1.7.0" - -terser@^4.1.2, terser@^4.6.2: - version "4.6.6" - resolved "https://registry.yarnpkg.com/terser/-/terser-4.6.6.tgz#da2382e6cafbdf86205e82fb9a115bd664d54863" - integrity sha512-4lYPyeNmstjIIESr/ysHg2vUPRGf2tzF9z2yYwnowXVuVzLEamPN1Gfrz7f8I9uEPuHcbFlW4PLIAsJoxXyJ1g== - dependencies: - commander "^2.20.0" - source-map "~0.6.1" - source-map-support "~0.5.12" - -text-encoding@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/text-encoding/-/text-encoding-0.7.0.tgz#f895e836e45990624086601798ea98e8f36ee643" - integrity sha512-oJQ3f1hrOnbRLOcwKz0Liq2IcrvDeZRHXhd9RgLrsT+DjWY/nty1Hi7v3dtkaEYbPYe0mUoOfzRrMwfXXwgPUA== - -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" - integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" - -timers-browserify@^2.0.4: - version "2.0.11" - resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" - integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== - dependencies: - setimmediate "^1.0.4" - -to-arraybuffer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" - integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= - -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - -toposort@^1.0.0: - version "1.0.7" - resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" - integrity 
sha1-LmhELZ9k7HILjMieZEOsbKqVACk= - -tslib@^1.9.0: - version "1.11.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.11.1.tgz#eb15d128827fbee2841549e171f45ed338ac7e35" - integrity sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA== - -tty-browserify@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" - integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= - -typescript@^3.8.3: - version "3.8.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.8.3.tgz#409eb8544ea0335711205869ec458ab109ee1061" - integrity sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w== - -uglify-js@3.4.x: - version "3.4.10" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" - integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== - dependencies: - commander "~2.19.0" - source-map "~0.6.1" - -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - -unique-filename@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" - integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - dependencies: - unique-slug "^2.0.0" - -unique-slug@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" - integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== - dependencies: - imurmurhash "^0.1.4" - -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - -upper-case@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" - integrity sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg= - -uri-js@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" - integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== - dependencies: - punycode "^2.1.0" - -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - -url@^0.11.0: - version "0.11.0" - resolved 
"https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" - integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= - dependencies: - punycode "1.3.2" - querystring "0.2.0" - -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@^1.0.1, util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -util.promisify@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" - integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA== - dependencies: - define-properties "^1.1.2" - object.getownpropertydescriptors "^2.0.3" - -util@0.10.3: - version "0.10.3" - resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" - integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= - dependencies: - inherits "2.0.1" - -util@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" - integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== - dependencies: - inherits "2.0.3" - -utila@^0.4.0, utila@~0.4: - version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= - -v8-compile-cache@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.0.3.tgz#00f7494d2ae2b688cfe2899df6ed2c54bef91dbe" - integrity sha512-CNmdbwQMBjwr9Gsmohvm0pbL954tJrNzf6gWL3K+QMQf00PF7ERGrEiLgjuU3mKreLC2MeGhUsNV9ybTbLgd3w== - -vm-browserify@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" - integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== - -wasm-dce@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wasm-dce/-/wasm-dce-1.0.2.tgz#7e21e566fa17c61e8e257742a377a5bdb8f2e4f5" - integrity sha512-Fq1+nu43ybsjSnBquLrW/cULmKs61qbv9k8ep13QUe0nABBezMoNAA+j6QY66MW0/eoDVDp1rjXDqQ2VKyS/Xg== - dependencies: - "@babel/core" "^7.0.0-beta.39" - "@babel/traverse" "^7.0.0-beta.39" - "@babel/types" "^7.0.0-beta.39" - babylon "^7.0.0-beta.39" - webassembly-interpreter "0.0.30" - -wasm-loader@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/wasm-loader/-/wasm-loader-1.3.0.tgz#a123a6b6c9a9dac60de38449703be3537742e155" - integrity sha512-R4s75XH+o8qM+WaRrAU9S2rbAMDzob18/S3V8R9ZoFpZkPWLAohWWlzWAp1ybeTkOuuku/X1zJtxiV0pBYxZww== - dependencies: - loader-utils "^1.1.0" - wasm-dce "^1.0.0" - -watchpack@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.6.0.tgz#4bc12c2ebe8aa277a71f1d3f14d685c7b446cd00" - integrity sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA== - dependencies: - chokidar "^2.0.2" - graceful-fs "^4.1.2" - neo-async "^2.5.0" - -webassembly-floating-point-hex-parser@0.1.2: - version "0.1.2" - resolved 
"https://registry.yarnpkg.com/webassembly-floating-point-hex-parser/-/webassembly-floating-point-hex-parser-0.1.2.tgz#85bb01f54e68690c2645ea0cfad26c1110fdf988" - integrity sha512-TUf1H++8U10+stJbFydnvrpG5Sznz5Rilez/oZlV5zI0C/e4cSxd8rALAJ8VpTvjVWxLmL3SVSJUK6Ap9AoiNg== - -webassembly-interpreter@0.0.30: - version "0.0.30" - resolved "https://registry.yarnpkg.com/webassembly-interpreter/-/webassembly-interpreter-0.0.30.tgz#f35aaec0fff2e6fd9ca7277eb1a9059dccedcb7f" - integrity sha512-+Jdy2piEvz9T5j751mOE8+rBO12p+nNW6Fg4kJZ+zP1oUfsm+151sbAbM8AFxWTURmWCGP+r8Lxwfv3pzN1bCQ== - dependencies: - "@babel/code-frame" "^7.0.0-beta.36" - long "^3.2.0" - webassembly-floating-point-hex-parser "0.1.2" - -webpack-cli@^3.3.11: - version "3.3.11" - resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-3.3.11.tgz#3bf21889bf597b5d82c38f215135a411edfdc631" - integrity sha512-dXlfuml7xvAFwYUPsrtQAA9e4DOe58gnzSxhgrO/ZM/gyXTBowrsYeubyN4mqGhYdpXMFNyQ6emjJS9M7OBd4g== - dependencies: - chalk "2.4.2" - cross-spawn "6.0.5" - enhanced-resolve "4.1.0" - findup-sync "3.0.0" - global-modules "2.0.0" - import-local "2.0.0" - interpret "1.2.0" - loader-utils "1.2.3" - supports-color "6.1.0" - v8-compile-cache "2.0.3" - yargs "13.2.4" - -webpack-sources@^1.4.0, webpack-sources@^1.4.1: - version "1.4.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack@^4.42.0: - version "4.42.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.42.0.tgz#b901635dd6179391d90740a63c93f76f39883eb8" - integrity sha512-EzJRHvwQyBiYrYqhyjW9AqM90dE4+s1/XtCfn7uWg6cS72zH+2VPFAlsnW0+W0cDi0XRjNKUMoJtpSi50+Ph6w== - dependencies: - "@webassemblyjs/ast" "1.8.5" - "@webassemblyjs/helper-module-context" "1.8.5" - "@webassemblyjs/wasm-edit" "1.8.5" - "@webassemblyjs/wasm-parser" "1.8.5" - acorn "^6.2.1" - ajv "^6.10.2" - ajv-keywords "^3.4.1" - chrome-trace-event "^1.0.2" - enhanced-resolve "^4.1.0" - eslint-scope "^4.0.3" - json-parse-better-errors "^1.0.2" - loader-runner "^2.4.0" - loader-utils "^1.2.3" - memory-fs "^0.4.1" - micromatch "^3.1.10" - mkdirp "^0.5.1" - neo-async "^2.6.1" - node-libs-browser "^2.2.1" - schema-utils "^1.0.0" - tapable "^1.1.3" - terser-webpack-plugin "^1.4.3" - watchpack "^1.6.0" - webpack-sources "^1.4.1" - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - -which@^1.2.14, which@^1.2.9, which@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -worker-farm@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" - integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== - dependencies: - errno "~0.1.7" - -wrap-ansi@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" - integrity 
sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== - dependencies: - ansi-styles "^3.2.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -xtend@^4.0.0, xtend@~4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" - integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== - -yallist@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - -yargs-parser@^13.1.0: - version "13.1.2" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" - integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs@13.2.4: - version "13.2.4" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.2.4.tgz#0b562b794016eb9651b98bd37acf364aa5d6dc83" - integrity sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - os-locale "^3.1.0" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.0" - -"zksync-crypto@file:pkg": - version "0.1.0" diff --git a/js/zksync.js/package.json b/js/zksync.js/package.json index d891d8ede4..76c62d7ab2 100644 --- a/js/zksync.js/package.json +++ b/js/zksync.js/package.json @@ -13,9 +13,7 @@ "js-sha256": "^0.9.0", "websocket": "^1.0.30", "websocket-as-promised": "^0.10.1", - "zksync-crypto": "../zksync-crypto/pkg", - "zksync-crypto-node": "../zksync-crypto/nodejspgk", - "zksync-crypto-web": "../zksync-crypto/webpgk" + "zksync-crypto": "../zksync-crypto" }, "peerDependencies": { "ethers": "^4.0.33" diff --git a/js/zksync.js/src/crypto.ts b/js/zksync.js/src/crypto.ts index 2b878b8ede..6673989012 100644 --- a/js/zksync.js/src/crypto.ts +++ b/js/zksync.js/src/crypto.ts @@ -1,13 +1,7 @@ import BN = require("bn.js"); import { Signature } from "./types"; -const zksync_crypto = (async () => { - if (typeof window !== "undefined" && window.window === window) { - return await import("zksync-crypto"); - } else { - return await import("zksync-crypto-node"); - } -})(); +const zksync_crypto = import("zksync-crypto"); export async function signTransactionBytes(privKey: BN, bytes: Buffer): Promise { const { sign_musig_sha256 } = await zksync_crypto; diff --git a/js/zksync.js/yarn.lock b/js/zksync.js/yarn.lock index 0f0e788f3f..283c605c8d 100644 --- a/js/zksync.js/yarn.lock +++ b/js/zksync.js/yarn.lock @@ -1331,11 +1331,5 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.0.tgz#fcbe2db63610361afcc5eb9e0ac91e976d046114" integrity 
sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg== -zksync-crypto-node@../zksync-crypto/nodejspgk: - version "0.1.0" - -zksync-crypto-web@../zksync-crypto/webpgk: - version "0.1.0" - -zksync-crypto@../zksync-crypto/pkg: - version "0.1.0" +zksync-crypto@../zksync-crypto: + version "0.0.0" From 08b33343f952cb38c6593f60a3bc6b57accf1915 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 15:59:43 +0200 Subject: [PATCH 058/186] Import zksync-crypto statically --- Makefile | 1 + js/client/vue.config.js | 7 +------ js/explorer/yarn.lock | 10 ++-------- js/tests/loading.ts | 2 -- js/tests/yarn.lock | 19 +------------------ js/zksync-crypto/.gitignore | 1 - js/zksync-crypto/build.sh | 8 ++++++++ js/zksync-crypto/package.json | 6 +++--- js/zksync-crypto/src/lib.rs | 16 +--------------- js/zksync-crypto/yarn.lock | 4 ---- js/zksync.js/src/crypto.ts | 32 +++++++++++++++++--------------- js/zksync.js/src/signer.ts | 20 ++++++++++---------- js/zksync.js/src/wallet.ts | 12 ++++++------ 13 files changed, 50 insertions(+), 88 deletions(-) delete mode 100644 js/tests/loading.ts create mode 100755 js/zksync-crypto/build.sh delete mode 100644 js/zksync-crypto/yarn.lock diff --git a/Makefile b/Makefile index 77a6779cee..5d137c47fb 100644 --- a/Makefile +++ b/Makefile @@ -15,6 +15,7 @@ init: @bin/init yarn: + @cd js/zksync-crypto && yarn build @cd js/zksync.js && yarn && yarn build @cd js/client && yarn @cd js/explorer && yarn diff --git a/js/client/vue.config.js b/js/client/vue.config.js index 84f181bbab..214f27ffe7 100644 --- a/js/client/vue.config.js +++ b/js/client/vue.config.js @@ -1,12 +1,7 @@ module.exports = { publicPath: process.env.NODE_ENV === 'production' ? '/client/' - : '/', - configureWebpack: { - devServer: { - // mimeTypes: { 'application/wasm': ['wasm'] } - }, - }, + : '/', chainWebpack: config => { config.optimization.minimize(process.env.NODE_ENV === 'production'); config.resolve.symlinks(false); diff --git a/js/explorer/yarn.lock b/js/explorer/yarn.lock index af163592d1..8636a9dd1e 100644 --- a/js/explorer/yarn.lock +++ b/js/explorer/yarn.lock @@ -9057,14 +9057,8 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" -zksync-crypto-node@../zksync-crypto/nodejspgk: - version "0.1.0" - -zksync-crypto-web@../zksync-crypto/webpgk: - version "0.1.0" - -zksync-crypto@../zksync-crypto/pkg: - version "0.1.0" +zksync-crypto@../zksync-crypto: + version "0.0.0" "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/tests/loading.ts b/js/tests/loading.ts deleted file mode 100644 index 31dad747fc..0000000000 --- a/js/tests/loading.ts +++ /dev/null @@ -1,2 +0,0 @@ -import example from "example-node-wasm"; -example.then(console.log); diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index 4ff58f36d2..3c4428a4df 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -69,11 +69,6 @@ brorand@^1.0.1: resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= -buffer-es6@^4.9.3: - version "4.9.3" - resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" - integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= - buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -227,12 +222,6 @@ ethers@4.0.33: uuid "2.0.1" xmlhttprequest "1.8.0" -example-node-wasm@../zksync-crypto: - version "0.0.0" 
- dependencies: - buffer-es6 "^4.9.3" - zksync-crypto "file:../../../../Library/Caches/Yarn/v6/npm-example-node-wasm-0.0.0-1da603b1-efda-4ef8-b578-586eccd35c48-1584708911723/node_modules/example-node-wasm/pkg" - ext@^1.1.2: version "1.4.0" resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244" @@ -545,13 +534,7 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -zksync-crypto-node@../zksync-crypto/nodejspgk: - version "0.1.0" - -zksync-crypto-web@../zksync-crypto/webpgk: - version "0.1.0" - -zksync-crypto@../zksync-crypto/pkg, "zksync-crypto@file:../zksync-crypto/pkg": +zksync-crypto@../zksync-crypto: version "0.0.0" "zksync@link:../zksync.js": diff --git a/js/zksync-crypto/.gitignore b/js/zksync-crypto/.gitignore index bb9a2539a7..f5de7310e4 100644 --- a/js/zksync-crypto/.gitignore +++ b/js/zksync-crypto/.gitignore @@ -2,5 +2,4 @@ **/*.rs.bk Cargo.lock bin/ -pkg wasm-pack.log diff --git a/js/zksync-crypto/build.sh b/js/zksync-crypto/build.sh new file mode 100755 index 0000000000..9bf1af573c --- /dev/null +++ b/js/zksync-crypto/build.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +set -e + +which wasm-pack || cargo install wasm-pack +wasm-pack build --release --out-name=web --out-dir=dist +wasm-pack build --release --target=nodejs --out-name=node --out-dir=dist +rm dist/package.json diff --git a/js/zksync-crypto/package.json b/js/zksync-crypto/package.json index 2ff4cc03a3..a2699b6ab3 100644 --- a/js/zksync-crypto/package.json +++ b/js/zksync-crypto/package.json @@ -1,10 +1,10 @@ { "name": "zksync-crypto", "version": "0.0.0", - "browser": "pkg/web.js", - "main": "pkg/node.js", + "browser": "dist/web.js", + "main": "dist/node.js", "scripts": { - "build": "wasm-pack build --out-name=web && wasm-pack build --target=nodejs --out-name=node && rm pkg/package.json", + "build": "./build.sh", "test": "echo \"Error: no test specified\" && exit 1" } } diff --git a/js/zksync-crypto/src/lib.rs b/js/zksync-crypto/src/lib.rs index 8a60327bef..d9f932304b 100644 --- a/js/zksync-crypto/src/lib.rs +++ b/js/zksync-crypto/src/lib.rs @@ -30,7 +30,7 @@ pub fn init() { set_panic_hook(); } -#[wasm_bindgen] +#[wasm_bindgen(js_name = privateKeyFromSeed)] pub fn private_key_from_seed(seed: &[u8]) -> Vec { if seed.len() < 32 { panic!("Seed is too short"); @@ -99,17 +99,3 @@ pub fn sign_musig_sha256(private_key: &[u8], msg: &[u8]) -> Vec { result.extend_from_slice(&packed_signature[..]); result } - -//#[test] -//fn test_pub_key_hash() { -// let p_g = FixedGenerators::SpendingKeyGenerator; -// -// let sk = { -// PrivateKey::(Fs::from_str("5").unwrap()) -// }; -// -// let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params)); -// println!("{:?}", pubkey.0.into_xy()); -// println!("{}",hex::encode(&pub_key_hash(&pubkey))); -// panic!() -//} diff --git a/js/zksync-crypto/yarn.lock b/js/zksync-crypto/yarn.lock deleted file mode 100644 index fb57ccd13a..0000000000 --- a/js/zksync-crypto/yarn.lock +++ /dev/null @@ -1,4 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - diff --git a/js/zksync.js/src/crypto.ts b/js/zksync.js/src/crypto.ts index 6673989012..7f684044b6 100644 --- a/js/zksync.js/src/crypto.ts +++ b/js/zksync.js/src/crypto.ts @@ -1,25 +1,27 @@ import BN = require("bn.js"); import { Signature } from "./types"; -const zksync_crypto = import("zksync-crypto"); +import { private_key_to_pubkey_hash, sign_musig_sha256 } from "zksync-crypto"; -export async function signTransactionBytes(privKey: BN, bytes: Buffer): Promise { - const { sign_musig_sha256 } = await zksync_crypto; - const signaturePacked = sign_musig_sha256(privKey.toArrayLike(Buffer), bytes); - const pubKey = Buffer.from(signaturePacked.slice(0,32)).toString("hex"); - const signature = Buffer.from(signaturePacked.slice(32, 32 + 64)).toString("hex"); +export { privateKeyFromSeed } from "zksync-crypto"; + +export function signTransactionBytes( + privKey: Uint8Array, + bytes: Uint8Array +): Signature { + const signaturePacked = sign_musig_sha256(privKey, bytes); + const pubKey = Buffer.from(signaturePacked.slice(0, 32)).toString("hex"); + const signature = Buffer.from(signaturePacked.slice(32, 32 + 64)).toString( + "hex" + ); return { pubKey, - signature, + signature }; } -export async function privateKeyFromSeed(seed: Buffer): Promise { - const { private_key_from_seed } = await zksync_crypto; - return new BN(private_key_from_seed(seed)); -} - -export async function privateKeyToPubKeyHash(privateKey: BN): Promise { - const { private_key_to_pubkey_hash } = await zksync_crypto; - return `sync:${Buffer.from(private_key_to_pubkey_hash(privateKey.toArrayLike(Buffer))).toString("hex")}` +export function privateKeyToPubKeyHash(privateKey: Uint8Array): string { + return `sync:${Buffer.from(private_key_to_pubkey_hash(privateKey)).toString( + "hex" + )}`; } diff --git a/js/zksync.js/src/signer.ts b/js/zksync.js/src/signer.ts index 92ba9388de..a7edaf962d 100644 --- a/js/zksync.js/src/signer.ts +++ b/js/zksync.js/src/signer.ts @@ -19,18 +19,18 @@ export class Signer { this.privateKey = privKey; } - async pubKeyHash(): Promise { - return await privateKeyToPubKeyHash(this.privateKey); + pubKeyHash(): PubKeyHash { + return privateKeyToPubKeyHash(this.privateKey); } - async signSyncTransfer(transfer: { + signSyncTransfer(transfer: { from: Address; to: Address; tokenId: number; amount: utils.BigNumberish; fee: utils.BigNumberish; nonce: number; - }): Promise { + }): Transfer { const type = Buffer.from([5]); // tx type const from = serializeAddress(transfer.from); const to = serializeAddress(transfer.to); @@ -48,7 +48,7 @@ export class Signer { nonce ]); - const signature = await signTransactionBytes(this.privateKey, msgBytes); + const signature = signTransactionBytes(this.privateKey, msgBytes); return { type: "Transfer", @@ -62,14 +62,14 @@ export class Signer { }; } - async signSyncWithdraw(withdraw: { + signSyncWithdraw(withdraw: { from: Address; ethAddress: string; tokenId: number; amount: utils.BigNumberish; fee: utils.BigNumberish; nonce: number; - }): Promise { + }): Withdraw { const typeBytes = Buffer.from([3]); const accountBytes = serializeAddress(withdraw.from); const ethAddressBytes = serializeAddress(withdraw.ethAddress); @@ -86,7 +86,7 @@ export class Signer { feeBytes, nonceBytes ]); - const signature = await signTransactionBytes(this.privateKey, msgBytes); + const signature = signTransactionBytes(this.privateKey, msgBytes); return { type: "Withdraw", from: withdraw.from, @@ -103,7 +103,7 @@ export class Signer { return new Signer(pk); } - static async fromSeed(seed: 
Buffer): Promise { + static fromSeed(seed: Buffer): Signer { return new Signer(privateKeyFromSeed(seed)); } @@ -114,7 +114,7 @@ export class Signer { "Only sign this message for a trusted client!" ); const seed = Buffer.from(sign.substr(2), "hex"); - return await Signer.fromSeed(seed); + return Signer.fromSeed(seed); } } diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index 137aab4dd4..d90b95aedd 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -112,7 +112,7 @@ export class Wallet { humanReadableTxInfo ); - const signedTransferTransaction = await this.signer.signSyncTransfer( + const signedTransferTransaction = this.signer.signSyncTransfer( transactionData ); @@ -171,7 +171,7 @@ export class Wallet { humanReadableTxInfo ); - const signedWithdrawTransaction = await this.signer.signSyncWithdraw( + const signedWithdrawTransaction = this.signer.signSyncWithdraw( transactionData ); @@ -193,7 +193,7 @@ export class Wallet { ); } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const signerPubKeyHash = await this.signer.pubKeyHash(); + const signerPubKeyHash = this.signer.pubKeyHash(); return currentPubKeyHash === signerPubKeyHash; } @@ -208,7 +208,7 @@ export class Wallet { } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const newPubKeyHash = await this.signer.pubKeyHash(); + const newPubKeyHash = this.signer.pubKeyHash(); if (currentPubKeyHash == newPubKeyHash) { throw new Error("Current signing key is set already"); @@ -233,7 +233,7 @@ export class Wallet { const txData = { type: "ChangePubKey", account: this.address(), - newPkHash: await this.signer.pubKeyHash(), + newPkHash: this.signer.pubKeyHash(), nonce: numNonce, ethSignature }; @@ -253,7 +253,7 @@ export class Wallet { } const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const newPubKeyHash = await this.signer.pubKeyHash(); + const newPubKeyHash = this.signer.pubKeyHash(); if (currentPubKeyHash == newPubKeyHash) { throw new Error("Current PubKeyHash is the same as new"); From b8e9ca4c4ffbe6ee81eccbb5bb019be18d296bb1 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 18:31:31 +0200 Subject: [PATCH 059/186] Use crypto_exports for zksync-crypto --- Makefile | 2 +- contracts/package.json | 2 +- contracts/yarn.lock | 197 +--------------------------------- docker/ci/Dockerfile | 3 +- js/zksync-crypto/Cargo.toml | 2 +- js/zksync-crypto/src/lib.rs | 4 +- js/zksync-crypto/src/utils.rs | 2 +- 7 files changed, 12 insertions(+), 200 deletions(-) diff --git a/Makefile b/Makefile index 5d137c47fb..0d55ac8429 100644 --- a/Makefile +++ b/Makefile @@ -91,7 +91,7 @@ push-image-nginx: image-nginx image-ci: @docker build -t "${CI_DOCKER_IMAGE}" -f ./docker/ci/Dockerfile . -push-image-ci: +push-image-ci: image-ci docker push "${CI_DOCKER_IMAGE}" # Using RUST+Linux docker image (ekidd/rust-musl-builder) to build for Linux. 
More at https://github.com/emk/rust-musl-builder diff --git a/contracts/package.json b/contracts/package.json index 247f297310..a379036353 100644 --- a/contracts/package.json +++ b/contracts/package.json @@ -24,7 +24,7 @@ "ts-node": "^8.3.0", "tslint": "^5.18.0", "typescript": "^3.5.3", - "zksync": "link:../js/zksync.js" + "zksync": "link:./js/zksync.js" }, "scripts": { "build": "waffle .waffle.json", diff --git a/contracts/yarn.lock b/contracts/yarn.lock index 588333d528..c31350775a 100644 --- a/contracts/yarn.lock +++ b/contracts/yarn.lock @@ -936,21 +936,6 @@ bl@^1.0.0: readable-stream "^2.3.5" safe-buffer "^5.1.1" -blake2b-wasm@^1.1.0: - version "1.1.7" - resolved "https://registry.yarnpkg.com/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz#e4d075da10068e5d4c3ec1fb9accc4d186c55d81" - integrity sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA== - dependencies: - nanoassert "^1.0.0" - -blake2b@^2.1.3: - version "2.1.3" - resolved "https://registry.yarnpkg.com/blake2b/-/blake2b-2.1.3.tgz#f5388be424768e7c6327025dad0c3c6d83351bca" - integrity sha512-pkDss4xFVbMb4270aCyGD3qLv92314Et+FsKzilCLxDz5DuZ2/1g3w4nmBbu6nKApPspnjG7JcwTjGZnduB1yg== - dependencies: - blake2b-wasm "^1.1.0" - nanoassert "^1.0.0" - block-stream@*: version "0.0.9" resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" @@ -978,11 +963,6 @@ bn.js@4.11.8, bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.10.0, bn.js@^4. resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== -bn.js@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.0.0.tgz#5c3d398021b3ddb548c1296a16f857e908f35c70" - integrity sha512-bVwDX8AF+72fIUNuARelKAlQUNtPOfG2fRxorbVvFk4zpHbqLrPdOGfVg5vrKwVzLLePqPBiATaOZNELQzmS0A== - body-parser@1.19.0, body-parser@^1.16.0: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" @@ -1326,11 +1306,6 @@ checkpoint-store@^1.1.0: dependencies: functional-red-black-tree "^1.0.1" -chnl@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/chnl/-/chnl-0.5.0.tgz#ed7aea426043fce5685e547f7297193b6d2bef8c" - integrity sha512-0dl4ZJfAZdLn9mDnWejs5nasZKVnDTwdXV+dkxodYbb//GJDtXNhPlqCCYUb1xF0NQpIB5zlHRDrU1RCB4BRog== - chownr@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6" @@ -1645,11 +1620,6 @@ crypto-browserify@3.12.0: randombytes "^2.0.0" randomfill "^1.0.3" -crypto-js@^3.1.9-1: - version "3.1.9-1" - resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-3.1.9-1.tgz#fda19e761fc077e01ffbfdc6e9fdfc59e8806cd8" - integrity sha1-/aGedh/Ad+Af+/3G6f38WeiAbNg= - crypto-random-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" @@ -1660,14 +1630,6 @@ cyclist@~0.2.2: resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-0.2.2.tgz#1b33792e11e914a2fd6d6ed6447464444e5fa640" integrity sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA= -d@1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" - integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== - dependencies: - es5-ext 
"^0.10.50" - type "^1.0.1" - dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -1965,19 +1927,6 @@ elliptic@^6.0.0, elliptic@^6.4.0, elliptic@^6.4.1: minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.0" -elliptic@^6.5.0: - version "6.5.2" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" - integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== - dependencies: - bn.js "^4.4.0" - brorand "^1.0.1" - hash.js "^1.0.0" - hmac-drbg "^1.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.0" - emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" @@ -2049,23 +1998,6 @@ es-abstract@^1.13.0, es-abstract@^1.5.0, es-abstract@^1.5.1: is-regex "^1.0.4" object-keys "^1.0.12" -es-abstract@^1.17.0-next.0: - version "1.17.0-next.1" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.0-next.1.tgz#94acc93e20b05a6e96dacb5ab2f1cb3a81fc2172" - integrity sha512-7MmGr03N7Rnuid6+wyhD9sHNE2n4tFSwExnU2lQl3lIo2ShXWGePY80zYaoMOmILWv57H0amMjZGHNzzGG70Rw== - dependencies: - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - is-callable "^1.1.4" - is-regex "^1.0.4" - object-inspect "^1.7.0" - object-keys "^1.1.1" - object.assign "^4.1.0" - string.prototype.trimleft "^2.1.0" - string.prototype.trimright "^2.1.0" - es-to-primitive@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377" @@ -2075,33 +2007,6 @@ es-to-primitive@^1.2.0: is-date-object "^1.0.1" is-symbol "^1.0.2" -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@~0.10.14: - version "0.10.50" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.50.tgz#6d0e23a0abdb27018e5ac4fd09b412bc5517a778" - integrity sha512-KMzZTPBkeQV/JcSQhI5/z6d9VWJ3EnQ194USTUwIYZ2ZbpN8+SGXQKt1h68EX44+qt+Fzr8DO17vnxrw7c3agw== - dependencies: - es6-iterator "~2.0.3" - es6-symbol "~3.1.1" - next-tick "^1.0.0" - -es6-iterator@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" - integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= - dependencies: - d "1" - es5-ext "^0.10.35" - es6-symbol "^3.1.1" - es6-promise@^4.0.3: version "4.2.8" resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" @@ -2114,14 +2019,6 @@ es6-promisify@^5.0.0: dependencies: es6-promise "^4.0.3" -es6-symbol@^3.1.1, es6-symbol@~3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77" - integrity sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc= - dependencies: - d "1" - es5-ext "~0.10.14" - escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ 
-2882,11 +2779,6 @@ flat-cache@^2.0.1: rimraf "2.6.3" write "1.0.3" -flat-options@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/flat-options/-/flat-options-0.1.3.tgz#56f644aaa095e5f984ecdd329f119e8a196c8aa3" - integrity sha512-z1vH9mb4ly55dWUZZFUeLNqhFWhwSQNngHpK8RQOhFuNw/sWcNDZhkHl3GS1YTHiYxB5qvcbSRbH7X6ThzX9UA== - flat@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2" @@ -3264,11 +3156,6 @@ has-symbols@^1.0.0: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= -has-symbols@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" - integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== - has-to-string-tag-x@^1.2.0: version "1.4.1" resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" @@ -3734,11 +3621,6 @@ jju@^1.1.0: resolved "https://registry.yarnpkg.com/jju/-/jju-1.4.0.tgz#a3abe2718af241a2b2904f84a625970f389ae32a" integrity sha1-o6vicYryQaKykE+EpiWXDzia4yo= -js-sha256@^0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/js-sha256/-/js-sha256-0.9.0.tgz#0b89ac166583e91ef9123644bd3c5334ce9d0966" - integrity sha512-sga3MHh9sgQN2+pJ9VYZ+1LPwXOxuBJBA5nrR5/ofPfuiJBE2hnjsaN8se8JznOmGLN2p49Pe5U/ttafcs/apA== - js-sha3@0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.5.5.tgz#baf0c0e8c54ad5903447df96ade7a4a1bca79a4a" @@ -4511,11 +4393,6 @@ nano-json-stream-parser@^0.1.2: resolved "https://registry.yarnpkg.com/nano-json-stream-parser/-/nano-json-stream-parser-0.1.2.tgz#0cc8f6d0e2b622b479c40d499c46d64b755c6f5f" integrity sha1-DMj20OK2IrR5xA1JnEbWS3Vcb18= -nanoassert@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-1.1.0.tgz#4f3152e09540fde28c76f44b19bbcd1d5a42478d" - integrity sha1-TzFS4JVA/eKMdvRLGbvNHVpCR40= - natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" @@ -4531,11 +4408,6 @@ nested-error-stacks@~2.0.1: resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-2.0.1.tgz#d2cc9fc5235ddb371fc44d506234339c8e4b0a4b" integrity sha512-SrQrok4CATudVzBS7coSz26QRSmlK9TzzoFbeKfcPBUFPjcQM9Rqvr/DlJkOrwI/0KcgvMub1n1g5Jt9EgRn4A== -next-tick@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" - integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= - nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -4694,17 +4566,12 @@ object-assign@^4, object-assign@^4.0.0, object-assign@^4.0.1, object-assign@^4.1 resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= -object-inspect@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" - integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== - object-inspect@~1.6.0: version "1.6.0" resolved 
"https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.6.0.tgz#c70b6cbf72f274aab4c34c0c82f5167bf82cf15b" integrity sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ== -object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: +object-keys@^1.0.11, object-keys@^1.0.12: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== @@ -4714,7 +4581,7 @@ object-keys@~0.4.0: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= -object.assign@4.1.0, object.assign@^4.1.0: +object.assign@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== @@ -5126,11 +4993,6 @@ progress@^2.0.0, progress@^2.0.3: resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== -promise-controller@^0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/promise-controller/-/promise-controller-0.5.2.tgz#f32d2156dc1f8e62703bf9fb0b8158335660c212" - integrity sha512-ymVCGfCxN+A+6TqESLnGZhGfQdQJ08SpIMyft4xQPUDrOgoqzKcQnLIYaqQk7/rPyg4wpKpxBKefeGkvumWgUg== - promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" @@ -5152,15 +5014,6 @@ promise-to-callback@^1.0.0: is-fn "^1.0.0" set-immediate-shim "^1.0.1" -promise.prototype.finally@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/promise.prototype.finally/-/promise.prototype.finally-3.1.2.tgz#b8af89160c9c673cefe3b4c4435b53cfd0287067" - integrity sha512-A2HuJWl2opDH0EafgdjwEw7HysI8ff/n4lW4QEVBCUXFk9QeGecBWv0Deph0UmLe3tTNYegz8MOjsVuE6SMoJA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.0-next.0" - function-bind "^1.1.1" - prompts@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.1.0.tgz#bf90bc71f6065d255ea2bdc0fe6520485c1b45db" @@ -6162,22 +6015,6 @@ string.prototype.trim@~1.1.2: es-abstract "^1.5.0" function-bind "^1.0.2" -string.prototype.trimleft@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74" - integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag== - dependencies: - define-properties "^1.1.3" - function-bind "^1.1.1" - -string.prototype.trimright@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9" - integrity sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g== - dependencies: - define-properties "^1.1.3" - function-bind "^1.1.1" - string_decoder@~0.10.x: version "0.10.31" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" @@ -6529,12 +6366,7 @@ type-is@~1.6.17, type-is@~1.6.18: media-typer 
"0.3.0" mime-types "~2.1.24" -type@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/type/-/type-1.0.1.tgz#084c9a17fcc9151a2cdb1459905c2e45e4bb7d61" - integrity sha512-MAM5dBMJCJNKs9E7JXo4CXRAansRfG0nlJxW7Wf6GZzSOvH31zClSaHdIMWLehe/EGMBkqeC55rrkaOr5Oo7Nw== - -typedarray-to-buffer@^3.1.2, typedarray-to-buffer@^3.1.5: +typedarray-to-buffer@^3.1.2: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== @@ -6981,16 +6813,6 @@ web3@1.0.0-beta.35: web3-shh "1.0.0-beta.35" web3-utils "1.0.0-beta.35" -websocket-as-promised@^0.10.1: - version "0.10.1" - resolved "https://registry.yarnpkg.com/websocket-as-promised/-/websocket-as-promised-0.10.1.tgz#30aef1529fc7e797b8270875a408d83264e52958" - integrity sha512-hKAZPSIGaao4WPoporPkDQpKl9jz0I0Hh9x/Dn05k6u//e44+0rAhMam4/gvBlYZl02wJYELD561bAenyxwJJQ== - dependencies: - chnl "^0.5.0" - flat-options "^0.1.3" - promise-controller "^0.5.2" - promise.prototype.finally "^3.1.0" - websocket@1.0.26: version "1.0.26" resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.26.tgz#a03a01299849c35268c83044aa919c6374be8194" @@ -7001,17 +6823,6 @@ websocket@1.0.26: typedarray-to-buffer "^3.1.2" yaeti "^0.0.6" -websocket@^1.0.30: - version "1.0.31" - resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.31.tgz#e5d0f16c3340ed87670e489ecae6144c79358730" - integrity sha512-VAouplvGKPiKFDTeCCO65vYHsyay8DqoBSlzIO3fayrfOgU94lQN5a1uWVnFrMLceTJw/+fQXR5PGbUVRaHshQ== - dependencies: - debug "^2.2.0" - es5-ext "^0.10.50" - nan "^2.14.0" - typedarray-to-buffer "^3.1.5" - yaeti "^0.0.6" - "websocket@git://github.com/frozeman/WebSocket-Node.git#browserifyCompatible": version "1.0.26" resolved "git://github.com/frozeman/WebSocket-Node.git#6c72925e3f8aaaea8dc8450f97627e85263999f2" @@ -7267,6 +7078,6 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync@link:../js/zksync.js": +"zksync@link:./js/zksync.js": version "0.0.0" uid "" diff --git a/docker/ci/Dockerfile b/docker/ci/Dockerfile index 0f427ee54e..e1a94d013f 100644 --- a/docker/ci/Dockerfile +++ b/docker/ci/Dockerfile @@ -1,8 +1,9 @@ FROM fedora:30 ENV PATH="/root/.cargo/bin:${PATH}" -RUN dnf install -y gettext nodejs nodejs-yarn axel jq postgresql libpq-devel rust make openssl-devel git python gcc-c++ docker perl && dnf clean all \ +RUN dnf install -y gettext nodejs nodejs-yarn axel jq postgresql libpq-devel rust make openssl-devel git python gcc-c++ docker which && dnf clean all \ && curl https://sh.rustup.rs -sSf | bash -s -- -y \ && cargo install diesel_cli --no-default-features --features postgres \ + && cargo install wasm-pack \ && rm -rf ~/.cargo/registry \ && ln -s /usr/bin/nodejs-yarn /usr/bin/yarn diff --git a/js/zksync-crypto/Cargo.toml b/js/zksync-crypto/Cargo.toml index 7d74d707e2..6f16e0b454 100644 --- a/js/zksync-crypto/Cargo.toml +++ b/js/zksync-crypto/Cargo.toml @@ -14,7 +14,7 @@ default = ["console_error_panic_hook"] [dependencies] wasm-bindgen = "0.2.59" -franklin_crypto = { package = "franklin-crypto", git = "https://github.com/matter-labs/franklin-crypto.git", branch="plonk"} +crypto_exports = { path = "../../core/crypto_exports", version = "0.1.0" } ff = { package = "ff_ce", version = "0.6.0"} hex = "0.3" 
sha2 = "0.8" diff --git a/js/zksync-crypto/src/lib.rs b/js/zksync-crypto/src/lib.rs index d9f932304b..38d2588a8d 100644 --- a/js/zksync-crypto/src/lib.rs +++ b/js/zksync-crypto/src/lib.rs @@ -1,6 +1,6 @@ mod utils; -pub use franklin_crypto::bellman::pairing::bn256::{Bn256 as Engine, Fr}; +pub use crypto_exports::franklin_crypto::bellman::pairing::bn256::{Bn256 as Engine, Fr}; pub type Fs = ::Fs; thread_local! { pub static JUBJUB_PARAMS: AltJubjubBn256 = AltJubjubBn256::new(); @@ -8,7 +8,7 @@ thread_local! { use wasm_bindgen::prelude::*; -use franklin_crypto::{ +use crypto_exports::franklin_crypto::{ alt_babyjubjub::{fs::FsRepr, AltJubjubBn256, FixedGenerators}, bellman::pairing::ff::{PrimeField, PrimeFieldRepr}, eddsa::{PrivateKey, PublicKey, Seed}, diff --git a/js/zksync-crypto/src/utils.rs b/js/zksync-crypto/src/utils.rs index 7146b9b593..6bb9cd8726 100644 --- a/js/zksync-crypto/src/utils.rs +++ b/js/zksync-crypto/src/utils.rs @@ -1,6 +1,6 @@ use crate::JUBJUB_PARAMS; use crate::{Engine, Fr}; -use franklin_crypto::{ +use crypto_exports::franklin_crypto::{ bellman::{pairing::ff::PrimeField, BitIterator}, eddsa::PublicKey, pedersen_hash::{baby_pedersen_hash, Personalization}, From 8da16ddbd527c09d92ada4acdef70501e37b813d Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Fri, 20 Mar 2020 19:05:27 +0200 Subject: [PATCH 060/186] add gatekeeper --- bin/deploy-contracts.sh | 2 + contracts/scripts/add-erc20-token.ts | 10 ++-- contracts/scripts/contract-info.ts | 4 +- contracts/scripts/testnet-deploy.ts | 17 ++++-- contracts/src.ts/deploy.ts | 60 +++++++++++++------ .../test/unit_tests/upgradeGatekeeper_test.ts | 3 + etc/env/dev.env.example | 1 + 7 files changed, 67 insertions(+), 30 deletions(-) diff --git a/bin/deploy-contracts.sh b/bin/deploy-contracts.sh index 972661b2f4..7eff48e40d 100755 --- a/bin/deploy-contracts.sh +++ b/bin/deploy-contracts.sh @@ -17,6 +17,7 @@ cd contracts; yarn deploy-no-build | tee ../deploy.log; cd ..; +UPGRADE_GATEKEEPER_ADDR_NEW_VALUE=`grep "UPGRADE_GATEKEEPER_ADDR" deploy.log` GOVERNANCE_TARGET_ADDR_NEW_VALUE=`grep "GOVERNANCE_TARGET_ADDR" deploy.log` VERIFIER_TARGET_ADDR_NEW_VALUE=`grep "VERIFIER_TARGET_ADDR" deploy.log` CONTRACT_TARGET_ADDR_NEW_VALUE=`grep "CONTRACT_TARGET_ADDR" deploy.log` @@ -33,6 +34,7 @@ then cp ./$ENV_FILE logs/$LABEL/$ZKSYNC_ENV.bak cp deploy.log logs/$LABEL/ echo $CONTRACT_ADDR_NEW_VALUE + python3 bin/replace-env-variable.py ./$ENV_FILE $UPGRADE_GATEKEEPER_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $GOVERNANCE_TARGET_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $VERIFIER_TARGET_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $CONTRACT_TARGET_ADDR_NEW_VALUE diff --git a/contracts/scripts/add-erc20-token.ts b/contracts/scripts/add-erc20-token.ts index 749ed45541..9058b48dff 100644 --- a/contracts/scripts/add-erc20-token.ts +++ b/contracts/scripts/add-erc20-token.ts @@ -1,6 +1,6 @@ -import { bigNumberify } from "ethers/utils"; -import { ethers } from "ethers"; -import { Deployer } from "../src.ts/deploy"; +import {bigNumberify} from "ethers/utils"; +import {ethers} from "ethers"; +import {Deployer} from "../src.ts/deploy"; const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); const governorWallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/1").connect(provider); @@ -10,9 +10,9 @@ async function main() { let tokenAddress = process.argv[process.argv.length - 1]; console.log("Adding new ERC20 token to network: ", tokenAddress); let tx = await 
deployer - .getDeployedContract('Governance') + .getDeployedProxyContract('Governance') .addToken(tokenAddress, {gasLimit: bigNumberify("1000000")}); - console.log("tx hash: ",tx.hash); + console.log("tx hash: ", tx.hash); let receipt = await tx.wait(); console.log("status: ", receipt.status); } diff --git a/contracts/scripts/contract-info.ts b/contracts/scripts/contract-info.ts index 599f9af584..94af84bf1b 100644 --- a/contracts/scripts/contract-info.ts +++ b/contracts/scripts/contract-info.ts @@ -1,4 +1,4 @@ -import { Deployer } from "../src.ts/deploy"; +import {Deployer} from "../src.ts/deploy"; import {Contract, ethers} from "ethers"; const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); @@ -6,7 +6,7 @@ const wallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/ async function main() { const deployer = new Deployer(wallet, false); - const franklinDeployedContract = deployer.getDeployedContract('Franklin'); + const franklinDeployedContract = deployer.getDeployedProxyContract('Franklin'); let value = await franklinDeployedContract.onchainOps(2); console.log(value); value = await franklinDeployedContract.balancesToWithdraw(wallet.address, 0); diff --git a/contracts/scripts/testnet-deploy.ts b/contracts/scripts/testnet-deploy.ts index 0ca35bdc70..d3f9a9dd48 100644 --- a/contracts/scripts/testnet-deploy.ts +++ b/contracts/scripts/testnet-deploy.ts @@ -1,6 +1,6 @@ import {ethers} from "ethers"; import {ArgumentParser} from "argparse"; -import { Deployer, addTestERC20Token, mintTestERC20Token } from "../src.ts/deploy"; +import {Deployer, addTestERC20Token, mintTestERC20Token} from "../src.ts/deploy"; async function main() { const parser = new ArgumentParser({ @@ -36,23 +36,28 @@ async function main() { await deployer.deployGovernance(); console.log(`GOVERNANCE_TARGET_ADDR=${await deployer.getDeployedContract('GovernanceTarget').address}`); console.log(`GOVERNANCE_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Governance')}`); - console.log(`GOVERNANCE_ADDR=${await deployer.getDeployedContract('Governance').address}`); + console.log(`GOVERNANCE_ADDR=${await deployer.getDeployedProxyContract('Governance').address}`); console.log(`Governance contract deployed, time: ${(Date.now() - timer) / 1000} secs`); timer = Date.now(); await deployer.deployVerifier(); console.log(`VERIFIER_TARGET_ADDR=${await deployer.getDeployedContract('VerifierTarget').address}`); - console.log(`VERIFIER_ADDR=${await deployer.getDeployedContract('Verifier').address}`); + console.log(`VERIFIER_ADDR=${await deployer.getDeployedProxyContract('Verifier').address}`); console.log(`Verifier contract deployed, time: ${(Date.now() - timer) / 1000} secs`); timer = Date.now(); await deployer.deployFranklin(); console.log(`CONTRACT_TARGET_ADDR=${await deployer.getDeployedContract('FranklinTarget').address}`); console.log(`CONTRACT_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Franklin')}`); - console.log(`CONTRACT_ADDR=${await deployer.getDeployedContract('Franklin').address}`); + console.log(`CONTRACT_ADDR=${await deployer.getDeployedProxyContract('Franklin').address}`); console.log(`Main contract deployed, time: ${(Date.now() - timer) / 1000} secs`); - const governance = await deployer.getDeployedContract('Governance'); + timer = Date.now(); + await deployer.deployUpgradeGatekeeper(); + console.log(`UPGRADE_GATEKEEPER_ADDR=${await deployer.getDeployedContract('UpgradeGatekeeper').address}`); + console.log(`Upgrade gatekeeper deployed, time: ${(Date.now() - timer) 
/ 1000} secs`); + + const governance = await deployer.getDeployedProxyContract('Governance'); await governance.setValidator(process.env.OPERATOR_ETH_ADDRESS, true); const erc20 = await addTestERC20Token(wallet, governance); @@ -70,6 +75,7 @@ async function main() { deployer.postContractToTesseracts("Governance"), deployer.postContractToTesseracts("Verifier"), deployer.postContractToTesseracts("Franklin"), + deployer.postContractToTesseracts("UpgradeGatekeeper"), ]); } else { // sequentially, since etherscan has request limit @@ -79,6 +85,7 @@ async function main() { await deployer.publishSourceCodeToEtherscan("Governance"); await deployer.publishSourceCodeToEtherscan("Verifier"); await deployer.publishSourceCodeToEtherscan("Franklin"); + await deployer.publishSourceCodeToEtherscan("UpgradeGatekeeper"); } } catch (e) { console.error("Failed to post contract code: ", e.toString()); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 477795f719..7d9ddcc3d8 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -16,11 +16,13 @@ export const ERC20MintableContract = function () { return contract }(); -export const proxyContractCode = require(`../build/Proxy`); +export const upgradeGatekeeperContractCode = require(`../build/UpgradeGatekeeper`); export const franklinContractCode = require(`../build/Franklin`); export const verifierContractCode = require(`../build/Verifier`); export const governanceContractCode = require(`../build/Governance`); +export const proxyContractCode = require(`../build/Proxy`); +export const upgradeGatekeeperTestContractCode = require(`../build/UpgradeGatekeeperTest`); export const franklinTestContractCode = require('../build/FranklinTest'); export const verifierTestContractCode = require('../build/VerifierTest'); export const governanceTestContractCode = require('../build/GovernanceTest'); @@ -65,12 +67,13 @@ export class Deployer { constructor(public wallet: ethers.Wallet, isTest: boolean) { this.bytecodes = { - GovernanceTarget: isTest ? governanceTestContractCode : governanceContractCode, - VerifierTarget: isTest ? verifierTestContractCode : verifierContractCode, - FranklinTarget: isTest ? franklinTestContractCode : franklinContractCode, + GovernanceTarget: isTest ? governanceTestContractCode : governanceContractCode, + VerifierTarget: isTest ? verifierTestContractCode : verifierContractCode, + FranklinTarget: isTest ? franklinTestContractCode : franklinContractCode, Governance: proxyContractCode, Verifier: proxyContractCode, Franklin: proxyContractCode, + UpgradeGatekeeper: isTest ? 
upgradeGatekeeperTestContractCode : upgradeGatekeeperContractCode, }; this.addresses = { @@ -80,6 +83,7 @@ export class Deployer { Governance: process.env.GOVERNANCE_ADDR, Verifier: process.env.VERIFIER_ADDR, Franklin: process.env.CONTRACT_ADDR, + UpgradeGatekeeper: process.env.UPGRADE_GATEKEEPER_ADDR, }; this.deployTransactionHash = { @@ -92,21 +96,20 @@ export class Deployer { return this.deployTransactionHash[name]; } + getDeployedProxyContract(name) { + return new ethers.Contract( + this.addresses[name], + this.bytecodes[name+"Target"].interface, + this.wallet + ); + } + getDeployedContract(name) { - if (["Governance", "Verifier", "Franklin"].includes(name)) { - return new ethers.Contract( - this.addresses[name], - this.bytecodes[name+"Target"].interface, - this.wallet - ); - } - else{ - return new ethers.Contract( - this.addresses[name], - this.bytecodes[name].interface, - this.wallet - ); - } + return new ethers.Contract( + this.addresses[name], + this.bytecodes[name].interface, + this.wallet + ); } initializationArgs(contractName) { @@ -133,6 +136,7 @@ export class Deployer { 'Governance': [this.addresses.GovernanceTarget, this.encodedInitializationArgs('Governance')], 'Verifier': [this.addresses.VerifierTarget, this.encodedInitializationArgs('Verifier')], 'Franklin': [this.addresses.FranklinTarget, this.encodedInitializationArgs('Franklin')], + 'UpgradeGatekeeper': [this.addresses.Franklin], }[contractName]; } encodedConstructorArgs(contractName) { @@ -210,6 +214,26 @@ export class Deployer { return new ethers.Contract(proxy.address, this.bytecodes.FranklinTarget.interface, this.wallet); } + async deployUpgradeGatekeeper() { + const contract = await deployContract( + this.wallet, + this.bytecodes.UpgradeGatekeeper, + this.constructorArgs('UpgradeGatekeeper'), + { gasLimit: 3000000 }, + ); + this.addresses.UpgradeGatekeeper = contract.address; + + let transferMastershipTransaction; + transferMastershipTransaction = await this.getDeployedContract('Governance').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + transferMastershipTransaction = await this.getDeployedContract('Verifier').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + transferMastershipTransaction = await this.getDeployedContract('Franklin').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + + return contract; + } + async postContractToTesseracts(contractName) { const address = this.addresses[contractName]; const contractCode = this.bytecodes[contractName]; diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index a7d8ec47fd..b607f3c602 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -103,6 +103,9 @@ describe("UpgradeGatekeeper unit tests", function () { .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') .withArgs(proxyTestContract.address, 0, DummySecond.address) + await expect(await proxyTestContract.getTarget()) + .to.equal(DummySecond.address); + // check dummy index and updated storage expect(await proxyDummyInterface.get_DUMMY_INDEX()) .to.equal(2); diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 1a923b49a0..606b1b9999 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -19,6 +19,7 @@ ETHERSCAN_API_KEY= CLIENT_GITHUB_DIR=~/src/gluk64.github.io/ +UPGRADE_GATEKEEPER_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 
GOVERNANCE_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 VERIFIER_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 CONTRACT_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 From 1b26a78a3d302d1ee374602c4ccbee71d43bcf25 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Fri, 20 Mar 2020 19:06:16 +0200 Subject: [PATCH 061/186] migration integration test --- Cargo.lock | 9 +- Makefile | 4 + bin/prepare-test-contracts.sh | 7 + bin/test-upgrade-franklin.sh | 11 ++ contracts/scripts/exampleDeposits.ts | 2 +- contracts/scripts/test-upgrade-franklin.ts | 73 ++++++++ core/testkit/Cargo.toml | 1 + core/testkit/src/bin/migration_test.rs | 100 +++++++++++ core/testkit/src/external_commands.rs | 25 +++ core/testkit/src/lib.rs | 186 +++++++++++---------- 10 files changed, 324 insertions(+), 94 deletions(-) create mode 100644 bin/test-upgrade-franklin.sh create mode 100644 contracts/scripts/test-upgrade-franklin.ts create mode 100644 core/testkit/src/bin/migration_test.rs diff --git a/Cargo.lock b/Cargo.lock index 9416b26177..0cefc85869 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1082,7 +1082,7 @@ source = "git+https://github.com/matter-labs/ff.git?branch=playground#3f6722c070 dependencies = [ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "ff_derive_ce 0.6.0 (git+https://github.com/matter-labs/ff.git?branch=playground)", - "hex 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1367,7 +1367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "hex" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2989,7 +2989,7 @@ dependencies = [ "env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "models 0.0.1", @@ -3113,6 +3113,7 @@ dependencies = [ "ethabi 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "models 0.0.1", "prover 0.0.1", @@ -3941,7 +3942,7 @@ dependencies = [ "checksum heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1679e6ea370dee694f91f1dc469bf94cf8f52051d147aec3e1f9497c6fc22461" "checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" -"checksum hex 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "76cdda6bf525062a0c9e8f14ee2b37935c86b8efb6c8b69b3c83dfb518a914af" +"checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35" "checksum hmac 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5dcb5e64cda4c23119ab41ba960d1e170a774c8e4b9d9e6a9bc18aabf5e59695" "checksum hostname 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "21ceb46a83a85e824ef93669c8b390009623863b5c195d1ba747292c0c72f94e" "checksum http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" diff --git a/Makefile b/Makefile index bb918f8c60..4ebc968ded 100644 --- a/Makefile +++ b/Makefile @@ -162,6 +162,10 @@ loadtest: confirm_action integration-testkit: build-contracts cargo run --bin testkit --release cargo run --bin exodus_test --release + cargo run --bin migration_test --release + +migration-test: build-contracts + cargo run --bin migration_test --release itest: # contracts simple integration tests @bin/prepare-test-contracts.sh diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 704771ff0e..42ea69de16 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -19,6 +19,7 @@ cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol cp $IN_DIR/Events.sol $OUT_DIR/Events.sol cp $IN_DIR/Operations.sol $OUT_DIR/Operations.sol cp $IN_DIR/VerificationKey.sol $OUT_DIR/VerificationKey.sol +cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTestNoInit.sol # Change dependencies ssed 's/import "\.\./import "\.\.\/\.\./' -i $OUT_DIR/*.sol @@ -30,6 +31,9 @@ ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/UpgradeGatekeeper/UpgradeGatekeeperTest/' -i $OUT_DIR/*.sol +# Renaming contract in FranklinTestNoInit.sol +ssed 's/contract FranklinTest/contract FranklinTestNoInit/' -i $OUT_DIR/FranklinTestNoInit.sol + # Changes solidity constant to provided value # In solidity constant should be in the following form. @@ -53,3 +57,6 @@ create_constant_getter NOTICE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol + +# Make initialize function in FranklinTestNoInit to do nothing +ssed -E "s/ function initialize(.*)/ function initialize\1\n return;/" -i $OUT_DIR/FranklinTestNoInit.sol diff --git a/bin/test-upgrade-franklin.sh b/bin/test-upgrade-franklin.sh new file mode 100644 index 0000000000..c2ec3e8069 --- /dev/null +++ b/bin/test-upgrade-franklin.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +if [ ! -z $ZKSYNC_HOME ] +then + cd $ZKSYNC_HOME +fi + +. 
.setup_env + +cd contracts; +yarn ts-node scripts/test-upgrade-franklin.ts $1 $2 diff --git a/contracts/scripts/exampleDeposits.ts b/contracts/scripts/exampleDeposits.ts index 225a259617..24e9918780 100644 --- a/contracts/scripts/exampleDeposits.ts +++ b/contracts/scripts/exampleDeposits.ts @@ -9,7 +9,7 @@ const franklinAddressBinary = Buffer.from(franklinAddress, "hex"); async function main() { const deployer = new Deployer(wallet, false); - const franklinDeployedContract = deployer.getDeployedContract('Franklin'); + const franklinDeployedContract = deployer.getDeployedProxyContract('Franklin'); const depositValue = parseEther("0.3"); const tx = await franklinDeployedContract.depositETH(franklinAddressBinary, {value: depositValue}); const receipt = await tx.wait(); diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts new file mode 100644 index 0000000000..235296cf5c --- /dev/null +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -0,0 +1,73 @@ +import {ethers} from "ethers"; +import {ArgumentParser} from "argparse"; +import {proxyContractCode, upgradeGatekeeperTestContractCode} from "../src.ts/deploy"; +import {deployContract} from "ethereum-waffle"; + +const {performance} = require('perf_hooks'); +const {expect} = require("chai") + +export const FranklinTestNoInitContractCode = require(`../build/FranklinTestNoInit`); + +async function main() { + const parser = new ArgumentParser({ + version: '0.0.1', + addHelp: true, + description: 'Contract upgrade', + }); + parser.addArgument('contractAddress'); + parser.addArgument('upgradeGatekeeperAddress'); + const args = parser.parseArgs(process.argv.slice(2)); + if (process.env.ETH_NETWORK !== 'localhost') { + console.log("Upgrading test contract not on localhost is not allowed"); + return; + } + + const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); + if (process.env.ETH_NETWORK == "localhost") { + // small polling interval for localhost network + provider.pollingInterval = 200; + } + + const wallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/1").connect(provider); + + const proxyContract = new ethers.Contract( + args.contractAddress, + proxyContractCode.interface, + wallet + ); + + const upgradeGatekeeper = new ethers.Contract( + args.upgradeGatekeeperAddress, + upgradeGatekeeperTestContractCode.interface, + wallet, + ); + + const newTarget = await deployContract( + wallet, + FranklinTestNoInitContractCode, + [], + {gasLimit: 6500000}, + ); + + let notice_period = parseInt(await upgradeGatekeeper.get_NOTICE_PERIOD()); + + const upgradeProxyTransaction = await upgradeGatekeeper.upgradeProxy(proxyContract.address, newTarget.address); + await upgradeProxyTransaction.wait(); + let upgrade_start_time = performance.now(); + + // wait notice period + while ((performance.now() - upgrade_start_time) < notice_period * 1000 + 10) { + + } + + // finish upgrade + const finalizeStatusActivationTransaction = await upgradeGatekeeper.activeFinalizeStatusOfUpgrade(proxyContract.address); + await finalizeStatusActivationTransaction.wait(); + const finishProxyUpgradeTransaction = await upgradeGatekeeper.finishProxyUpgrade(proxyContract.address, []); + await finishProxyUpgradeTransaction.wait(); + + await expect(await proxyContract.getTarget()) + .to.equal(newTarget.address); +} + +main(); diff --git a/core/testkit/Cargo.toml b/core/testkit/Cargo.toml index b032744ed0..6a70f857f7 100644 --- a/core/testkit/Cargo.toml +++ b/core/testkit/Cargo.toml @@ -13,6 +13,7 @@ 
 crypto_exports = { path = "../crypto_exports", version = "0.1.0" }
 eth_client = { path = "../eth_client", version = "0.1.0" }
+hex = "0.4.2"
 tokio = { version = "0.2", features = ["full"] }
 futures = { version = "0.3", features = ["compat"] }
 log = "0.4"
diff --git a/core/testkit/src/bin/migration_test.rs b/core/testkit/src/bin/migration_test.rs
new file mode 100644
index 0000000000..1c61c71dfa
--- /dev/null
+++ b/core/testkit/src/bin/migration_test.rs
@@ -0,0 +1,100 @@
+//! Migration (contract upgrade) test steps:
+//! + Create a verified state with balances on the accounts.
+//! + Execute the basic operations (deposits, transfers, withdraws, full exit)
+//!   for a couple of tokens on the current contract version.
+//! + Upgrade the Franklin contract through the upgrade gatekeeper
+//!   (the `test-upgrade-franklin` script is run as an external command).
+//! + Execute the same basic operations on the upgraded contract to check
+//!   that the state and the functionality survived the upgrade.
+//! + Stop the state keeper.
+
+use crate::eth_account::{parse_ether, EthereumAccount};
+use crate::external_commands::{deploy_test_contracts, get_test_accounts, run_upgrade_franklin};
+use crate::zksync_account::ZksyncAccount;
+use models::config_options::ConfigurationOptions;
+use std::time::Instant;
+use testkit::*;
+use web3::transports::Http;
+
+fn migration_test() {
+    let config = ConfigurationOptions::from_env();
+
+    let fee_account = ZksyncAccount::rand();
+    let (sk_thread_handle, stop_state_keeper_sender, sk_channels) =
+        spawn_state_keeper(&fee_account.address);
+
+    let deploy_timer = Instant::now();
+    println!("deploying contracts");
+    let contracts = deploy_test_contracts();
+    println!(
+        "contracts deployed {:#?}, {} secs",
+        contracts,
+        deploy_timer.elapsed().as_secs()
+    );
+
+    let (_el, transport) = Http::new(&config.web3_url).expect("http transport start");
+    let commit_account = EthereumAccount::new(
+        config.operator_private_key,
+        config.operator_eth_addr,
+        transport.clone(),
+        contracts.contract,
+        &config,
+    );
+
+    let eth_accounts = get_test_accounts()
+        .into_iter()
+        .map(|test_eth_account| {
+            EthereumAccount::new(
+                test_eth_account.private_key,
+                test_eth_account.address,
+                transport.clone(),
+                contracts.contract,
+                &config,
+            )
+        })
+        .collect::<Vec<_>>();
+
+    let zksync_accounts = {
+        let mut zksync_accounts = Vec::new();
+        zksync_accounts.push(fee_account);
+        zksync_accounts.extend(eth_accounts.iter().map(|eth_account| {
+            let rng_zksync_key = ZksyncAccount::rand().private_key;
+            ZksyncAccount::new(
+                rng_zksync_key,
+                0,
+                eth_account.address,
+                eth_account.private_key,
+            )
+        }));
+        zksync_accounts
+    };
+
+    let accounts = AccountSet {
+        eth_accounts,
+        zksync_accounts,
+        fee_account_id: ZKSyncAccountId(0),
+    };
+
+    let mut test_setup = TestSetup::new(sk_channels, accounts, &contracts, commit_account);
+
+    let deposit_amount = parse_ether("1.0").unwrap();
+
+    for token in 0..=1 {
+        perform_basic_operations(token, &mut test_setup, deposit_amount.clone());
+    }
+
+    let start_upgrade = Instant::now();
+    run_upgrade_franklin(contracts.contract, contracts.upgrade_gatekeeper);
+    println!("Upgrade done in {:?}", start_upgrade.elapsed());
+
+    for token in 0..=1 {
+        perform_basic_operations(token, &mut test_setup, deposit_amount.clone());
+    }
+
+    stop_state_keeper_sender.send(()).expect("sk stop send");
+    sk_thread_handle.join().expect("sk thread join");
+}
+
+fn main() {
+    migration_test();
+}
diff --git a/core/testkit/src/external_commands.rs b/core/testkit/src/external_commands.rs
index 2748f8975d..d5d12e8385 100644
---
a/core/testkit/src/external_commands.rs +++ b/core/testkit/src/external_commands.rs @@ -1,6 +1,7 @@ //! Run external commands from `$ZKSYNC_HOME/bin` //!`$ZKSYNC_HOME/bin` should be in path. //! +extern crate hex; use std::collections::HashMap; use std::process::Command; use std::str::FromStr; @@ -13,6 +14,7 @@ pub struct Contracts { pub governance: Address, pub verifier: Address, pub contract: Address, + pub upgrade_gatekeeper: Address, pub test_erc20_address: Address, } @@ -35,6 +37,12 @@ fn get_contract_address(deploy_script_out: &str) -> Option<(String, Address)> { Address::from_str(&deploy_script_out["CONTRACT_ADDR=0x".len()..]) .expect("can't parse contract address"), )) + } else if deploy_script_out.starts_with("UPGRADE_GATEKEEPER_ADDR=0x") { + Some(( + String::from("UPGRADE_GATEKEEPER_ADDR"), + Address::from_str(&deploy_script_out["UPGRADE_GATEKEEPER_ADDR=0x".len()..]) + .expect("can't parse contract address"), + )) } else if deploy_script_out.starts_with("TEST_ERC20=0x") { Some(( String::from("TEST_ERC20"), @@ -75,9 +83,26 @@ pub fn deploy_test_contracts() -> Contracts { contract: contracts .remove("CONTRACT_ADDR") .expect("CONTRACT_ADDR missing"), + upgrade_gatekeeper: contracts + .remove("UPGRADE_GATEKEEPER_ADDR") + .expect("UPGRADE_GATEKEEPER_ADDR missing"), test_erc20_address: contracts.remove("TEST_ERC20").expect("TEST_ERC20 missing"), } } +pub fn run_upgrade_franklin(franklin_address: Address, upgrade_gatekeeper_address: Address) { + let result = Command::new("sh") + .arg("test-upgrade-franklin.sh") + .arg(String::from("0x") + &hex::encode(franklin_address.as_bytes())) + .arg(String::from("0x") + &hex::encode(upgrade_gatekeeper_address.as_bytes())) + .output() + .expect("failed to execute test upgrade franklin script"); + if !result.status.success() { + panic!("test upgrade franklin script failed") + } + if !result.stderr.is_empty() { + panic!("test upgrade franklin script failed with error") + } +} #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] diff --git a/core/testkit/src/lib.rs b/core/testkit/src/lib.rs index 3b8906e68d..fcd27718f6 100644 --- a/core/testkit/src/lib.rs +++ b/core/testkit/src/lib.rs @@ -239,6 +239,100 @@ pub fn spawn_state_keeper( ) } +pub fn perform_basic_operations( + token: u16, + test_setup: &mut TestSetup, + deposit_amount: BigDecimal, +) { + // test deposit to other account + test_setup.start_block(); + test_setup.deposit( + ETHAccountId(0), + ZKSyncAccountId(2), + Token(token), + deposit_amount.clone(), + ); + test_setup + .execute_commit_and_verify_block() + .expect("Block execution failed"); + println!("Deposit to other account test success, token_id: {}", token); + + // test two deposits + test_setup.start_block(); + test_setup.deposit( + ETHAccountId(0), + ZKSyncAccountId(1), + Token(token), + deposit_amount.clone(), + ); + test_setup.deposit( + ETHAccountId(0), + ZKSyncAccountId(1), + Token(token), + deposit_amount.clone(), + ); + test_setup + .execute_commit_and_verify_block() + .expect("Block execution failed"); + println!("Deposit test success, token_id: {}", token); + + // test transfers + test_setup.start_block(); + + test_setup.change_pubkey_with_onchain_auth(ETHAccountId(0), ZKSyncAccountId(1)); + + //should be executed as a transfer + test_setup.transfer( + ZKSyncAccountId(1), + ZKSyncAccountId(2), + Token(token), + &deposit_amount / &BigDecimal::from(4), + &deposit_amount / &BigDecimal::from(4), + ); + + let nonce = test_setup.accounts.zksync_accounts[1].nonce(); + let incorrect_nonce_transfer = 
test_setup.accounts.transfer( + ZKSyncAccountId(1), + ZKSyncAccountId(0), + Token(token), + deposit_amount.clone(), + BigDecimal::from(0), + Some(nonce + 1), + false, + ); + test_setup.execute_incorrect_tx(incorrect_nonce_transfer); + + //should be executed as a transfer to new + test_setup.transfer( + ZKSyncAccountId(1), + ZKSyncAccountId(2), + Token(token), + &deposit_amount / &BigDecimal::from(4), + &deposit_amount / &BigDecimal::from(4), + ); + + test_setup.change_pubkey_with_tx(ZKSyncAccountId(2)); + + test_setup.withdraw( + ZKSyncAccountId(2), + ETHAccountId(0), + Token(token), + &deposit_amount / &BigDecimal::from(4), + &deposit_amount / &BigDecimal::from(4), + ); + test_setup + .execute_commit_and_verify_block() + .expect("Block execution failed"); + println!("Transfer test success, token_id: {}", token); + + test_setup.start_block(); + test_setup.full_exit(ETHAccountId(0), ZKSyncAccountId(1), Token(token)); + test_setup + .execute_commit_and_verify_block() + .expect("Block execution failed"); + println!("Full exit test success, token_id: {}", token); +} + pub fn perform_basic_tests() { let config = ConfigurationOptions::from_env(); @@ -303,93 +397,7 @@ pub fn perform_basic_tests() { let deposit_amount = parse_ether("1.0").unwrap(); for token in 0..=1 { - // test deposit to other account - test_setup.start_block(); - test_setup.deposit( - ETHAccountId(0), - ZKSyncAccountId(2), - Token(token), - deposit_amount.clone(), - ); - test_setup - .execute_commit_and_verify_block() - .expect("Block execution failed"); - println!("Deposit to other account test success, token_id: {}", token); - - // test two deposits - test_setup.start_block(); - test_setup.deposit( - ETHAccountId(0), - ZKSyncAccountId(1), - Token(token), - deposit_amount.clone(), - ); - test_setup.deposit( - ETHAccountId(0), - ZKSyncAccountId(1), - Token(token), - deposit_amount.clone(), - ); - test_setup - .execute_commit_and_verify_block() - .expect("Block execution failed"); - println!("Deposit test success, token_id: {}", token); - - // test transfers - test_setup.start_block(); - - test_setup.change_pubkey_with_onchain_auth(ETHAccountId(0), ZKSyncAccountId(1)); - - //should be executed as a transfer - test_setup.transfer( - ZKSyncAccountId(1), - ZKSyncAccountId(2), - Token(token), - &deposit_amount / &BigDecimal::from(4), - &deposit_amount / &BigDecimal::from(4), - ); - - let nonce = test_setup.accounts.zksync_accounts[1].nonce(); - let incorrect_nonce_transfer = test_setup.accounts.transfer( - ZKSyncAccountId(1), - ZKSyncAccountId(0), - Token(token), - deposit_amount.clone(), - BigDecimal::from(0), - Some(nonce + 1), - false, - ); - test_setup.execute_incorrect_tx(incorrect_nonce_transfer); - - //should be executed as a transfer to new - test_setup.transfer( - ZKSyncAccountId(1), - ZKSyncAccountId(2), - Token(token), - &deposit_amount / &BigDecimal::from(4), - &deposit_amount / &BigDecimal::from(4), - ); - - test_setup.change_pubkey_with_tx(ZKSyncAccountId(2)); - - test_setup.withdraw( - ZKSyncAccountId(2), - ETHAccountId(0), - Token(token), - &deposit_amount / &BigDecimal::from(4), - &deposit_amount / &BigDecimal::from(4), - ); - test_setup - .execute_commit_and_verify_block() - .expect("Block execution failed"); - println!("Transfer test success, token_id: {}", token); - - test_setup.start_block(); - test_setup.full_exit(ETHAccountId(0), ZKSyncAccountId(1), Token(token)); - test_setup - .execute_commit_and_verify_block() - .expect("Block execution failed"); - println!("Full exit test success, token_id: {}", token); + 
perform_basic_operations(token, &mut test_setup, deposit_amount.clone()); } stop_state_keeper_sender.send(()).expect("sk stop send"); @@ -600,7 +608,7 @@ impl TestSetup { self.execute_priority_op(full_exit); } - fn change_pubkey_with_tx(&mut self, zksync_signer: ZKSyncAccountId) { + pub fn change_pubkey_with_tx(&mut self, zksync_signer: ZKSyncAccountId) { let tx = self .accounts .change_pubkey_with_tx(zksync_signer, None, true); @@ -608,7 +616,7 @@ impl TestSetup { self.execute_tx(tx); } - fn change_pubkey_with_onchain_auth( + pub fn change_pubkey_with_onchain_auth( &mut self, eth_account: ETHAccountId, zksync_signer: ZKSyncAccountId, From 286beaad13908034e869b2bdd066bae86021ef31 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Fri, 20 Mar 2020 19:05:27 +0200 Subject: [PATCH 062/186] add gatekeeper --- bin/deploy-contracts.sh | 2 + contracts/scripts/add-erc20-token.ts | 10 ++-- contracts/scripts/contract-info.ts | 4 +- contracts/scripts/testnet-deploy.ts | 17 ++++-- contracts/src.ts/deploy.ts | 60 +++++++++++++------ .../test/unit_tests/upgradeGatekeeper_test.ts | 3 + etc/env/dev.env.example | 1 + 7 files changed, 67 insertions(+), 30 deletions(-) diff --git a/bin/deploy-contracts.sh b/bin/deploy-contracts.sh index 972661b2f4..7eff48e40d 100755 --- a/bin/deploy-contracts.sh +++ b/bin/deploy-contracts.sh @@ -17,6 +17,7 @@ cd contracts; yarn deploy-no-build | tee ../deploy.log; cd ..; +UPGRADE_GATEKEEPER_ADDR_NEW_VALUE=`grep "UPGRADE_GATEKEEPER_ADDR" deploy.log` GOVERNANCE_TARGET_ADDR_NEW_VALUE=`grep "GOVERNANCE_TARGET_ADDR" deploy.log` VERIFIER_TARGET_ADDR_NEW_VALUE=`grep "VERIFIER_TARGET_ADDR" deploy.log` CONTRACT_TARGET_ADDR_NEW_VALUE=`grep "CONTRACT_TARGET_ADDR" deploy.log` @@ -33,6 +34,7 @@ then cp ./$ENV_FILE logs/$LABEL/$ZKSYNC_ENV.bak cp deploy.log logs/$LABEL/ echo $CONTRACT_ADDR_NEW_VALUE + python3 bin/replace-env-variable.py ./$ENV_FILE $UPGRADE_GATEKEEPER_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $GOVERNANCE_TARGET_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $VERIFIER_TARGET_ADDR_NEW_VALUE python3 bin/replace-env-variable.py ./$ENV_FILE $CONTRACT_TARGET_ADDR_NEW_VALUE diff --git a/contracts/scripts/add-erc20-token.ts b/contracts/scripts/add-erc20-token.ts index 749ed45541..9058b48dff 100644 --- a/contracts/scripts/add-erc20-token.ts +++ b/contracts/scripts/add-erc20-token.ts @@ -1,6 +1,6 @@ -import { bigNumberify } from "ethers/utils"; -import { ethers } from "ethers"; -import { Deployer } from "../src.ts/deploy"; +import {bigNumberify} from "ethers/utils"; +import {ethers} from "ethers"; +import {Deployer} from "../src.ts/deploy"; const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); const governorWallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/1").connect(provider); @@ -10,9 +10,9 @@ async function main() { let tokenAddress = process.argv[process.argv.length - 1]; console.log("Adding new ERC20 token to network: ", tokenAddress); let tx = await deployer - .getDeployedContract('Governance') + .getDeployedProxyContract('Governance') .addToken(tokenAddress, {gasLimit: bigNumberify("1000000")}); - console.log("tx hash: ",tx.hash); + console.log("tx hash: ", tx.hash); let receipt = await tx.wait(); console.log("status: ", receipt.status); } diff --git a/contracts/scripts/contract-info.ts b/contracts/scripts/contract-info.ts index 599f9af584..94af84bf1b 100644 --- a/contracts/scripts/contract-info.ts +++ b/contracts/scripts/contract-info.ts @@ -1,4 +1,4 @@ -import { Deployer } from 
"../src.ts/deploy"; +import {Deployer} from "../src.ts/deploy"; import {Contract, ethers} from "ethers"; const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL); @@ -6,7 +6,7 @@ const wallet = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/ async function main() { const deployer = new Deployer(wallet, false); - const franklinDeployedContract = deployer.getDeployedContract('Franklin'); + const franklinDeployedContract = deployer.getDeployedProxyContract('Franklin'); let value = await franklinDeployedContract.onchainOps(2); console.log(value); value = await franklinDeployedContract.balancesToWithdraw(wallet.address, 0); diff --git a/contracts/scripts/testnet-deploy.ts b/contracts/scripts/testnet-deploy.ts index 0ca35bdc70..d3f9a9dd48 100644 --- a/contracts/scripts/testnet-deploy.ts +++ b/contracts/scripts/testnet-deploy.ts @@ -1,6 +1,6 @@ import {ethers} from "ethers"; import {ArgumentParser} from "argparse"; -import { Deployer, addTestERC20Token, mintTestERC20Token } from "../src.ts/deploy"; +import {Deployer, addTestERC20Token, mintTestERC20Token} from "../src.ts/deploy"; async function main() { const parser = new ArgumentParser({ @@ -36,23 +36,28 @@ async function main() { await deployer.deployGovernance(); console.log(`GOVERNANCE_TARGET_ADDR=${await deployer.getDeployedContract('GovernanceTarget').address}`); console.log(`GOVERNANCE_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Governance')}`); - console.log(`GOVERNANCE_ADDR=${await deployer.getDeployedContract('Governance').address}`); + console.log(`GOVERNANCE_ADDR=${await deployer.getDeployedProxyContract('Governance').address}`); console.log(`Governance contract deployed, time: ${(Date.now() - timer) / 1000} secs`); timer = Date.now(); await deployer.deployVerifier(); console.log(`VERIFIER_TARGET_ADDR=${await deployer.getDeployedContract('VerifierTarget').address}`); - console.log(`VERIFIER_ADDR=${await deployer.getDeployedContract('Verifier').address}`); + console.log(`VERIFIER_ADDR=${await deployer.getDeployedProxyContract('Verifier').address}`); console.log(`Verifier contract deployed, time: ${(Date.now() - timer) / 1000} secs`); timer = Date.now(); await deployer.deployFranklin(); console.log(`CONTRACT_TARGET_ADDR=${await deployer.getDeployedContract('FranklinTarget').address}`); console.log(`CONTRACT_GENESIS_TX_HASH=${await deployer.getDeployTransactionHash('Franklin')}`); - console.log(`CONTRACT_ADDR=${await deployer.getDeployedContract('Franklin').address}`); + console.log(`CONTRACT_ADDR=${await deployer.getDeployedProxyContract('Franklin').address}`); console.log(`Main contract deployed, time: ${(Date.now() - timer) / 1000} secs`); - const governance = await deployer.getDeployedContract('Governance'); + timer = Date.now(); + await deployer.deployUpgradeGatekeeper(); + console.log(`UPGRADE_GATEKEEPER_ADDR=${await deployer.getDeployedContract('UpgradeGatekeeper').address}`); + console.log(`Upgrade gatekeeper deployed, time: ${(Date.now() - timer) / 1000} secs`); + + const governance = await deployer.getDeployedProxyContract('Governance'); await governance.setValidator(process.env.OPERATOR_ETH_ADDRESS, true); const erc20 = await addTestERC20Token(wallet, governance); @@ -70,6 +75,7 @@ async function main() { deployer.postContractToTesseracts("Governance"), deployer.postContractToTesseracts("Verifier"), deployer.postContractToTesseracts("Franklin"), + deployer.postContractToTesseracts("UpgradeGatekeeper"), ]); } else { // sequentially, since etherscan has request limit @@ -79,6 
+85,7 @@ async function main() { await deployer.publishSourceCodeToEtherscan("Governance"); await deployer.publishSourceCodeToEtherscan("Verifier"); await deployer.publishSourceCodeToEtherscan("Franklin"); + await deployer.publishSourceCodeToEtherscan("UpgradeGatekeeper"); } } catch (e) { console.error("Failed to post contract code: ", e.toString()); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 477795f719..7d9ddcc3d8 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -16,11 +16,13 @@ export const ERC20MintableContract = function () { return contract }(); -export const proxyContractCode = require(`../build/Proxy`); +export const upgradeGatekeeperContractCode = require(`../build/UpgradeGatekeeper`); export const franklinContractCode = require(`../build/Franklin`); export const verifierContractCode = require(`../build/Verifier`); export const governanceContractCode = require(`../build/Governance`); +export const proxyContractCode = require(`../build/Proxy`); +export const upgradeGatekeeperTestContractCode = require(`../build/UpgradeGatekeeperTest`); export const franklinTestContractCode = require('../build/FranklinTest'); export const verifierTestContractCode = require('../build/VerifierTest'); export const governanceTestContractCode = require('../build/GovernanceTest'); @@ -65,12 +67,13 @@ export class Deployer { constructor(public wallet: ethers.Wallet, isTest: boolean) { this.bytecodes = { - GovernanceTarget: isTest ? governanceTestContractCode : governanceContractCode, - VerifierTarget: isTest ? verifierTestContractCode : verifierContractCode, - FranklinTarget: isTest ? franklinTestContractCode : franklinContractCode, + GovernanceTarget: isTest ? governanceTestContractCode : governanceContractCode, + VerifierTarget: isTest ? verifierTestContractCode : verifierContractCode, + FranklinTarget: isTest ? franklinTestContractCode : franklinContractCode, Governance: proxyContractCode, Verifier: proxyContractCode, Franklin: proxyContractCode, + UpgradeGatekeeper: isTest ? 
upgradeGatekeeperTestContractCode : upgradeGatekeeperContractCode, }; this.addresses = { @@ -80,6 +83,7 @@ export class Deployer { Governance: process.env.GOVERNANCE_ADDR, Verifier: process.env.VERIFIER_ADDR, Franklin: process.env.CONTRACT_ADDR, + UpgradeGatekeeper: process.env.UPGRADE_GATEKEEPER_ADDR, }; this.deployTransactionHash = { @@ -92,21 +96,20 @@ export class Deployer { return this.deployTransactionHash[name]; } + getDeployedProxyContract(name) { + return new ethers.Contract( + this.addresses[name], + this.bytecodes[name+"Target"].interface, + this.wallet + ); + } + getDeployedContract(name) { - if (["Governance", "Verifier", "Franklin"].includes(name)) { - return new ethers.Contract( - this.addresses[name], - this.bytecodes[name+"Target"].interface, - this.wallet - ); - } - else{ - return new ethers.Contract( - this.addresses[name], - this.bytecodes[name].interface, - this.wallet - ); - } + return new ethers.Contract( + this.addresses[name], + this.bytecodes[name].interface, + this.wallet + ); } initializationArgs(contractName) { @@ -133,6 +136,7 @@ export class Deployer { 'Governance': [this.addresses.GovernanceTarget, this.encodedInitializationArgs('Governance')], 'Verifier': [this.addresses.VerifierTarget, this.encodedInitializationArgs('Verifier')], 'Franklin': [this.addresses.FranklinTarget, this.encodedInitializationArgs('Franklin')], + 'UpgradeGatekeeper': [this.addresses.Franklin], }[contractName]; } encodedConstructorArgs(contractName) { @@ -210,6 +214,26 @@ export class Deployer { return new ethers.Contract(proxy.address, this.bytecodes.FranklinTarget.interface, this.wallet); } + async deployUpgradeGatekeeper() { + const contract = await deployContract( + this.wallet, + this.bytecodes.UpgradeGatekeeper, + this.constructorArgs('UpgradeGatekeeper'), + { gasLimit: 3000000 }, + ); + this.addresses.UpgradeGatekeeper = contract.address; + + let transferMastershipTransaction; + transferMastershipTransaction = await this.getDeployedContract('Governance').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + transferMastershipTransaction = await this.getDeployedContract('Verifier').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + transferMastershipTransaction = await this.getDeployedContract('Franklin').transferMastership(contract.address); + await transferMastershipTransaction.wait(); + + return contract; + } + async postContractToTesseracts(contractName) { const address = this.addresses[contractName]; const contractCode = this.bytecodes[contractName]; diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index a7d8ec47fd..b607f3c602 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -103,6 +103,9 @@ describe("UpgradeGatekeeper unit tests", function () { .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') .withArgs(proxyTestContract.address, 0, DummySecond.address) + await expect(await proxyTestContract.getTarget()) + .to.equal(DummySecond.address); + // check dummy index and updated storage expect(await proxyDummyInterface.get_DUMMY_INDEX()) .to.equal(2); diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 1a923b49a0..606b1b9999 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -19,6 +19,7 @@ ETHERSCAN_API_KEY= CLIENT_GITHUB_DIR=~/src/gluk64.github.io/ +UPGRADE_GATEKEEPER_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 
GOVERNANCE_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 VERIFIER_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 CONTRACT_TARGET_ADDR=0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9 From e27a02647119ec66e243b10817c03bd04fdcc044 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 20 Mar 2020 18:48:43 +0200 Subject: [PATCH 063/186] Use link for zksync-crypto in zksync.js --- contracts/package.json | 2 +- contracts/yarn.lock | 225 +++++++++++++++++++++++++++++++++++- js/client/vue.config.js | 2 +- js/tests/yarn.lock | 3 +- js/zksync-crypto/.gitignore | 1 + js/zksync-crypto/Cargo.toml | 2 +- js/zksync.js/package.json | 2 +- js/zksync.js/yarn.lock | 3 +- 8 files changed, 230 insertions(+), 10 deletions(-) diff --git a/contracts/package.json b/contracts/package.json index a379036353..247f297310 100644 --- a/contracts/package.json +++ b/contracts/package.json @@ -24,7 +24,7 @@ "ts-node": "^8.3.0", "tslint": "^5.18.0", "typescript": "^3.5.3", - "zksync": "link:./js/zksync.js" + "zksync": "link:../js/zksync.js" }, "scripts": { "build": "waffle .waffle.json", diff --git a/contracts/yarn.lock b/contracts/yarn.lock index c31350775a..f04affb354 100644 --- a/contracts/yarn.lock +++ b/contracts/yarn.lock @@ -936,6 +936,21 @@ bl@^1.0.0: readable-stream "^2.3.5" safe-buffer "^5.1.1" +blake2b-wasm@^1.1.0: + version "1.1.7" + resolved "https://registry.yarnpkg.com/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz#e4d075da10068e5d4c3ec1fb9accc4d186c55d81" + integrity sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA== + dependencies: + nanoassert "^1.0.0" + +blake2b@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/blake2b/-/blake2b-2.1.3.tgz#f5388be424768e7c6327025dad0c3c6d83351bca" + integrity sha512-pkDss4xFVbMb4270aCyGD3qLv92314Et+FsKzilCLxDz5DuZ2/1g3w4nmBbu6nKApPspnjG7JcwTjGZnduB1yg== + dependencies: + blake2b-wasm "^1.1.0" + nanoassert "^1.0.0" + block-stream@*: version "0.0.9" resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" @@ -963,6 +978,11 @@ bn.js@4.11.8, bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.10.0, bn.js@^4. 
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f" integrity sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA== +bn.js@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.1.tgz#48efc4031a9c4041b9c99c6941d903463ab62eb5" + integrity sha512-IUTD/REb78Z2eodka1QZyyEk66pciRcP6Sroka0aI3tG/iwIdYLrBD62RsubR7vqdt3WyX8p4jxeatzmRSphtA== + body-parser@1.19.0, body-parser@^1.16.0: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" @@ -1306,6 +1326,11 @@ checkpoint-store@^1.1.0: dependencies: functional-red-black-tree "^1.0.1" +chnl@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/chnl/-/chnl-0.5.0.tgz#ed7aea426043fce5685e547f7297193b6d2bef8c" + integrity sha512-0dl4ZJfAZdLn9mDnWejs5nasZKVnDTwdXV+dkxodYbb//GJDtXNhPlqCCYUb1xF0NQpIB5zlHRDrU1RCB4BRog== + chownr@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6" @@ -1620,6 +1645,11 @@ crypto-browserify@3.12.0: randombytes "^2.0.0" randomfill "^1.0.3" +crypto-js@^3.1.9-1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-3.3.0.tgz#846dd1cce2f68aacfa156c8578f926a609b7976b" + integrity sha512-DIT51nX0dCfKltpRiXV+/TVZq+Qq2NgF4644+K7Ttnla7zEzqc+kjJyiB96BHNyUTBxyjzRcZYpUdZa+QAqi6Q== + crypto-random-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" @@ -1630,6 +1660,14 @@ cyclist@~0.2.2: resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-0.2.2.tgz#1b33792e11e914a2fd6d6ed6447464444e5fa640" integrity sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA= +d@1, d@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -1927,6 +1965,19 @@ elliptic@^6.0.0, elliptic@^6.4.0, elliptic@^6.4.1: minimalistic-assert "^1.0.0" minimalistic-crypto-utils "^1.0.0" +elliptic@^6.5.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.2.tgz#05c5678d7173c049d8ca433552224a495d0e3762" + integrity sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw== + dependencies: + bn.js "^4.4.0" + brorand "^1.0.1" + hash.js "^1.0.0" + hmac-drbg "^1.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.0" + emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" @@ -1998,6 +2049,23 @@ es-abstract@^1.13.0, es-abstract@^1.5.0, es-abstract@^1.5.1: is-regex "^1.0.4" object-keys "^1.0.12" +es-abstract@^1.17.0-next.0: + version "1.17.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.4.tgz#e3aedf19706b20e7c2594c35fc0d57605a79e184" + integrity sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.1.5" + 
is-regex "^1.0.5" + object-inspect "^1.7.0" + object-keys "^1.1.1" + object.assign "^4.1.0" + string.prototype.trimleft "^2.1.1" + string.prototype.trimright "^2.1.1" + es-to-primitive@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.0.tgz#edf72478033456e8dda8ef09e00ad9650707f377" @@ -2007,6 +2075,33 @@ es-to-primitive@^1.2.0: is-date-object "^1.0.1" is-symbol "^1.0.2" +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +es5-ext@^0.10.35, es5-ext@^0.10.50: + version "0.10.53" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1" + integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q== + dependencies: + es6-iterator "~2.0.3" + es6-symbol "~3.1.3" + next-tick "~1.0.0" + +es6-iterator@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + es6-promise@^4.0.3: version "4.2.8" resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" @@ -2019,6 +2114,14 @@ es6-promisify@^5.0.0: dependencies: es6-promise "^4.0.3" +es6-symbol@^3.1.1, es6-symbol@~3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -2625,6 +2728,13 @@ express@^4.14.0: utils-merge "1.0.1" vary "~1.1.2" +ext@^1.1.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244" + integrity sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A== + dependencies: + type "^2.0.0" + extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" @@ -2779,6 +2889,11 @@ flat-cache@^2.0.1: rimraf "2.6.3" write "1.0.3" +flat-options@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/flat-options/-/flat-options-0.1.3.tgz#56f644aaa095e5f984ecdd329f119e8a196c8aa3" + integrity sha512-z1vH9mb4ly55dWUZZFUeLNqhFWhwSQNngHpK8RQOhFuNw/sWcNDZhkHl3GS1YTHiYxB5qvcbSRbH7X6ThzX9UA== + flat@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2" @@ -3156,6 +3271,11 @@ has-symbols@^1.0.0: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= +has-symbols@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" + integrity 
sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== + has-to-string-tag-x@^1.2.0: version "1.4.1" resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" @@ -3453,6 +3573,11 @@ is-callable@^1.1.3, is-callable@^1.1.4: resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.4.tgz#1e1adf219e1eeb684d691f9d6a05ff0d30a24d75" integrity sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA== +is-callable@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.5.tgz#f7e46b596890456db74e7f6e976cb3273d06faab" + integrity sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q== + is-ci@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" @@ -3556,6 +3681,13 @@ is-regex@^1.0.4: dependencies: has "^1.0.1" +is-regex@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae" + integrity sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ== + dependencies: + has "^1.0.3" + is-retry-allowed@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" @@ -3621,6 +3753,11 @@ jju@^1.1.0: resolved "https://registry.yarnpkg.com/jju/-/jju-1.4.0.tgz#a3abe2718af241a2b2904f84a625970f389ae32a" integrity sha1-o6vicYryQaKykE+EpiWXDzia4yo= +js-sha256@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/js-sha256/-/js-sha256-0.9.0.tgz#0b89ac166583e91ef9123644bd3c5334ce9d0966" + integrity sha512-sga3MHh9sgQN2+pJ9VYZ+1LPwXOxuBJBA5nrR5/ofPfuiJBE2hnjsaN8se8JznOmGLN2p49Pe5U/ttafcs/apA== + js-sha3@0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.5.5.tgz#baf0c0e8c54ad5903447df96ade7a4a1bca79a4a" @@ -4393,6 +4530,11 @@ nano-json-stream-parser@^0.1.2: resolved "https://registry.yarnpkg.com/nano-json-stream-parser/-/nano-json-stream-parser-0.1.2.tgz#0cc8f6d0e2b622b479c40d499c46d64b755c6f5f" integrity sha1-DMj20OK2IrR5xA1JnEbWS3Vcb18= +nanoassert@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-1.1.0.tgz#4f3152e09540fde28c76f44b19bbcd1d5a42478d" + integrity sha1-TzFS4JVA/eKMdvRLGbvNHVpCR40= + natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" @@ -4408,6 +4550,11 @@ nested-error-stacks@~2.0.1: resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-2.0.1.tgz#d2cc9fc5235ddb371fc44d506234339c8e4b0a4b" integrity sha512-SrQrok4CATudVzBS7coSz26QRSmlK9TzzoFbeKfcPBUFPjcQM9Rqvr/DlJkOrwI/0KcgvMub1n1g5Jt9EgRn4A== +next-tick@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" + integrity sha1-yobR/ogoFpsBICCOPchCS524NCw= + nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -4566,12 +4713,17 @@ object-assign@^4, object-assign@^4.0.0, object-assign@^4.0.1, object-assign@^4.1 resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity 
sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= +object-inspect@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67" + integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== + object-inspect@~1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.6.0.tgz#c70b6cbf72f274aab4c34c0c82f5167bf82cf15b" integrity sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ== -object-keys@^1.0.11, object-keys@^1.0.12: +object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== @@ -4581,7 +4733,7 @@ object-keys@~0.4.0: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= -object.assign@4.1.0: +object.assign@4.1.0, object.assign@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== @@ -4993,6 +5145,11 @@ progress@^2.0.0, progress@^2.0.3: resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== +promise-controller@^0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/promise-controller/-/promise-controller-0.5.2.tgz#f32d2156dc1f8e62703bf9fb0b8158335660c212" + integrity sha512-ymVCGfCxN+A+6TqESLnGZhGfQdQJ08SpIMyft4xQPUDrOgoqzKcQnLIYaqQk7/rPyg4wpKpxBKefeGkvumWgUg== + promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" @@ -5014,6 +5171,15 @@ promise-to-callback@^1.0.0: is-fn "^1.0.0" set-immediate-shim "^1.0.1" +promise.prototype.finally@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/promise.prototype.finally/-/promise.prototype.finally-3.1.2.tgz#b8af89160c9c673cefe3b4c4435b53cfd0287067" + integrity sha512-A2HuJWl2opDH0EafgdjwEw7HysI8ff/n4lW4QEVBCUXFk9QeGecBWv0Deph0UmLe3tTNYegz8MOjsVuE6SMoJA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.0" + function-bind "^1.1.1" + prompts@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.1.0.tgz#bf90bc71f6065d255ea2bdc0fe6520485c1b45db" @@ -6015,6 +6181,22 @@ string.prototype.trim@~1.1.2: es-abstract "^1.5.0" function-bind "^1.0.2" +string.prototype.trimleft@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74" + integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag== + dependencies: + define-properties "^1.1.3" + function-bind "^1.1.1" + +string.prototype.trimright@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9" + integrity 
sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g== + dependencies: + define-properties "^1.1.3" + function-bind "^1.1.1" + string_decoder@~0.10.x: version "0.10.31" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" @@ -6366,7 +6548,17 @@ type-is@~1.6.17, type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -typedarray-to-buffer@^3.1.2: +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3" + integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow== + +typedarray-to-buffer@^3.1.2, typedarray-to-buffer@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== @@ -6813,6 +7005,16 @@ web3@1.0.0-beta.35: web3-shh "1.0.0-beta.35" web3-utils "1.0.0-beta.35" +websocket-as-promised@^0.10.1: + version "0.10.1" + resolved "https://registry.yarnpkg.com/websocket-as-promised/-/websocket-as-promised-0.10.1.tgz#30aef1529fc7e797b8270875a408d83264e52958" + integrity sha512-hKAZPSIGaao4WPoporPkDQpKl9jz0I0Hh9x/Dn05k6u//e44+0rAhMam4/gvBlYZl02wJYELD561bAenyxwJJQ== + dependencies: + chnl "^0.5.0" + flat-options "^0.1.3" + promise-controller "^0.5.2" + promise.prototype.finally "^3.1.0" + websocket@1.0.26: version "1.0.26" resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.26.tgz#a03a01299849c35268c83044aa919c6374be8194" @@ -6823,6 +7025,17 @@ websocket@1.0.26: typedarray-to-buffer "^3.1.2" yaeti "^0.0.6" +websocket@^1.0.30: + version "1.0.31" + resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.31.tgz#e5d0f16c3340ed87670e489ecae6144c79358730" + integrity sha512-VAouplvGKPiKFDTeCCO65vYHsyay8DqoBSlzIO3fayrfOgU94lQN5a1uWVnFrMLceTJw/+fQXR5PGbUVRaHshQ== + dependencies: + debug "^2.2.0" + es5-ext "^0.10.50" + nan "^2.14.0" + typedarray-to-buffer "^3.1.5" + yaeti "^0.0.6" + "websocket@git://github.com/frozeman/WebSocket-Node.git#browserifyCompatible": version "1.0.26" resolved "git://github.com/frozeman/WebSocket-Node.git#6c72925e3f8aaaea8dc8450f97627e85263999f2" @@ -7078,6 +7291,10 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync@link:./js/zksync.js": +"zksync-crypto@link:../js/zksync-crypto": + version "0.0.0" + uid "" + +"zksync@link:../js/zksync.js": version "0.0.0" uid "" diff --git a/js/client/vue.config.js b/js/client/vue.config.js index 214f27ffe7..cfd8e84a6d 100644 --- a/js/client/vue.config.js +++ b/js/client/vue.config.js @@ -1,7 +1,7 @@ module.exports = { publicPath: process.env.NODE_ENV === 'production' ? 
'/client/' - : '/', + : '/', chainWebpack: config => { config.optimization.minimize(process.env.NODE_ENV === 'production'); config.resolve.symlinks(false); diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index 3c4428a4df..19bd38e934 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -534,8 +534,9 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -zksync-crypto@../zksync-crypto: +"zksync-crypto@link:../zksync-crypto": version "0.0.0" + uid "" "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/zksync-crypto/.gitignore b/js/zksync-crypto/.gitignore index f5de7310e4..e35e598f75 100644 --- a/js/zksync-crypto/.gitignore +++ b/js/zksync-crypto/.gitignore @@ -1,5 +1,6 @@ /target **/*.rs.bk Cargo.lock +yarn.lock bin/ wasm-pack.log diff --git a/js/zksync-crypto/Cargo.toml b/js/zksync-crypto/Cargo.toml index 6f16e0b454..e48b8441ec 100644 --- a/js/zksync-crypto/Cargo.toml +++ b/js/zksync-crypto/Cargo.toml @@ -7,7 +7,7 @@ authors = ["Vitalii Drohan "] edition = "2018" [lib] -crate-type = ["cdylib", "rlib"] +crate-type = ["cdylib"] [features] default = ["console_error_panic_hook"] diff --git a/js/zksync.js/package.json b/js/zksync.js/package.json index d6eb039e94..28d70ff925 100644 --- a/js/zksync.js/package.json +++ b/js/zksync.js/package.json @@ -13,7 +13,7 @@ "js-sha256": "^0.9.0", "websocket": "^1.0.30", "websocket-as-promised": "^0.10.1", - "zksync-crypto": "../zksync-crypto" + "zksync-crypto": "link:../zksync-crypto" }, "peerDependencies": { "ethers": "^4.0.33" diff --git a/js/zksync.js/yarn.lock b/js/zksync.js/yarn.lock index 283c605c8d..10c5b6bf1a 100644 --- a/js/zksync.js/yarn.lock +++ b/js/zksync.js/yarn.lock @@ -1331,5 +1331,6 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.0.tgz#fcbe2db63610361afcc5eb9e0ac91e976d046114" integrity sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg== -zksync-crypto@../zksync-crypto: +"zksync-crypto@link:../zksync-crypto": version "0.0.0" + uid "" From 73702a7d0052622205230399a968b49c5d80f25d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 13:50:07 +0200 Subject: [PATCH 064/186] Small deployUpgradeGatekeeper fix --- contracts/src.ts/deploy.ts | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 7d9ddcc3d8..1f8339cd63 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -223,13 +223,9 @@ export class Deployer { ); this.addresses.UpgradeGatekeeper = contract.address; - let transferMastershipTransaction; - transferMastershipTransaction = await this.getDeployedContract('Governance').transferMastership(contract.address); - await transferMastershipTransaction.wait(); - transferMastershipTransaction = await this.getDeployedContract('Verifier').transferMastership(contract.address); - await transferMastershipTransaction.wait(); - transferMastershipTransaction = await this.getDeployedContract('Franklin').transferMastership(contract.address); - await transferMastershipTransaction.wait(); + await (await this.getDeployedContract('Governance').transferMastership(contract.address)).wait(); + await (await this.getDeployedContract('Verifier').transferMastership(contract.address)).wait(); + await (await this.getDeployedContract('Franklin').transferMastership(contract.address)).wait(); return contract; } 
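For context on the pattern adopted in the deploy.ts fix above: with ethers.js a state-changing contract call resolves as soon as the transaction is broadcast, while `.wait()` resolves once it has been mined, so `await (await contract.method(...)).wait()` both sends the call and blocks until confirmation without keeping a named transaction variable around. The sketch below is only an illustration of that idea, not code from this patch series; the minimal ABI fragment, function and parameter names are assumptions made for the example.

    import { ethers } from "ethers";

    // Minimal ABI fragment for the single call we care about
    // (assumption: it matches Ownable.transferMastership from this repo).
    const OWNABLE_ABI = ["function transferMastership(address _newMaster)"];

    // Transfers mastership of an already-deployed contract to the upgrade
    // gatekeeper and waits until the transaction is mined.
    async function transferMastershipTo(
        wallet: ethers.Wallet,
        contractAddress: string,
        gatekeeperAddress: string,
    ) {
        const contract = new ethers.Contract(contractAddress, OWNABLE_ABI, wallet);

        // The first await resolves once the transaction is broadcast;
        // .wait() resolves once it has been included in a block.
        const tx = await contract.transferMastership(gatekeeperAddress);
        const receipt = await tx.wait();

        // Equivalent one-liner, as used in deploy.ts after this patch:
        // await (await contract.transferMastership(gatekeeperAddress)).wait();
        return receipt;
    }
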
From 6cc4442c4387bec7c785d67e8dbfc4b8ae9fb690 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 15:37:12 +0200 Subject: [PATCH 065/186] registeredPriorityOperations => totalRegisteredPriorityOperations --- contracts/contracts/Franklin.sol | 2 +- contracts/contracts/UpgradeGatekeeper.sol | 6 +++--- contracts/contracts/test/DummyTarget.sol | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 50b8250cf1..ac8c2430c0 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -51,7 +51,7 @@ contract Franklin is Storage, Config, Events { blocks[0].stateRoot = _genesisRoot; } - function registeredPriorityOperations() public view returns (uint64) { + function totalRegisteredPriorityOperations() public view returns (uint64) { return firstPriorityRequestId + totalOpenPriorityRequests; } diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 47f93b4950..cd2ffd8733 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -94,11 +94,11 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Finalize; (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( - abi.encodeWithSignature("registeredPriorityOperations()") + abi.encodeWithSignature("totalRegisteredPriorityOperations()") ); require(mainContractCallSuccess, "uaf12"); // uaf12 - main contract static call failed - uint64 registeredPriorityOperations = abi.decode(encodedResult, (uint64)); - upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = registeredPriorityOperations; + uint64 totalRegisteredPriorityOperations = abi.decode(encodedResult, (uint64)); + upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; emit UpgradeModeFinalizeStatusActivated(proxyAddress, version[proxyAddress]); return true; diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index 012946dee2..ef65512832 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -8,7 +8,7 @@ interface DummyTarget { function verifiedPriorityOperations() external returns (uint64); - function registeredPriorityOperations() external returns (uint64); + function totalRegisteredPriorityOperations() external returns (uint64); function verifyPriorityOperation() external; @@ -36,7 +36,7 @@ contract DummyFirst is DummyTarget { return _verifiedPriorityOperations; } - function registeredPriorityOperations() external returns (uint64){ + function totalRegisteredPriorityOperations() external returns (uint64){ return 1; } @@ -68,7 +68,7 @@ contract DummySecond is DummyTarget { return _verifiedPriorityOperations; } - function registeredPriorityOperations() external returns (uint64){ + function totalRegisteredPriorityOperations() external returns (uint64){ return 0; } From c8656493801f41fa71e87bac12bdfa40bf710129 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 16:00:00 +0200 Subject: [PATCH 066/186] Renames in UpgradeStatus enum --- contracts/contracts/Events.sol | 4 +-- contracts/contracts/UpgradeGatekeeper.sol | 34 +++++++++---------- .../test/unit_tests/upgradeGatekeeper_test.ts | 10 +++--- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/contracts/contracts/Events.sol 
b/contracts/contracts/Events.sol index fdce311c5e..65a51d322e 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -69,8 +69,8 @@ contract UpgradeEvents { uint64 version ); - /// @notice Upgrade mode finalize status event - event UpgradeModeFinalizeStatusActivated( + /// @notice Upgrade mode cleaning up status event + event UpgradeModeCleaningUpStatusActivated( address proxyAddress, uint64 version ); diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index cd2ffd8733..9648cec52c 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -8,7 +8,7 @@ import "./Ownable.sol"; /// @author Matter Labs contract UpgradeGatekeeper is UpgradeEvents, Ownable { - /// @notice Notice period before activation finalize status mode (in seconds) + /// @notice Notice period before activation cleaning up status of upgrade mode (in seconds) uint256 constant NOTICE_PERIOD = 2 weeks; /// @notice Versions of proxy contracts @@ -19,9 +19,9 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Upgrade mode statuses enum UpgradeStatus { - NotActive, + Idle, NoticePeriod, - Finalize + CleaningUp } /// @notice Info for upgrade proxy @@ -37,7 +37,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { address nextTarget; /// @notice Number of priority operations that must be verified at the time of finishing upgrade - /// @dev Will store zero in case of not active finalize status of upgrade mode + /// @dev Will store zero in case of not active cleaning up status of upgrade mode uint64 priorityOperationsToProcessBeforeUpgrade; } @@ -56,7 +56,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @param newTarget New target function upgradeProxy(address proxyAddress, address newTarget) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.NotActive, "upa11"); // upa11 - unable to activate active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "upa11"); // upa11 - unable to activate active upgrade mode upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NoticePeriod; upgradeInfo[proxyAddress].activationTime = now; @@ -70,9 +70,9 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @param proxyAddress Address of proxy to process function cancelProxyUpgrade(address proxyAddress) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.NotActive, "umc11"); // umc11 - unable to cancel not active upgrade mode + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "umc11"); // umc11 - unable to cancel not active upgrade mode - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; @@ -80,18 +80,18 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { emit UpgradeCanceled(proxyAddress, version[proxyAddress]); } - /// @notice Checks that finalize status is active and activates it if needed + /// @notice Checks that cleaning up status is active and activates it if needed /// @param proxyAddress Address of proxy to process - /// @return Bool flag 
indicating that finalize status is active after this call - function activeFinalizeStatusOfUpgrade(address proxyAddress) public returns (bool) { - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.NotActive, "uaf11"); // uaf11 - unable to activate finalize status in case of not active upgrade mode + /// @return Bool flag indicating that cleaning up status is active after this call + function activateCleaningUpStatusOfUpgrade(address proxyAddress) public returns (bool) { + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "uaf11"); // uaf11 - unable to activate cleaning up status in case of not active upgrade mode - if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Finalize) { + if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.CleaningUp) { return true; } if (now >= upgradeInfo[proxyAddress].activationTime + NOTICE_PERIOD) { - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Finalize; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.CleaningUp; (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("totalRegisteredPriorityOperations()") @@ -100,7 +100,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { uint64 totalRegisteredPriorityOperations = abi.decode(encodedResult, (uint64)); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; - emit UpgradeModeFinalizeStatusActivated(proxyAddress, version[proxyAddress]); + emit UpgradeModeCleaningUpStatusActivated(proxyAddress, version[proxyAddress]); return true; } else { return false; @@ -112,7 +112,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @param newTargetInitializationParameters New target initialization parameters function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Finalize, "umf11"); // umf11 - unable to finish upgrade without finalize status active + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.CleaningUp, "umf11"); // umf11 - unable to finish upgrade without cleaning up status active (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("verifiedPriorityOperations()") @@ -120,7 +120,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { require(mainContractCallSuccess, "umf12"); // umf12 - main contract static call failed uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - require(verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of finalize status + require(verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of cleaning up status (bool proxyUpgradeCallSuccess, ) = proxyAddress.call( abi.encodeWithSignature("upgradeTarget(address,bytes)", upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters) @@ -130,7 +130,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { emit UpgradeCompleted(proxyAddress, 
version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); version[proxyAddress]++; - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NotActive; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; upgradeInfo[proxyAddress].activationTime = 0; upgradeInfo[proxyAddress].nextTarget = address(0); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index b607f3c602..8f103dbadf 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -51,7 +51,7 @@ describe("UpgradeGatekeeper unit tests", function () { it("check UpgradeGatekeeper reverts; activate and cancel upgrade", async () => { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) @@ -73,7 +73,7 @@ describe("UpgradeGatekeeper unit tests", function () { let activated_time = performance.now(); - // wait and activate finalize status + // wait and activate cleaning up status let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_NOTICE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { @@ -87,10 +87,10 @@ describe("UpgradeGatekeeper unit tests", function () { } if (step != 3) { - await UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address); + await UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address); } else { - await expect(UpgradeGatekeeperContract.activeFinalizeStatusOfUpgrade(proxyTestContract.address)) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModeFinalizeStatusActivated') + await expect(UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModeCleaningUpStatusActivated') .withArgs(proxyTestContract.address, 0) } } From fffa4744e23e889568d3f1bf5fdc625fa5df9903 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 16:19:12 +0200 Subject: [PATCH 067/186] Merge with ib/73/contracts_upgradeability after renames in UpgradeStatus enum --- contracts/scripts/test-upgrade-franklin.ts | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index 235296cf5c..fc5382f8ad 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -51,20 +51,14 @@ async function main() { let notice_period = parseInt(await upgradeGatekeeper.get_NOTICE_PERIOD()); - const upgradeProxyTransaction = await upgradeGatekeeper.upgradeProxy(proxyContract.address, newTarget.address); - await upgradeProxyTransaction.wait(); - let upgrade_start_time = performance.now(); + await (await 
upgradeGatekeeper.upgradeProxy(proxyContract.address, newTarget.address)).wait(); // wait notice period - while ((performance.now() - upgrade_start_time) < notice_period * 1000 + 10) { - - } + await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); // finish upgrade - const finalizeStatusActivationTransaction = await upgradeGatekeeper.activeFinalizeStatusOfUpgrade(proxyContract.address); - await finalizeStatusActivationTransaction.wait(); - const finishProxyUpgradeTransaction = await upgradeGatekeeper.finishProxyUpgrade(proxyContract.address, []); - await finishProxyUpgradeTransaction.wait(); + await (await upgradeGatekeeper.activateCleaningUpStatusOfUpgrade(proxyContract.address)).wait(); + await (await upgradeGatekeeper.finishProxyUpgrade(proxyContract.address, [])).wait(); await expect(await proxyContract.getTarget()) .to.equal(newTarget.address); From 0d00799023cf7ca13a05dadbbc1366272254491c Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 17:02:07 +0200 Subject: [PATCH 068/186] Moved Upgradeable.sol into Proxy.sol --- contracts/contracts/Proxy.sol | 53 +++++++++++++++++++++++-- contracts/contracts/Upgradeable.sol | 61 ----------------------------- 2 files changed, 49 insertions(+), 65 deletions(-) delete mode 100644 contracts/contracts/Upgradeable.sol diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol index fc5c7bb40f..198b358ee8 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -1,17 +1,62 @@ pragma solidity 0.5.16; -import "./Upgradeable.sol"; +import "./Ownable.sol"; /// @title Proxy Contract /// @author Matter Labs -contract Proxy is Upgradeable { +contract Proxy is Ownable { + + /// @notice Storage position of "target" (actual implementation address) + bytes32 private constant targetPosition = keccak256("target"); /// @notice Contract constructor - /// @dev Calls Upgradeable contract constructor + /// @dev Calls Ownable contract constructor and initialize target /// @param target Initial implementation address /// @param targetInitializationParameters Target initialization parameters - constructor(address target, bytes memory targetInitializationParameters) Upgradeable(target, targetInitializationParameters) public {} + constructor(address target, bytes memory targetInitializationParameters) Ownable(msg.sender) public { + setTarget(target); + (bool initializationSuccess, ) = getTarget().delegatecall( + abi.encodeWithSignature("initialize(bytes)", targetInitializationParameters) + ); + require(initializationSuccess, "uin11"); // uin11 - target initialization failed + } + + /// @notice Intercepts initialization calls + function initialize(bytes calldata) external pure { + revert("ini11"); // ini11 - interception of initialization call + } + + /// @notice Returns target of contract + /// @return Actual implementation address + function getTarget() public view returns (address target) { + bytes32 position = targetPosition; + assembly { + target := sload(position) + } + } + + /// @notice Sets new target of contract + /// @param _newTarget New actual implementation address + function setTarget(address _newTarget) internal { + bytes32 position = targetPosition; + assembly { + sstore(position, _newTarget) + } + } + + /// @notice Upgrades target + /// @param newTarget New target + /// @param newTargetInitializationParameters New target initialization parameters + function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external { + requireMaster(msg.sender); + + 
setTarget(newTarget); + (bool initializationSuccess, ) = getTarget().delegatecall( + abi.encodeWithSignature("initialize(bytes)", newTargetInitializationParameters) + ); + require(initializationSuccess, "ufu11"); // ufu11 - target initialization failed + } /// @notice Performs a delegatecall to the contract implementation /// @dev Fallback function allowing to perform a delegatecall to the given implementation diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol deleted file mode 100644 index cf76bbb2ac..0000000000 --- a/contracts/contracts/Upgradeable.sol +++ /dev/null @@ -1,61 +0,0 @@ -pragma solidity 0.5.16; - -import "./Ownable.sol"; - - -/// @title Upgradeable contract -/// @author Matter Labs -contract Upgradeable is Ownable { - - /// @notice Storage position of "target" (actual implementation address) - bytes32 private constant targetPosition = keccak256("target"); - - /// @notice Contract constructor - /// @dev Calls Ownable contract constructor and initialize target - /// @param target Initial implementation address - /// @param targetInitializationParameters Target initialization parameters - constructor(address target, bytes memory targetInitializationParameters) Ownable(msg.sender) public { - setTarget(target); - (bool initializationSuccess, ) = getTarget().delegatecall( - abi.encodeWithSignature("initialize(bytes)", targetInitializationParameters) - ); - require(initializationSuccess, "uin11"); // uin11 - target initialization failed - } - - /// @notice Intercepts initialization calls - function initialize(bytes calldata) external pure { - revert("ini11"); // ini11 - interception of initialization call - } - - /// @notice Returns target of contract - /// @return Actual implementation address - function getTarget() public view returns (address target) { - bytes32 position = targetPosition; - assembly { - target := sload(position) - } - } - - /// @notice Sets new target of contract - /// @param _newTarget New actual implementation address - function setTarget(address _newTarget) internal { - bytes32 position = targetPosition; - assembly { - sstore(position, _newTarget) - } - } - - /// @notice Upgrades target - /// @param newTarget New target - /// @param newTargetInitializationParameters New target initialization parameters - function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external { - requireMaster(msg.sender); - - setTarget(newTarget); - (bool initializationSuccess, ) = getTarget().delegatecall( - abi.encodeWithSignature("initialize(bytes)", newTargetInitializationParameters) - ); - require(initializationSuccess, "ufu11"); // ufu11 - target initialization failed - } - -} From db9a6b7c3850dcdc8aee001a5d753e001fde2413 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 17:19:21 +0200 Subject: [PATCH 069/186] verifiedPriorityOperations => totalVerifiedPriorityOperations --- contracts/contracts/Franklin.sol | 2 +- contracts/contracts/UpgradeGatekeeper.sol | 6 +++--- contracts/contracts/test/DummyTarget.sol | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index ac8c2430c0..ecd3b4875e 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -55,7 +55,7 @@ contract Franklin is Storage, Config, Events { return firstPriorityRequestId + totalOpenPriorityRequests; } - function verifiedPriorityOperations() public view returns (uint64) { + function 
totalVerifiedPriorityOperations() public view returns (uint64) { return firstPriorityRequestId; } diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 9648cec52c..0d5ae5f964 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -115,12 +115,12 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.CleaningUp, "umf11"); // umf11 - unable to finish upgrade without cleaning up status active (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( - abi.encodeWithSignature("verifiedPriorityOperations()") + abi.encodeWithSignature("totalVerifiedPriorityOperations()") ); require(mainContractCallSuccess, "umf12"); // umf12 - main contract static call failed - uint64 verifiedPriorityOperations = abi.decode(encodedResult, (uint64)); + uint64 totalVerifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - require(verifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of cleaning up status + require(totalVerifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of cleaning up status (bool proxyUpgradeCallSuccess, ) = proxyAddress.call( abi.encodeWithSignature("upgradeTarget(address,bytes)", upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters) diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index ef65512832..c5d719ea95 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -6,7 +6,7 @@ interface DummyTarget { function initialize(bytes calldata initializationParameters) external; - function verifiedPriorityOperations() external returns (uint64); + function totalVerifiedPriorityOperations() external returns (uint64); function totalRegisteredPriorityOperations() external returns (uint64); @@ -32,7 +32,7 @@ contract DummyFirst is DummyTarget { } } - function verifiedPriorityOperations() external returns (uint64){ + function totalVerifiedPriorityOperations() external returns (uint64){ return _verifiedPriorityOperations; } @@ -64,7 +64,7 @@ contract DummySecond is DummyTarget { } } - function verifiedPriorityOperations() external returns (uint64){ + function totalVerifiedPriorityOperations() external returns (uint64){ return _verifiedPriorityOperations; } From f4c9a899e04ecf68935affa1715799f7bae5e10f Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 18:43:39 +0200 Subject: [PATCH 070/186] Added checking the presence in the main contract functions that will be called from the gatekeeper --- contracts/test/unit_tests/proxy_test.ts | 2 +- contracts/test/unit_tests/upgradeGatekeeper_test.ts | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/contracts/test/unit_tests/proxy_test.ts b/contracts/test/unit_tests/proxy_test.ts index fb9a0cfac9..03bb7e19fd 100644 --- a/contracts/test/unit_tests/proxy_test.ts +++ b/contracts/test/unit_tests/proxy_test.ts @@ -24,7 +24,7 @@ describe("Proxy unit tests", function () { expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero, []) 
)).revertReason).equal("oro11") }); - it("check Proxy reverts", async () => { + it("checking Proxy reverts", async () => { expect((await getCallRevertReason( () => proxyTestContract.initialize([]) )).revertReason).equal("ini11") expect((await getCallRevertReason( () => proxyTestContract.upgradeTarget(proxyTestContract.address, []) )).revertReason).equal("ufu11") }); diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 8f103dbadf..55f8c4228c 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -30,9 +30,8 @@ describe("UpgradeGatekeeper unit tests", function () { gasLimit: 6000000, }) proxyTestContract.transferMastership(UpgradeGatekeeperContract.address); - }); - it("check initial dummy index and storage", async () => { + // check initial dummy index and storage expect(await proxyDummyInterface.get_DUMMY_INDEX()) .to.equal(1); @@ -49,7 +48,7 @@ describe("UpgradeGatekeeper unit tests", function () { expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") }); - it("check UpgradeGatekeeper reverts; activate and cancel upgrade", async () => { + it("checking UpgradeGatekeeper reverts; activation and cancelation upgrade", async () => { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") @@ -126,4 +125,12 @@ describe("UpgradeGatekeeper unit tests", function () { .withArgs(proxyTestContract.address, 1); }); + it("checking the presence in the main contract functions that will be called from the gatekeeper", async () => { + let mainContract = await deployContract(wallet, require('../../build/Franklin'), [], { + gasLimit: 6000000, + }); + await mainContract.totalRegisteredPriorityOperations(); + await mainContract.totalVerifiedPriorityOperations(); + }); + }); From ab4697dfbc9d6ec550592b04021d6d7947a16011 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 19:11:52 +0200 Subject: [PATCH 071/186] Code style in run_upgrade_franklin function --- core/testkit/src/external_commands.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/testkit/src/external_commands.rs b/core/testkit/src/external_commands.rs index d5d12e8385..9360cf46cf 100644 --- a/core/testkit/src/external_commands.rs +++ b/core/testkit/src/external_commands.rs @@ -92,8 +92,8 @@ pub fn deploy_test_contracts() -> Contracts { pub fn run_upgrade_franklin(franklin_address: Address, upgrade_gatekeeper_address: Address) { let result = Command::new("sh") .arg("test-upgrade-franklin.sh") - .arg(String::from("0x") + &hex::encode(franklin_address.as_bytes())) - .arg(String::from("0x") + &hex::encode(upgrade_gatekeeper_address.as_bytes())) + .arg(format!("0x{:x}", franklin_address)) + .arg(format!("0x{:x}", upgrade_gatekeeper_address)) .output() .expect("failed to execute test upgrade franklin script"); if !result.status.success() { From 79845bb401a4d4e89ba4b26241fd1b7ce9c6d29d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 19:14:09 +0200 Subject: [PATCH 
072/186] Removed dependency of hex in core/testkit --- Cargo.lock | 1 - core/testkit/Cargo.toml | 1 - core/testkit/src/external_commands.rs | 1 - 3 files changed, 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ae189ca3cf..10c36698b9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3113,7 +3113,6 @@ dependencies = [ "ethabi 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "models 0.0.1", "prover 0.0.1", diff --git a/core/testkit/Cargo.toml b/core/testkit/Cargo.toml index 6340bb2dbb..2ee7b9aba5 100644 --- a/core/testkit/Cargo.toml +++ b/core/testkit/Cargo.toml @@ -12,7 +12,6 @@ crypto_exports = { path = "../crypto_exports", version = "0.1.0" } eth_client = { path = "../eth_client", version = "0.1.0" } -hex = "0.4.2" tokio = { version = "0.2", features = ["full"] } futures = { version = "0.3", features = ["compat"] } log = "0.4" diff --git a/core/testkit/src/external_commands.rs b/core/testkit/src/external_commands.rs index 9360cf46cf..9563734833 100644 --- a/core/testkit/src/external_commands.rs +++ b/core/testkit/src/external_commands.rs @@ -1,7 +1,6 @@ //! Run external commands from `$ZKSYNC_HOME/bin` //!`$ZKSYNC_HOME/bin` should be in path. //! -extern crate hex; use std::collections::HashMap; use std::process::Command; use std::str::FromStr; From 5a454bfe11c09781ba28e5b4a185c2118eca1003 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 19:18:14 +0200 Subject: [PATCH 073/186] Comment in UpgradeGatekeeper.sol --- contracts/contracts/UpgradeGatekeeper.sol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 0d5ae5f964..33a848c2d0 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -32,7 +32,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @dev Will be equal to zero in case of not active mode uint256 activationTime; - /// @notice Next target + /// @notice Address of the next version of the contract to be upgraded /// @dev Will store zero in case of not active upgrade mode address nextTarget; From e74d8735c416a1801755e92a23d52e55e46d6845 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sun, 22 Mar 2020 19:25:15 +0200 Subject: [PATCH 074/186] Comment in UpgradeGatekeeper.sol --- contracts/contracts/UpgradeGatekeeper.sol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 33a848c2d0..ab5b5a8e81 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -28,7 +28,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { struct UpgradeInfo { UpgradeStatus upgradeStatus; - /// @notice Time of activating notice period + /// @notice Notice period activation timestamp (in seconds) /// @dev Will be equal to zero in case of not active mode uint256 activationTime; From 8057d6edfaa1e4c5cdcae0d8e215ce9ce6646e7c Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 23 Mar 2020 09:49:19 +0200 Subject: [PATCH 075/186] Comment in UpgradeGatekeeper.sol --- contracts/contracts/UpgradeGatekeeper.sol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index ab5b5a8e81..1c0f6284c9 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -36,7 +36,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @dev Will store zero in case of not active upgrade mode address nextTarget; - /// @notice Number of priority operations that must be verified at the time of finishing upgrade + /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade /// @dev Will store zero in case of not active cleaning up status of upgrade mode uint64 priorityOperationsToProcessBeforeUpgrade; } From 0c2425bcdee83bc6c4d6207f2708e78bcc82c606 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 23 Mar 2020 09:58:00 +0200 Subject: [PATCH 076/186] upgradeProxy => startProxyUpgrade --- contracts/contracts/UpgradeGatekeeper.sol | 4 ++-- contracts/test/unit_tests/upgradeGatekeeper_test.ts | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 1c0f6284c9..ec0e5e312b 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -51,10 +51,10 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { mainContractAddress = _mainContractAddress; } - /// @notice Activates notice period + /// @notice Starts upgrade (activates notice period) /// @param proxyAddress Address of proxy to process /// @param newTarget New target - function upgradeProxy(address proxyAddress, address newTarget) external { + function startProxyUpgrade(address proxyAddress, address newTarget) external { requireMaster(msg.sender); require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "upa11"); // upa11 - unable to activate active upgrade mode diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 55f8c4228c..919ae66a51 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -43,7 +43,7 @@ describe("UpgradeGatekeeper unit tests", function () { it("checking that requireMaster calls present", async () => { let UpgradeGatekeeperContract_with_wallet2_signer = await UpgradeGatekeeperContract.connect(wallet2); - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.upgradeProxy(AddressZero, AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startProxyUpgrade(AddressZero, AddressZero) )).revertReason).equal("oro11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelProxyUpgrade(AddressZero) )).revertReason).equal("oro11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") }); @@ -53,10 +53,10 @@ describe("UpgradeGatekeeper unit tests", function () { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") - await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, 
DummySecond.address)) + await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address)) .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 0) - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') .withArgs(proxyTestContract.address, 0) @@ -66,7 +66,7 @@ describe("UpgradeGatekeeper unit tests", function () { let start_time = performance.now(); // activate - await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummySecond.address)) + await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address)) .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 0) @@ -117,7 +117,7 @@ describe("UpgradeGatekeeper unit tests", function () { .to.equal(bytes[3]); // one more activate and cancel with version equal to 1 - await expect(UpgradeGatekeeperContract.upgradeProxy(proxyTestContract.address, DummyFirst.address)) + await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummyFirst.address)) .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') .withArgs(proxyTestContract.address, 1); await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) From 336a1b478d276d1f60dd242a692fd64de787f0e4 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 23 Mar 2020 10:00:09 +0200 Subject: [PATCH 077/186] Comment in UpgradeGatekeeper.sol --- contracts/contracts/UpgradeGatekeeper.sol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index ec0e5e312b..e0909d7bd7 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -37,7 +37,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { address nextTarget; /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade - /// @dev Will store zero in case of not active cleaning up status of upgrade mode + /// @dev Will store zero in case of not active upgrade mode or not active cleaning up status of upgrade mode uint64 priorityOperationsToProcessBeforeUpgrade; } From cf05dd03b67a86a785f81b4b0f8e6371a5e6ce53 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Mon, 23 Mar 2020 10:08:17 +0200 Subject: [PATCH 078/186] Merge branch 'ib/73/contracts_upgradeability' into ib/301/migration_test --- contracts/scripts/test-upgrade-franklin.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index fc5382f8ad..e9bc12534f 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -51,7 +51,7 @@ async function main() { let notice_period = parseInt(await upgradeGatekeeper.get_NOTICE_PERIOD()); - await (await upgradeGatekeeper.upgradeProxy(proxyContract.address, newTarget.address)).wait(); + await (await upgradeGatekeeper.startProxyUpgrade(proxyContract.address, 
newTarget.address)).wait(); // wait notice period await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); From 0c41202b53d1140571d31e321bbaa57369d58a03 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 23 Mar 2020 16:50:12 +0300 Subject: [PATCH 079/186] Make the operation id not required in ethereum schema --- .../2020-03-19-042712_more_eth_data/down.sql | 7 + .../2020-03-19-042712_more_eth_data/up.sql | 20 ++ core/storage/src/chain/block/mod.rs | 20 +- core/storage/src/ethereum/mod.rs | 192 +++++++++++------- core/storage/src/ethereum/records.rs | 23 ++- core/storage/src/schema.rs | 16 +- core/storage/src/tests/chain/block.rs | 28 ++- core/storage/src/tests/ethereum.rs | 101 ++++----- 8 files changed, 251 insertions(+), 156 deletions(-) diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql index f686943a1a..48d3299957 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql @@ -1,3 +1,10 @@ -- This file should undo anything in `up.sql` DROP TABLE IF EXISTS eth_nonce CASCADE; DROP TABLE IF EXISTS eth_stats CASCADE; +DROP TABLE IF EXISTS eth_ops_binding CASCADE; + +ALTER TABLE eth_operations + ADD COLUMN op_id bigserial REFERENCES operations (id), + DROP COLUMN op_type CASCADE, + ADD COLUMN gas_price numeric not null, + DROP COLUMN last_used_gas_price CASCADE \ No newline at end of file diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql index 0046e957e2..ec00eb0541 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql @@ -14,3 +14,23 @@ CREATE TABLE eth_stats ( verify_ops BIGINT NOT NULL, withdraw_ops BIGINT NOT NULL ); + +-- Table connection `eth_operations` and `operations` table. +-- Each entry provides a mapping between the Ethereum transaction and the ZK Sync operation. +CREATE TABLE eth_ops_binding +( + id bigserial PRIMARY KEY, + op_id bigserial NOT NULL REFERENCES operations (id), + eth_op_id bigserial NOT NULL REFERENCES eth_operations (id) +); + +ALTER TABLE eth_operations + -- Add the operation type (`commit` / `verify` / `withdraw`). + ADD COLUMN op_type text not null, + -- Remove the `op_id` field, since `withdraw` operation does not have an associated operation. + -- The `eth_ops_binding` table should be used since now. + DROP COLUMN op_id CASCADE, + -- Rename `gas_price` to `last_used_gas_price`, since it's the only field changed for resent txs + -- and it makes no sense to store every sent transaction separately. 
+ DROP COLUMN gas_price CASCADE, + ADD COLUMN last_used_gas_price numeric not null diff --git a/core/storage/src/chain/block/mod.rs b/core/storage/src/chain/block/mod.rs index 61d9a1dc64..fcec8f2e44 100644 --- a/core/storage/src/chain/block/mod.rs +++ b/core/storage/src/chain/block/mod.rs @@ -27,7 +27,7 @@ use crate::{ operations_ext::records::{InsertTx, ReadTx}, state::StateSchema, }, - ethereum::records::StorageETHOperation, + ethereum::records::ETHBinding, }; mod conversion; @@ -225,7 +225,8 @@ impl<'a> BlockSchema<'a> { limit: u32, ) -> QueryResult> { // This query does the following: - // - joins the `operations` and `eth_operations` tables to collect the data: + // - joins the `operations` and `eth_operations` (using the intermediate `eth_ops_binding` table) + // tables to collect the data: // block number, ethereum transaction hash, action type and action creation timestamp; // - joins the `blocks` table with result of the join twice: once for committed operations // and verified operations; @@ -239,7 +240,8 @@ impl<'a> BlockSchema<'a> { operations.action_type, \ operations.created_at \ from operations \ - left join eth_operations on eth_operations.op_id = operations.id \ + left join eth_ops_binding on eth_ops_binding.op_id = operations.id \ + left join eth_operations on eth_operations.id = eth_ops_binding.eth_op_id \ ) \ select \ blocks.number as block_number, \ @@ -276,7 +278,8 @@ impl<'a> BlockSchema<'a> { let block_number = query.parse::().unwrap_or(i64::max_value()); let l_query = query.to_lowercase(); // This query does the following: - // - joins the `operations` and `eth_operations` tables to collect the data: + // - joins the `operations` and `eth_operations` (using the intermediate `eth_ops_binding` table) + // tables to collect the data: // block number, ethereum transaction hash, action type and action creation timestamp; // - joins the `blocks` table with result of the join twice: once for committed operations // and verified operations; @@ -294,7 +297,8 @@ impl<'a> BlockSchema<'a> { operations.action_type, \ operations.created_at \ from operations \ - left join eth_operations on eth_operations.op_id = operations.id \ + left join eth_ops_binding on eth_ops_binding.op_id = operations.id \ + left join eth_operations on eth_operations.id = eth_ops_binding.eth_op_id \ ) \ select \ blocks.number as block_number, \ @@ -337,10 +341,10 @@ impl<'a> BlockSchema<'a> { pub fn load_unsent_ops(&self) -> QueryResult> { self.0.conn().transaction(|| { let ops: Vec<_> = operations::table - .left_join(eth_operations::table.on(eth_operations::op_id.eq(operations::id))) - .filter(eth_operations::id.is_null()) + .left_join(eth_ops_binding::table.on(eth_ops_binding::op_id.eq(operations::id))) + .filter(eth_ops_binding::id.is_null()) .order(operations::id.asc()) - .load::<(StoredOperation, Option)>(self.0.conn())?; + .load::<(StoredOperation, Option)>(self.0.conn())?; ops.into_iter().map(|(o, _)| o.into_op(self.0)).collect() }) } diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 1f4010ab7a..2033db40bb 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -3,12 +3,13 @@ use bigdecimal::BigDecimal; use diesel::dsl::{insert_into, update}; use diesel::prelude::*; -use itertools::Itertools; use web3::types::H256; // Workspace imports use models::Operation; // Local imports -use self::records::{ETHNonce, ETHStats, NewETHOperation, StorageETHOperation}; +use self::records::{ + ETHBinding, ETHNonce, ETHStats, NewETHBinding, 
NewETHOperation, StorageETHOperation, +}; use crate::chain::operations::records::StoredOperation; use crate::schema::*; use crate::StorageProcessor; @@ -22,6 +23,16 @@ pub enum OperationType { Withdraw, } +impl OperationType { + pub fn to_string(&self) -> String { + match self { + Self::Commit => "commit".into(), + Self::Verify => "verify".into(), + Self::Withdraw => "withdraw".into(), + } + } +} + /// Ethereum schema is capable of storing the information about the /// interaction with the Ethereum blockchain (mainly the list of sent /// Ethereum transactions). @@ -34,66 +45,52 @@ impl<'a> EthereumSchema<'a> { pub fn load_unconfirmed_operations( &self, // TODO: move Eth transaction state to models and add it here - ) -> QueryResult)>> { + ) -> QueryResult)>> { // Load the operations with the associated Ethereum transactions // from the database. - // Here we obtain a sequence of one-to-one mappings (operation ID) -> (ETH operation). - // This means that operation ID may be encountered multiple times (if there was more than - // one transaction sent). - let ops: Vec<(StoredOperation, Option)> = - self.0.conn().transaction(|| { - operations::table - .left_join(eth_operations::table.on(eth_operations::op_id.eq(operations::id))) - .filter(operations::confirmed.eq(false)) - .order(operations::id.asc()) - .load(self.0.conn()) - })?; + // Here we obtain a sequence of one-to-one mappings (ETH tx) -> (operation ID). + // Each Ethereum transaction can have no more than one associated operation, and each + // operation is associated with exactly one Ethereum transaction. Note that there may + // be ETH transactions without an operation (e.g. `completeWithdrawals` call), but for + // every operation always there is an ETH transaction. + let raw_ops: Vec<( + StorageETHOperation, + Option, + Option, + )> = self.0.conn().transaction(|| { + eth_operations::table + .left_join( + eth_ops_binding::table.on(eth_operations::id.eq(eth_ops_binding::eth_op_id)), + ) + .left_join(operations::table.on(operations::id.eq(eth_ops_binding::op_id))) + .filter(eth_operations::confirmed.eq(false)) + .order(eth_operations::id.asc()) + .load(self.0.conn()) + })?; + + // Create a vector for the expected output. + let mut ops: Vec<(StorageETHOperation, Option)> = + Vec::with_capacity(raw_ops.len()); // Transform the `StoredOperation` to `Operation`. - let mut ops = ops - .into_iter() - .map(|(op, eth_ops)| op.into_op(self.0).map(|op| (op, eth_ops))) - .collect::>>()?; - - // Sort the operations and group them by key, so we will obtain the groups - // of Ethereum operations mapped to the operations as a many-to-one mapping. - ops.sort_by_key(|(op, _)| op.id.expect("Operations in the db MUST have and id")); - let grouped_operations = ops.into_iter().group_by(|(o, _)| o.id.unwrap()); - - // Now go through the groups and collect all the Ethereum transactions to the vectors - // associated with a certain `Operation`. - let result = grouped_operations - .into_iter() - .map(|(_, group_iter)| { - // In this fold we have two accumulators: - // - operation (initialized at the first step, then just checked to be the same). - // - list of ETH txs (appended on each step). - let fold_result = group_iter.fold( - (None, Vec::new()), - |(mut accum_op, mut accum_eth_ops): (Option, _), (op, eth_op)| { - // Ensure that the grouping was done right and the operation is the same - // across the group. - assert_eq!(accum_op.get_or_insert_with(|| op.clone()).id, op.id); - - // Add the Ethereum operation to the list. 
- if let Some(eth_op) = eth_op { - accum_eth_ops.push(eth_op); - } - - (accum_op, accum_eth_ops) - }, - ); - (fold_result.0.unwrap(), fold_result.1) - }) - .collect(); - - Ok(result) + for (eth_op, _, raw_op) in raw_ops { + let op = if let Some(raw_op) = raw_op { + Some(raw_op.into_op(self.0)?) + } else { + None + }; + + ops.push((eth_op, op)); + } + + Ok(ops) } /// Stores the sent (but not confirmed yet) Ethereum transaction in the database. - pub fn save_operation_eth_tx( + pub fn save_new_eth_tx( &self, - op_id: i64, + op_type: OperationType, + op_id: Option, hash: H256, deadline_block: u64, nonce: u32, @@ -101,18 +98,47 @@ impl<'a> EthereumSchema<'a> { raw_tx: Vec, ) -> QueryResult<()> { let operation = NewETHOperation { - op_id, + op_type: op_type.to_string(), nonce: i64::from(nonce), deadline_block: deadline_block as i64, - gas_price, + last_used_gas_price: gas_price, tx_hash: hash.as_bytes().to_vec(), raw_tx, }; - insert_into(eth_operations::table) - .values(&operation) - .execute(self.0.conn()) - .map(drop) + self.0.conn().transaction(|| { + let inserted = insert_into(eth_operations::table) + .values(&operation) + .returning(eth_operations::id) + .get_results(self.0.conn())?; + assert_eq!(inserted.len(), 1, "Wrong amount of updated rows"); + + let eth_op_id = inserted[0]; + if let Some(op_id) = op_id { + // If the operation ID was provided, we should also insert a binding entry. + let binding = NewETHBinding { op_id, eth_op_id }; + + insert_into(eth_ops_binding::table) + .values(&binding) + .execute(self.0.conn())?; + } + + self.report_created_operation(op_type)?; + + Ok(()) + }) + } + + /// Changes the last used gas for a transaction. Since for every sent transaction the gas + /// is the only field changed, it makes no sense to duplicate many alike transactions for each + /// operation. Instead we enforce using exactly one tx for each operation and store only the last + /// used gas value (to increment later if we'll need to send the tx again). + pub fn update_eth_tx_gas(&self, hash: &H256, new_gas_value: BigDecimal) -> QueryResult<()> { + update(eth_operations::table.filter(eth_operations::tx_hash.eq(hash.as_bytes()))) + .set(eth_operations::last_used_gas_price.eq(new_gas_value)) + .execute(self.0.conn())?; + + Ok(()) } /// Updates the stats counter with the new operation reported. @@ -123,7 +149,7 @@ impl<'a> EthereumSchema<'a> { /// This method expects the database to be initially prepared with inserting the actual /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that /// and it's invoked within `db-reset` subcommand. - pub fn report_created_operation(&self, operation_type: OperationType) -> QueryResult<()> { + fn report_created_operation(&self, operation_type: OperationType) -> QueryResult<()> { self.0.conn().transaction(|| { let mut current_stats: ETHStats = eth_stats::table.first(self.0.conn())?; @@ -162,19 +188,37 @@ impl<'a> EthereumSchema<'a> { /// is marked as confirmed as well). 
pub fn confirm_eth_tx(&self, hash: &H256) -> QueryResult<()> { self.0.conn().transaction(|| { - update(eth_operations::table.filter(eth_operations::tx_hash.eq(hash.as_bytes()))) - .set(eth_operations::confirmed.eq(true)) - .execute(self.0.conn()) - .map(drop)?; - let (op, _) = operations::table - .inner_join(eth_operations::table.on(eth_operations::op_id.eq(operations::id))) - .filter(eth_operations::tx_hash.eq(hash.as_bytes())) - .first::<(StoredOperation, StorageETHOperation)>(self.0.conn())?; - - update(operations::table.filter(operations::id.eq(op.id))) - .set(operations::confirmed.eq(true)) - .execute(self.0.conn()) - .map(drop) + let updated: Vec = + update(eth_operations::table.filter(eth_operations::tx_hash.eq(hash.as_bytes()))) + .set(eth_operations::confirmed.eq(true)) + .returning(eth_operations::id) + .get_results(self.0.conn())?; + + assert_eq!( + updated.len(), + 1, + "Unexpected amount of operations were confirmed" + ); + + let eth_op_id = updated[0]; + + let binding: Option = eth_ops_binding::table + .filter(eth_ops_binding::eth_op_id.eq(eth_op_id)) + .first::(self.0.conn()) + .optional()?; + + if let Some(binding) = binding { + let op = operations::table + .filter(operations::id.eq(binding.op_id)) + .first::(self.0.conn())?; + + update(operations::table.filter(operations::id.eq(op.id))) + .set(operations::confirmed.eq(true)) + .execute(self.0.conn()) + .map(drop)?; + } + + Ok(()) }) } diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index 0c29c3a627..d710860b51 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -8,24 +8,39 @@ use crate::schema::*; #[table_name = "eth_operations"] pub struct StorageETHOperation { pub id: i64, - pub op_id: i64, pub nonce: i64, pub deadline_block: i64, - pub gas_price: BigDecimal, pub tx_hash: Vec, pub confirmed: bool, pub raw_tx: Vec, + pub op_type: String, + pub last_used_gas_price: BigDecimal, } #[derive(Debug, Insertable, PartialEq)] #[table_name = "eth_operations"] pub struct NewETHOperation { - pub op_id: i64, pub nonce: i64, pub deadline_block: i64, - pub gas_price: BigDecimal, pub tx_hash: Vec, pub raw_tx: Vec, + pub op_type: String, + pub last_used_gas_price: BigDecimal, +} + +#[derive(Debug, Insertable, PartialEq)] +#[table_name = "eth_ops_binding"] +pub struct NewETHBinding { + pub op_id: i64, + pub eth_op_id: i64, +} + +#[derive(Debug, Queryable, QueryableByName, PartialEq)] +#[table_name = "eth_ops_binding"] +pub struct ETHBinding { + pub id: i64, + pub op_id: i64, + pub eth_op_id: i64, } #[derive(Debug, Queryable, QueryableByName, PartialEq)] diff --git a/core/storage/src/schema.rs b/core/storage/src/schema.rs index 96560e89ed..7e516b8499 100644 --- a/core/storage/src/schema.rs +++ b/core/storage/src/schema.rs @@ -92,13 +92,21 @@ table! { table! { eth_operations (id) { id -> Int8, - op_id -> Int8, nonce -> Int8, deadline_block -> Int8, - gas_price -> Numeric, tx_hash -> Bytea, confirmed -> Bool, raw_tx -> Bytea, + op_type -> Text, + last_used_gas_price -> Numeric, + } +} + +table! { + eth_ops_binding (id) { + id -> Int8, + op_id -> Int8, + eth_op_id -> Int8, } } @@ -225,7 +233,8 @@ table! 
{ joinable!(account_balance_updates -> tokens (coin_id)); joinable!(balances -> accounts (account_id)); joinable!(balances -> tokens (coin_id)); -joinable!(eth_operations -> operations (op_id)); +joinable!(eth_ops_binding -> eth_operations (eth_op_id)); +joinable!(eth_ops_binding -> operations (op_id)); joinable!(executed_transactions -> mempool (tx_hash)); allow_tables_to_appear_in_same_query!( @@ -239,6 +248,7 @@ allow_tables_to_appear_in_same_query!( data_restore_last_watched_eth_block, eth_nonce, eth_operations, + eth_ops_binding, eth_stats, events_state, executed_priority_operations, diff --git a/core/storage/src/tests/chain/block.rs b/core/storage/src/tests/chain/block.rs index 197e0c3dba..daf6cd54bd 100644 --- a/core/storage/src/tests/chain/block.rs +++ b/core/storage/src/tests/chain/block.rs @@ -12,7 +12,7 @@ use crate::{ block::{records::BlockDetails, BlockSchema}, state::StateSchema, }, - ethereum::EthereumSchema, + ethereum::{EthereumSchema, OperationType}, prover::ProverSchema, StorageProcessor, }; @@ -199,6 +199,9 @@ fn find_block_by_height_or_hash() { let conn = StorageProcessor::establish_connection().unwrap(); db_test(conn.conn(), || { + // Required since we use `EthereumSchema` in this test. + EthereumSchema(&conn).initialize_eth_data()?; + let mut accounts_map = AccountMap::default(); let n_committed = 5; let n_verified = n_committed - 2; @@ -233,8 +236,9 @@ fn find_block_by_height_or_hash() { // commit/verify hashes. let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_operation_eth_tx( - ethereum_op_id, + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Commit, + Some(ethereum_op_id), eth_tx_hash, 100, 100, @@ -267,8 +271,9 @@ fn find_block_by_height_or_hash() { // Do not add an ethereum confirmation for the last operation. if block_number != n_verified { - EthereumSchema(&conn).save_operation_eth_tx( - ethereum_op_id, + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Verify, + Some(ethereum_op_id), eth_tx_hash, 100, 100, @@ -340,6 +345,9 @@ fn block_range() { let conn = StorageProcessor::establish_connection().unwrap(); db_test(conn.conn(), || { + // Required since we use `EthereumSchema` in this test. + EthereumSchema(&conn).initialize_eth_data()?; + let mut accounts_map = AccountMap::default(); let n_committed = 5; let n_verified = n_committed - 2; @@ -360,8 +368,9 @@ fn block_range() { // commit/verify hashes. let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_operation_eth_tx( - ethereum_op_id, + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Commit, + Some(ethereum_op_id), eth_tx_hash, 100, 100, @@ -381,8 +390,9 @@ fn block_range() { ))?; let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_operation_eth_tx( - ethereum_op_id, + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Verify, + Some(ethereum_op_id), eth_tx_hash, 100, 100, diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 41d5a4eef8..cd98d581e4 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -35,6 +35,7 @@ pub fn get_operation(block_number: BlockNumber) -> Operation { /// Parameters for `EthereumSchema::save_operation_eth_tx` method. 
#[derive(Debug)] pub struct EthereumTxParams { + op_type: String, op_id: i64, hash: H256, deadline_block: u64, @@ -44,8 +45,9 @@ pub struct EthereumTxParams { } impl EthereumTxParams { - pub fn new(op_id: i64, nonce: u32) -> Self { + pub fn new(op_type: String, op_id: i64, nonce: u32) -> Self { Self { + op_type, op_id, hash: H256::from_low_u64_ne(op_id as u64), deadline_block: 100, @@ -58,10 +60,10 @@ impl EthereumTxParams { pub fn to_eth_op(&self, db_id: i64) -> StorageETHOperation { StorageETHOperation { id: db_id, - op_id: self.op_id, + op_type: self.op_type.clone(), nonce: self.nonce as i64, deadline_block: self.deadline_block as i64, - gas_price: self.gas_price.clone(), + last_used_gas_price: self.gas_price.clone(), tx_hash: self.hash.as_bytes().to_vec(), confirmed: false, raw_tx: self.raw_tx.clone(), @@ -95,6 +97,8 @@ fn ethereum_empty_load() { fn ethereum_storage() { let conn = StorageProcessor::establish_connection().unwrap(); db_test(conn.conn(), || { + EthereumSchema(&conn).initialize_eth_data()?; + let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; assert!(unconfirmed_operations.is_empty()); @@ -103,9 +107,10 @@ fn ethereum_storage() { let operation = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Store the Ethereum transaction. - let params = EthereumTxParams::new(operation.id.unwrap(), 1); - EthereumSchema(&conn).save_operation_eth_tx( - params.op_id, + let params = EthereumTxParams::new("commit".into(), operation.id.unwrap(), 1); + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Commit, + Some(params.op_id), params.hash, params.deadline_block, params.nonce, @@ -115,16 +120,24 @@ fn ethereum_storage() { // Check that it can be loaded. let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; - assert_eq!(unconfirmed_operations[0].0.id, operation.id); - assert_eq!(unconfirmed_operations[0].1.len(), 1); + let eth_op = unconfirmed_operations[0].0.clone(); + let op = unconfirmed_operations[0] + .1 + .clone() + .expect("No Operation entry"); + assert_eq!(op.id, operation.id); // Load the database ID, since we can't predict it for sure. - let db_id = unconfirmed_operations[0].1[0].id; - assert_eq!(unconfirmed_operations[0].1, vec![params.to_eth_op(db_id)]); + assert_eq!(eth_op, params.to_eth_op(eth_op.id)); + + // Store operation with ID 2. + let block_number = 2; + let operation_2 = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Create one more Ethereum transaction. - let params_2 = EthereumTxParams::new(operation.id.unwrap(), 2); - EthereumSchema(&conn).save_operation_eth_tx( - params_2.op_id, + let params_2 = EthereumTxParams::new("commit".into(), operation_2.id.unwrap(), 2); + EthereumSchema(&conn).save_new_eth_tx( + OperationType::Commit, + Some(params_2.op_id), params_2.hash, params_2.deadline_block, params_2.nonce, @@ -134,20 +147,28 @@ fn ethereum_storage() { // Check that we now can load two operations. 
let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; - assert_eq!(unconfirmed_operations[0].0.id, operation.id); - assert_eq!(unconfirmed_operations[0].1.len(), 2); - let db_id_2 = unconfirmed_operations[0].1[1].id; - assert_eq!( - unconfirmed_operations[0].1, - vec![params.to_eth_op(db_id), params_2.to_eth_op(db_id_2)] - ); + assert_eq!(unconfirmed_operations.len(), 2); + let eth_op = unconfirmed_operations[1].0.clone(); + let op = unconfirmed_operations[1] + .1 + .clone() + .expect("No Operation entry"); + assert_eq!(op.id, operation_2.id); + assert_eq!(eth_op, params_2.to_eth_op(eth_op.id)); // Make the transaction as completed. EthereumSchema(&conn).confirm_eth_tx(¶ms_2.hash)?; - // Now there should be no unconfirmed transactions. + // Now there should be only one unconfirmed operation. let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; - assert!(unconfirmed_operations.is_empty()); + assert_eq!(unconfirmed_operations.len(), 1); + + // Check that stats are updated as well. + let updated_stats = EthereumSchema(&conn).load_stats()?; + + assert_eq!(updated_stats.commit_ops, 2); + assert_eq!(updated_stats.verify_ops, 0); + assert_eq!(updated_stats.withdraw_ops, 0); Ok(()) }); @@ -170,39 +191,3 @@ fn eth_nonce() { Ok(()) }); } - -/// Checks that Ethereum stats are incremented as expected. -#[test] -#[cfg_attr(not(feature = "db_test"), ignore)] -fn eth_stats() { - let conn = StorageProcessor::establish_connection().unwrap(); - db_test(conn.conn(), || { - EthereumSchema(&conn).initialize_eth_data()?; - - let initial_stats = EthereumSchema(&conn).load_stats()?; - - assert_eq!(initial_stats.commit_ops, 0); - assert_eq!(initial_stats.verify_ops, 0); - assert_eq!(initial_stats.withdraw_ops, 0); - - let ops_to_add = vec![ - (OperationType::Commit, 5), - (OperationType::Verify, 3), - (OperationType::Withdraw, 2), - ]; - - for (op, count) in ops_to_add.iter() { - for _ in 0..*count { - EthereumSchema(&conn).report_created_operation(*op)?; - } - } - - let updated_stats = EthereumSchema(&conn).load_stats()?; - - assert_eq!(updated_stats.commit_ops, ops_to_add[0].1); - assert_eq!(updated_stats.verify_ops, ops_to_add[1].1); - assert_eq!(updated_stats.withdraw_ops, ops_to_add[2].1); - - Ok(()) - }); -} From b00d96ee1e49a4c3bd529bdda23405833df9e410 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 07:56:36 +0300 Subject: [PATCH 080/186] Add an additional table to store sent eth txs hashes --- .../2020-03-19-042712_more_eth_data/down.sql | 1 + .../2020-03-19-042712_more_eth_data/up.sql | 10 ++++ core/storage/src/chain/block/mod.rs | 12 ++--- core/storage/src/ethereum/mod.rs | 54 +++++++++++++++---- core/storage/src/ethereum/records.rs | 17 +++++- core/storage/src/schema.rs | 11 +++- core/storage/src/tests/ethereum.rs | 3 +- 7 files changed, 88 insertions(+), 20 deletions(-) diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql index 48d3299957..c286967a03 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql @@ -2,6 +2,7 @@ DROP TABLE IF EXISTS eth_nonce CASCADE; DROP TABLE IF EXISTS eth_stats CASCADE; DROP TABLE IF EXISTS eth_ops_binding CASCADE; +DROP TABLE IF EXISTS eth_tx_hashes CASCADE; ALTER TABLE eth_operations ADD COLUMN op_id bigserial REFERENCES operations (id), diff --git 
a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql index ec00eb0541..3dd53a4258 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql @@ -24,6 +24,14 @@ CREATE TABLE eth_ops_binding eth_op_id bigserial NOT NULL REFERENCES eth_operations (id) ); +-- Table storing all the sent Ethereum transaction hashes. +CREATE TABLE eth_tx_hashes +( + id bigserial PRIMARY KEY, + eth_op_id bigserial NOT NULL REFERENCES eth_operations (id), + tx_hash bytea not null +); + ALTER TABLE eth_operations -- Add the operation type (`commit` / `verify` / `withdraw`). ADD COLUMN op_type text not null, @@ -33,4 +41,6 @@ ALTER TABLE eth_operations -- Rename `gas_price` to `last_used_gas_price`, since it's the only field changed for resent txs -- and it makes no sense to store every sent transaction separately. DROP COLUMN gas_price CASCADE, + -- Different tx hashes are now stored in the `eth_tx_hashes` table, so this field isn't needed anymore. + DROP COLUMN tx_hash CASCADE, ADD COLUMN last_used_gas_price numeric not null diff --git a/core/storage/src/chain/block/mod.rs b/core/storage/src/chain/block/mod.rs index fcec8f2e44..b1ba3f29c1 100644 --- a/core/storage/src/chain/block/mod.rs +++ b/core/storage/src/chain/block/mod.rs @@ -225,7 +225,7 @@ impl<'a> BlockSchema<'a> { limit: u32, ) -> QueryResult> { // This query does the following: - // - joins the `operations` and `eth_operations` (using the intermediate `eth_ops_binding` table) + // - joins the `operations` and `eth_tx_hashes` (using the intermediate `eth_ops_binding` table) // tables to collect the data: // block number, ethereum transaction hash, action type and action creation timestamp; // - joins the `blocks` table with result of the join twice: once for committed operations @@ -236,12 +236,12 @@ impl<'a> BlockSchema<'a> { with eth_ops as ( \ select \ operations.block_number, \ - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ + '0x' || encode(eth_tx_hashes.tx_hash::bytea, 'hex') as tx_hash, \ operations.action_type, \ operations.created_at \ from operations \ left join eth_ops_binding on eth_ops_binding.op_id = operations.id \ - left join eth_operations on eth_operations.id = eth_ops_binding.eth_op_id \ + left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_ops_binding.eth_op_id \ ) \ select \ blocks.number as block_number, \ @@ -278,7 +278,7 @@ impl<'a> BlockSchema<'a> { let block_number = query.parse::().unwrap_or(i64::max_value()); let l_query = query.to_lowercase(); // This query does the following: - // - joins the `operations` and `eth_operations` (using the intermediate `eth_ops_binding` table) + // - joins the `operations` and `eth_tx_hashes` (using the intermediate `eth_ops_binding` table) // tables to collect the data: // block number, ethereum transaction hash, action type and action creation timestamp; // - joins the `blocks` table with result of the join twice: once for committed operations @@ -293,12 +293,12 @@ impl<'a> BlockSchema<'a> { with eth_ops as ( \ select \ operations.block_number, \ - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ + '0x' || encode(eth_tx_hashes.tx_hash::bytea, 'hex') as tx_hash, \ operations.action_type, \ operations.created_at \ from operations \ left join eth_ops_binding on eth_ops_binding.op_id = operations.id \ - left join eth_operations on eth_operations.id = eth_ops_binding.eth_op_id \ + left join 
eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_ops_binding.eth_op_id \ ) \ select \ blocks.number as block_number, \ diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 2033db40bb..80d11a1a1b 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -8,7 +8,8 @@ use web3::types::H256; use models::Operation; // Local imports use self::records::{ - ETHBinding, ETHNonce, ETHStats, NewETHBinding, NewETHOperation, StorageETHOperation, + ETHBinding, ETHNonce, ETHStats, ETHTxHash, NewETHBinding, NewETHOperation, NewETHTxHash, + StorageETHOperation, }; use crate::chain::operations::records::StoredOperation; use crate::schema::*; @@ -72,6 +73,8 @@ impl<'a> EthereumSchema<'a> { let mut ops: Vec<(StorageETHOperation, Option)> = Vec::with_capacity(raw_ops.len()); + // TODO: load tx hashes. + // Transform the `StoredOperation` to `Operation`. for (eth_op, _, raw_op) in raw_ops { let op = if let Some(raw_op) = raw_op { @@ -102,18 +105,34 @@ impl<'a> EthereumSchema<'a> { nonce: i64::from(nonce), deadline_block: deadline_block as i64, last_used_gas_price: gas_price, - tx_hash: hash.as_bytes().to_vec(), raw_tx, }; self.0.conn().transaction(|| { - let inserted = insert_into(eth_operations::table) + let inserted_tx = insert_into(eth_operations::table) .values(&operation) .returning(eth_operations::id) .get_results(self.0.conn())?; - assert_eq!(inserted.len(), 1, "Wrong amount of updated rows"); + assert_eq!( + inserted_tx.len(), + 1, + "Wrong amount of updated rows (eth_operations)" + ); + + let eth_op_id = inserted_tx[0]; + + let hash_entry = NewETHTxHash { + eth_op_id, + tx_hash: hash.as_bytes().to_vec(), + }; + let inserted_hashes_rows = insert_into(eth_tx_hashes::table) + .values(&hash_entry) + .execute(self.0.conn())?; + assert_eq!( + inserted_hashes_rows, 1, + "Wrong amount of updated rows (eth_tx_hashes)" + ); - let eth_op_id = inserted[0]; if let Some(op_id) = op_id { // If the operation ID was provided, we should also insert a binding entry. let binding = NewETHBinding { op_id, eth_op_id }; @@ -129,16 +148,29 @@ impl<'a> EthereumSchema<'a> { }) } + /// Retrieves the Ethereum operation ID given the tx hash. + fn get_eth_op_id(&self, hash: &H256) -> QueryResult { + let hash_entry = eth_tx_hashes::table + .filter(eth_tx_hashes::tx_hash.eq(hash.as_bytes())) + .first::(self.0.conn())?; + + Ok(hash_entry.eth_op_id) + } + /// Changes the last used gas for a transaction. Since for every sent transaction the gas /// is the only field changed, it makes no sense to duplicate many alike transactions for each /// operation. Instead we enforce using exactly one tx for each operation and store only the last /// used gas value (to increment later if we'll need to send the tx again). pub fn update_eth_tx_gas(&self, hash: &H256, new_gas_value: BigDecimal) -> QueryResult<()> { - update(eth_operations::table.filter(eth_operations::tx_hash.eq(hash.as_bytes()))) - .set(eth_operations::last_used_gas_price.eq(new_gas_value)) - .execute(self.0.conn())?; + self.0.conn().transaction(|| { + let eth_op_id = self.get_eth_op_id(hash)?; - Ok(()) + update(eth_operations::table.filter(eth_operations::id.eq(eth_op_id))) + .set(eth_operations::last_used_gas_price.eq(new_gas_value)) + .execute(self.0.conn())?; + + Ok(()) + }) } /// Updates the stats counter with the new operation reported. @@ -188,8 +220,10 @@ impl<'a> EthereumSchema<'a> { /// is marked as confirmed as well). 
pub fn confirm_eth_tx(&self, hash: &H256) -> QueryResult<()> { self.0.conn().transaction(|| { + let eth_op_id = self.get_eth_op_id(hash)?; + let updated: Vec = - update(eth_operations::table.filter(eth_operations::tx_hash.eq(hash.as_bytes()))) + update(eth_operations::table.filter(eth_operations::id.eq(eth_op_id))) .set(eth_operations::confirmed.eq(true)) .returning(eth_operations::id) .get_results(self.0.conn())?; diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index d710860b51..d03c417c83 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -10,19 +10,32 @@ pub struct StorageETHOperation { pub id: i64, pub nonce: i64, pub deadline_block: i64, - pub tx_hash: Vec, pub confirmed: bool, pub raw_tx: Vec, pub op_type: String, pub last_used_gas_price: BigDecimal, } +#[derive(Debug, Clone, Queryable, QueryableByName, PartialEq)] +#[table_name = "eth_tx_hashes"] +pub struct ETHTxHash { + pub id: i64, + pub eth_op_id: i64, + pub tx_hash: Vec, +} + +#[derive(Debug, Clone, Insertable, PartialEq)] +#[table_name = "eth_tx_hashes"] +pub struct NewETHTxHash { + pub eth_op_id: i64, + pub tx_hash: Vec, +} + #[derive(Debug, Insertable, PartialEq)] #[table_name = "eth_operations"] pub struct NewETHOperation { pub nonce: i64, pub deadline_block: i64, - pub tx_hash: Vec, pub raw_tx: Vec, pub op_type: String, pub last_used_gas_price: BigDecimal, diff --git a/core/storage/src/schema.rs b/core/storage/src/schema.rs index 7e516b8499..52f2d0b1f5 100644 --- a/core/storage/src/schema.rs +++ b/core/storage/src/schema.rs @@ -94,7 +94,6 @@ table! { id -> Int8, nonce -> Int8, deadline_block -> Int8, - tx_hash -> Bytea, confirmed -> Bool, raw_tx -> Bytea, op_type -> Text, @@ -119,6 +118,14 @@ table! { } } +table! { + eth_tx_hashes (id) { + id -> Int8, + eth_op_id -> Int8, + tx_hash -> Bytea, + } +} + table! 
{ events_state (id) { id -> Int4, @@ -235,6 +242,7 @@ joinable!(balances -> accounts (account_id)); joinable!(balances -> tokens (coin_id)); joinable!(eth_ops_binding -> eth_operations (eth_op_id)); joinable!(eth_ops_binding -> operations (op_id)); +joinable!(eth_tx_hashes -> eth_operations (eth_op_id)); joinable!(executed_transactions -> mempool (tx_hash)); allow_tables_to_appear_in_same_query!( @@ -250,6 +258,7 @@ allow_tables_to_appear_in_same_query!( eth_operations, eth_ops_binding, eth_stats, + eth_tx_hashes, events_state, executed_priority_operations, executed_transactions, diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index cd98d581e4..162f9ddabc 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -64,7 +64,8 @@ impl EthereumTxParams { nonce: self.nonce as i64, deadline_block: self.deadline_block as i64, last_used_gas_price: self.gas_price.clone(), - tx_hash: self.hash.as_bytes().to_vec(), + // TODO: Hash should be used here + // tx_hash: self.hash.as_bytes().to_vec(), confirmed: false, raw_tx: self.raw_tx.clone(), } From 9ac5e855df28a257eb8edf08816b590634945b2c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 09:21:05 +0300 Subject: [PATCH 081/186] Add ETHOperations structure to models and use it --- core/models/src/ethereum.rs | 66 ++++++++++ core/models/src/lib.rs | 1 + .../2020-03-19-042712_more_eth_data/down.sql | 12 +- .../2020-03-19-042712_more_eth_data/up.sql | 13 +- core/storage/src/ethereum/mod.rs | 116 ++++++++++++------ core/storage/src/ethereum/records.rs | 5 +- core/storage/src/schema.rs | 3 +- core/storage/src/tests/chain/block.rs | 4 +- core/storage/src/tests/ethereum.rs | 8 +- 9 files changed, 178 insertions(+), 50 deletions(-) create mode 100644 core/models/src/ethereum.rs diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs new file mode 100644 index 0000000000..f63438bd67 --- /dev/null +++ b/core/models/src/ethereum.rs @@ -0,0 +1,66 @@ +//! Common primitives for the Ethereum network interaction. +// Built-in deps +use std::str::FromStr; +// External uses +use web3::types::{H256, U256}; + +/// Type of the transactions sent to the Ethereum network. +#[derive(Debug, Clone, PartialEq)] +pub enum OperationType { + /// Commit action (`commitBlock` method of the smart contract). + Commit, + /// Verify action (`verifyBlock` method of the smart contract). + Verify, + /// Withdraw action (`completeWithdrawals` method of the smart contract). + Withdraw, +} + +impl OperationType { + pub fn to_string(&self) -> String { + match self { + Self::Commit => "commit".into(), + Self::Verify => "verify".into(), + Self::Withdraw => "withdraw".into(), + } + } +} + +impl FromStr for OperationType { + type Err = failure::Error; + + fn from_str(s: &str) -> Result { + let op = match s { + "commit" => Self::Commit, + "verify" => Self::Verify, + "withdraw" => Self::Withdraw, + _ => failure::bail!("Unknown type of operation: {}", s), + }; + + Ok(op) + } +} + +/// Stored Ethereum operation. +#[derive(Debug, Clone, PartialEq)] +pub struct ETHOperation { + // Numeric ID of the operation. + pub id: i64, + /// Type of the operation. + pub op_type: OperationType, + /// Used nonce (fixed for all the sent transactions). + pub nonce: U256, + /// Deadline block of the last sent transaction. + pub last_deadline_block: i64, + /// Gas price used in the last sent transaction. + pub last_used_gas_price: U256, + /// Hashes of all the sent transactions. 
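+    /// A new hash is added each time the transaction is re-sent with updated parameters (e.g. a higher gas price).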
+ pub used_tx_hashes: Vec, + /// Tx payload (not signed). + pub encoded_tx_data: Vec, + /// Flag showing if the operation was completed and + /// confirmed on the Ethereum blockchain. + pub confirmed: bool, + /// Hash of the accepted Ethereum transaction (if operation + /// is confirmed). + pub final_hash: Option, +} diff --git a/core/models/src/lib.rs b/core/models/src/lib.rs index 286a09a976..a90e59d5f6 100644 --- a/core/models/src/lib.rs +++ b/core/models/src/lib.rs @@ -6,6 +6,7 @@ extern crate log; pub mod abi; pub mod circuit; pub mod config_options; +pub mod ethereum; pub mod merkle_tree; pub mod misc; pub mod node; diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql index c286967a03..af3daf8af0 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/down.sql @@ -5,7 +5,17 @@ DROP TABLE IF EXISTS eth_ops_binding CASCADE; DROP TABLE IF EXISTS eth_tx_hashes CASCADE; ALTER TABLE eth_operations + -- Restore `op_id` ADD COLUMN op_id bigserial REFERENCES operations (id), + -- Restore `tx_hash` field + ADD COLUMN tx_hash bytea not null, + -- Remove `op_type` DROP COLUMN op_type CASCADE, + -- Rename `last_used_gas_price` to `gas_price` ADD COLUMN gas_price numeric not null, - DROP COLUMN last_used_gas_price CASCADE \ No newline at end of file + DROP COLUMN last_used_gas_price CASCADE, + -- Rename `last_deadline_block` to `deadline_block` + ADD COLUMN deadline_block bigint not null, + DROP COLUMN last_deadline_block CASCADE, + -- Remove `final_hash` + DROP COLUMN final_hash CASCADE diff --git a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql index 3dd53a4258..f40d52899d 100644 --- a/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql +++ b/core/storage/migrations/2020-03-19-042712_more_eth_data/up.sql @@ -29,7 +29,7 @@ CREATE TABLE eth_tx_hashes ( id bigserial PRIMARY KEY, eth_op_id bigserial NOT NULL REFERENCES eth_operations (id), - tx_hash bytea not null + tx_hash bytea NOT NULL ); ALTER TABLE eth_operations @@ -38,9 +38,14 @@ ALTER TABLE eth_operations -- Remove the `op_id` field, since `withdraw` operation does not have an associated operation. -- The `eth_ops_binding` table should be used since now. DROP COLUMN op_id CASCADE, - -- Rename `gas_price` to `last_used_gas_price`, since it's the only field changed for resent txs - -- and it makes no sense to store every sent transaction separately. - DROP COLUMN gas_price CASCADE, -- Different tx hashes are now stored in the `eth_tx_hashes` table, so this field isn't needed anymore. DROP COLUMN tx_hash CASCADE, + -- Add the field containing the final hash of the committed tx. + -- This field is `null` until tx has enough confirmations. 
+ ADD COLUMN final_hash bytea default null, + -- Rename `deadline_block` to `last_deadline_block` + DROP COLUMN deadline_block CASCADE, + ADD COLUMN last_deadline_block bigint not null, + -- Rename `gas_price` to `last_used_gas_price` + DROP COLUMN gas_price CASCADE, ADD COLUMN last_used_gas_price numeric not null diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 80d11a1a1b..0688b68dd7 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -1,11 +1,15 @@ // Built-in deps +use std::str::FromStr; // External imports use bigdecimal::BigDecimal; use diesel::dsl::{insert_into, update}; use diesel::prelude::*; -use web3::types::H256; +use web3::types::{H256, U256}; // Workspace imports -use models::Operation; +use models::{ + ethereum::{ETHOperation, OperationType}, + Operation, +}; // Local imports use self::records::{ ETHBinding, ETHNonce, ETHStats, ETHTxHash, NewETHBinding, NewETHOperation, NewETHTxHash, @@ -17,23 +21,6 @@ use crate::StorageProcessor; pub mod records; -#[derive(Debug, Clone, Copy)] -pub enum OperationType { - Commit, - Verify, - Withdraw, -} - -impl OperationType { - pub fn to_string(&self) -> String { - match self { - Self::Commit => "commit".into(), - Self::Verify => "verify".into(), - Self::Withdraw => "withdraw".into(), - } - } -} - /// Ethereum schema is capable of storing the information about the /// interaction with the Ethereum blockchain (mainly the list of sent /// Ethereum transactions). @@ -45,8 +32,7 @@ impl<'a> EthereumSchema<'a> { /// each operation has a list of sent Ethereum transactions. pub fn load_unconfirmed_operations( &self, - // TODO: move Eth transaction state to models and add it here - ) -> QueryResult)>> { + ) -> QueryResult)>> { // Load the operations with the associated Ethereum transactions // from the database. // Here we obtain a sequence of one-to-one mappings (ETH tx) -> (operation ID). @@ -70,19 +56,49 @@ impl<'a> EthereumSchema<'a> { })?; // Create a vector for the expected output. - let mut ops: Vec<(StorageETHOperation, Option)> = - Vec::with_capacity(raw_ops.len()); + let mut ops: Vec<(ETHOperation, Option)> = Vec::with_capacity(raw_ops.len()); - // TODO: load tx hashes. - - // Transform the `StoredOperation` to `Operation`. + // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`. for (eth_op, _, raw_op) in raw_ops { + // Load the stored txs hashes. + let eth_tx_hashes: Vec = eth_tx_hashes::table + .filter(eth_tx_hashes::eth_op_id.eq(eth_op.id)) + .load(self.0.conn())?; + assert!( + eth_tx_hashes.len() >= 1, + "No hashes stored for the Ethereum operation" + ); + + // If there is an operation, convert it to the `Operation` type. let op = if let Some(raw_op) = raw_op { Some(raw_op.into_op(self.0)?) } else { None }; + // Convert the fields into expected format. 
+ let op_type = OperationType::from_str(eth_op.op_type.as_ref()) + .expect("Stored operation type must have a valid value"); + let last_used_gas_price = + U256::from_str(ð_op.last_used_gas_price.to_string()).unwrap(); + let used_tx_hashes = eth_tx_hashes + .iter() + .map(|entry| H256::from_slice(&entry.tx_hash)) + .collect(); + let final_hash = eth_op.final_hash.map(|hash| H256::from_slice(&hash)); + + let eth_op = ETHOperation { + id: eth_op.id, + op_type, + nonce: eth_op.nonce.into(), + last_deadline_block: eth_op.last_deadline_block, + last_used_gas_price, + used_tx_hashes, + encoded_tx_data: eth_op.raw_tx, + confirmed: eth_op.confirmed, + final_hash, + }; + ops.push((eth_op, op)); } @@ -103,12 +119,13 @@ impl<'a> EthereumSchema<'a> { let operation = NewETHOperation { op_type: op_type.to_string(), nonce: i64::from(nonce), - deadline_block: deadline_block as i64, + last_deadline_block: deadline_block as i64, last_used_gas_price: gas_price, raw_tx, }; self.0.conn().transaction(|| { + // Insert the operation itself. let inserted_tx = insert_into(eth_operations::table) .values(&operation) .returning(eth_operations::id) @@ -119,8 +136,10 @@ impl<'a> EthereumSchema<'a> { "Wrong amount of updated rows (eth_operations)" ); + // Obtain the operation ID for the follow-up queried. let eth_op_id = inserted_tx[0]; + // Add a hash entry. let hash_entry = NewETHTxHash { eth_op_id, tx_hash: hash.as_bytes().to_vec(), @@ -133,8 +152,8 @@ impl<'a> EthereumSchema<'a> { "Wrong amount of updated rows (eth_tx_hashes)" ); + // If the operation ID was provided, we should also insert a binding entry. if let Some(op_id) = op_id { - // If the operation ID was provided, we should also insert a binding entry. let binding = NewETHBinding { op_id, eth_op_id }; insert_into(eth_ops_binding::table) @@ -157,16 +176,36 @@ impl<'a> EthereumSchema<'a> { Ok(hash_entry.eth_op_id) } - /// Changes the last used gas for a transaction. Since for every sent transaction the gas - /// is the only field changed, it makes no sense to duplicate many alike transactions for each - /// operation. Instead we enforce using exactly one tx for each operation and store only the last - /// used gas value (to increment later if we'll need to send the tx again). - pub fn update_eth_tx_gas(&self, hash: &H256, new_gas_value: BigDecimal) -> QueryResult<()> { + /// Updates the Ethereum operation by adding a new tx data. + /// The new deadline block / gas value are placed instead of old values to the main entry, + /// and for hash a new `eth_tx_hashes` entry is added. + pub fn update_eth_tx( + &self, + eth_op_id: i64, + hash: &H256, + new_deadline_block: i64, + new_gas_value: BigDecimal, + ) -> QueryResult<()> { self.0.conn().transaction(|| { - let eth_op_id = self.get_eth_op_id(hash)?; + // Insert the new hash entry. + let hash_entry = NewETHTxHash { + eth_op_id, + tx_hash: hash.as_bytes().to_vec(), + }; + let inserted_hashes_rows = insert_into(eth_tx_hashes::table) + .values(&hash_entry) + .execute(self.0.conn())?; + assert_eq!( + inserted_hashes_rows, 1, + "Wrong amount of updated rows (eth_tx_hashes)" + ); + // Update the stored tx. 
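+            // Only the deadline block and the gas price are replaced; the nonce and the raw tx payload stay the same.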
update(eth_operations::table.filter(eth_operations::id.eq(eth_op_id))) - .set(eth_operations::last_used_gas_price.eq(new_gas_value)) + .set(( + eth_operations::last_used_gas_price.eq(new_gas_value), + eth_operations::last_deadline_block.eq(new_deadline_block), + )) .execute(self.0.conn())?; Ok(()) @@ -222,9 +261,13 @@ impl<'a> EthereumSchema<'a> { self.0.conn().transaction(|| { let eth_op_id = self.get_eth_op_id(hash)?; + // Set the `confirmed` and `final_hash` field of the entry. let updated: Vec = update(eth_operations::table.filter(eth_operations::id.eq(eth_op_id))) - .set(eth_operations::confirmed.eq(true)) + .set(( + eth_operations::confirmed.eq(true), + eth_operations::final_hash.eq(Some(hash.as_bytes().to_vec())), + )) .returning(eth_operations::id) .get_results(self.0.conn())?; @@ -241,6 +284,7 @@ impl<'a> EthereumSchema<'a> { .first::(self.0.conn()) .optional()?; + // If there is a ZKSync operation, mark it as confirmed as well. if let Some(binding) = binding { let op = operations::table .filter(operations::id.eq(binding.op_id)) diff --git a/core/storage/src/ethereum/records.rs b/core/storage/src/ethereum/records.rs index d03c417c83..85a044abf8 100644 --- a/core/storage/src/ethereum/records.rs +++ b/core/storage/src/ethereum/records.rs @@ -9,10 +9,11 @@ use crate::schema::*; pub struct StorageETHOperation { pub id: i64, pub nonce: i64, - pub deadline_block: i64, pub confirmed: bool, pub raw_tx: Vec, pub op_type: String, + pub final_hash: Option>, + pub last_deadline_block: i64, pub last_used_gas_price: BigDecimal, } @@ -35,9 +36,9 @@ pub struct NewETHTxHash { #[table_name = "eth_operations"] pub struct NewETHOperation { pub nonce: i64, - pub deadline_block: i64, pub raw_tx: Vec, pub op_type: String, + pub last_deadline_block: i64, pub last_used_gas_price: BigDecimal, } diff --git a/core/storage/src/schema.rs b/core/storage/src/schema.rs index 52f2d0b1f5..39a42551e3 100644 --- a/core/storage/src/schema.rs +++ b/core/storage/src/schema.rs @@ -93,10 +93,11 @@ table! 
{ eth_operations (id) { id -> Int8, nonce -> Int8, - deadline_block -> Int8, confirmed -> Bool, raw_tx -> Bytea, op_type -> Text, + final_hash -> Nullable, + last_deadline_block -> Int8, last_used_gas_price -> Numeric, } } diff --git a/core/storage/src/tests/chain/block.rs b/core/storage/src/tests/chain/block.rs index daf6cd54bd..02052091bc 100644 --- a/core/storage/src/tests/chain/block.rs +++ b/core/storage/src/tests/chain/block.rs @@ -3,7 +3,7 @@ use web3::types::H256; // Workspace imports use crypto_exports::rand::XorShiftRng; use models::node::{apply_updates, block::Block, AccountMap, AccountUpdate, BlockNumber, Fr}; -use models::{Action, Operation}; +use models::{ethereum::OperationType, Action, Operation}; // Local imports use super::utils::{acc_create_random_updates, get_operation}; use crate::tests::{create_rng, db_test}; @@ -12,7 +12,7 @@ use crate::{ block::{records::BlockDetails, BlockSchema}, state::StateSchema, }, - ethereum::{EthereumSchema, OperationType}, + ethereum::EthereumSchema, prover::ProverSchema, StorageProcessor, }; diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 162f9ddabc..35938bdd34 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -3,6 +3,7 @@ use bigdecimal::BigDecimal; use web3::types::H256; // Workspace imports use models::{ + ethereum::OperationType, node::{block::Block, BlockNumber, Fr}, Action, Operation, }; @@ -10,7 +11,7 @@ use models::{ use crate::tests::db_test; use crate::{ chain::block::BlockSchema, - ethereum::{records::StorageETHOperation, EthereumSchema, OperationType}, + ethereum::{records::StorageETHOperation, EthereumSchema}, StorageProcessor, }; @@ -62,10 +63,9 @@ impl EthereumTxParams { id: db_id, op_type: self.op_type.clone(), nonce: self.nonce as i64, - deadline_block: self.deadline_block as i64, + last_deadline_block: self.deadline_block as i64, last_used_gas_price: self.gas_price.clone(), - // TODO: Hash should be used here - // tx_hash: self.hash.as_bytes().to_vec(), + final_hash: Some(self.hash.as_bytes().to_vec()), confirmed: false, raw_tx: self.raw_tx.clone(), } From 63db45013aecb15ebcf9754c388f4fcd63bce00e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 09:26:33 +0300 Subject: [PATCH 082/186] Fix ethereum schema tests --- core/models/src/ethereum.rs | 2 +- core/storage/src/ethereum/mod.rs | 2 +- core/storage/src/tests/ethereum.rs | 34 +++++++++++++++++------------- 3 files changed, 21 insertions(+), 17 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index f63438bd67..c2ada45ef7 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -50,7 +50,7 @@ pub struct ETHOperation { /// Used nonce (fixed for all the sent transactions). pub nonce: U256, /// Deadline block of the last sent transaction. - pub last_deadline_block: i64, + pub last_deadline_block: u64, /// Gas price used in the last sent transaction. pub last_used_gas_price: U256, /// Hashes of all the sent transactions. 
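A minimal sketch (not part of the patch) of what an `ETHOperation` value looks like after this fix; all concrete values below are illustrative placeholders, and the struct shown is the one as of this commit (a later patch in this series adds an `op` field).

use models::ethereum::{ETHOperation, OperationType};
use web3::types::{H256, U256};

fn sample_operation() -> ETHOperation {
    ETHOperation {
        id: 1,
        op_type: OperationType::Commit,
        nonce: U256::from(0),
        // `last_deadline_block` is now `u64`, matching Ethereum block numbers.
        last_deadline_block: 100,
        last_used_gas_price: U256::from(1_000),
        // One hash per sending attempt; only a single attempt so far.
        used_tx_hashes: vec![H256::zero()],
        encoded_tx_data: vec![],
        confirmed: false,
        final_hash: None,
    }
}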
diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 0688b68dd7..154e696a1d 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -91,7 +91,7 @@ impl<'a> EthereumSchema<'a> { id: eth_op.id, op_type, nonce: eth_op.nonce.into(), - last_deadline_block: eth_op.last_deadline_block, + last_deadline_block: eth_op.last_deadline_block as u64, last_used_gas_price, used_tx_hashes, encoded_tx_data: eth_op.raw_tx, diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 35938bdd34..d018e8eb02 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -1,19 +1,17 @@ +// Built-in deps +use std::str::FromStr; // External imports use bigdecimal::BigDecimal; -use web3::types::H256; +use web3::types::{H256, U256}; // Workspace imports use models::{ - ethereum::OperationType, + ethereum::{ETHOperation, OperationType}, node::{block::Block, BlockNumber, Fr}, Action, Operation, }; // Local imports use crate::tests::db_test; -use crate::{ - chain::block::BlockSchema, - ethereum::{records::StorageETHOperation, EthereumSchema}, - StorageProcessor, -}; +use crate::{chain::block::BlockSchema, ethereum::EthereumSchema, StorageProcessor}; /// Creates a sample operation to be stored in `operations` table. /// This function is required since `eth_operations` table is linked to @@ -58,16 +56,22 @@ impl EthereumTxParams { } } - pub fn to_eth_op(&self, db_id: i64) -> StorageETHOperation { - StorageETHOperation { + pub fn to_eth_op(&self, db_id: i64) -> ETHOperation { + let op_type = OperationType::from_str(self.op_type.as_ref()) + .expect("Stored operation type must have a valid value"); + let last_used_gas_price = U256::from_str(&self.gas_price.to_string()).unwrap(); + let used_tx_hashes = vec![self.hash.clone()]; + + ETHOperation { id: db_id, - op_type: self.op_type.clone(), - nonce: self.nonce as i64, - last_deadline_block: self.deadline_block as i64, - last_used_gas_price: self.gas_price.clone(), - final_hash: Some(self.hash.as_bytes().to_vec()), + op_type, + nonce: self.nonce.into(), + last_deadline_block: self.deadline_block, + last_used_gas_price, + used_tx_hashes, + encoded_tx_data: self.raw_tx.clone(), confirmed: false, - raw_tx: self.raw_tx.clone(), + final_hash: None, } } } From fb55cec21dc4fa114c9f5c39fae417aa07c11c56 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 11:45:11 +0300 Subject: [PATCH 083/186] Move operation option to the ETHOperation model --- core/models/src/ethereum.rs | 24 +++++++++++++++++++++- core/storage/src/ethereum/mod.rs | 19 ++++++++---------- core/storage/src/tests/ethereum.rs | 32 +++++++++++++----------------- 3 files changed, 45 insertions(+), 30 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index c2ada45ef7..0d46e79cbf 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -2,8 +2,13 @@ // Built-in deps use std::str::FromStr; // External uses +/// Local uses +use crate::Operation; use web3::types::{H256, U256}; +/// Numerical identifier of the Ethereum operation. +pub type EthOpId = i64; + /// Type of the transactions sent to the Ethereum network. #[derive(Debug, Clone, PartialEq)] pub enum OperationType { @@ -41,12 +46,14 @@ impl FromStr for OperationType { } /// Stored Ethereum operation. -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct ETHOperation { // Numeric ID of the operation. pub id: i64, /// Type of the operation. 
pub op_type: OperationType, + /// Optional ZKSync operation associated with Ethereum operation. + pub op: Option, /// Used nonce (fixed for all the sent transactions). pub nonce: U256, /// Deadline block of the last sent transaction. @@ -64,3 +71,18 @@ pub struct ETHOperation { /// is confirmed). pub final_hash: Option, } + +impl PartialEq for ETHOperation { + fn eq(&self, other: &Self) -> bool { + // We assume that there will be no two different `ETHOperation`s with + // the same identifiers. + // However, the volatile fields (e.g. `used_tx_hashes` and `confirmed`) may vary + // for the same operation in different states, so we compare them as well. + (self.id == other.id) + && (self.last_deadline_block == other.last_deadline_block) + && (self.last_used_gas_price == other.last_used_gas_price) + && (self.used_tx_hashes == other.used_tx_hashes) + && (self.confirmed == other.confirmed) + && (self.final_hash == other.final_hash) + } +} diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 154e696a1d..463702453a 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -6,10 +6,7 @@ use diesel::dsl::{insert_into, update}; use diesel::prelude::*; use web3::types::{H256, U256}; // Workspace imports -use models::{ - ethereum::{ETHOperation, OperationType}, - Operation, -}; +use models::ethereum::{ETHOperation, OperationType}; // Local imports use self::records::{ ETHBinding, ETHNonce, ETHStats, ETHTxHash, NewETHBinding, NewETHOperation, NewETHTxHash, @@ -30,9 +27,7 @@ pub struct EthereumSchema<'a>(pub &'a StorageProcessor); impl<'a> EthereumSchema<'a> { /// Loads the list of operations that were not confirmed on Ethereum, /// each operation has a list of sent Ethereum transactions. - pub fn load_unconfirmed_operations( - &self, - ) -> QueryResult)>> { + pub fn load_unconfirmed_operations(&self) -> QueryResult> { // Load the operations with the associated Ethereum transactions // from the database. // Here we obtain a sequence of one-to-one mappings (ETH tx) -> (operation ID). @@ -56,7 +51,7 @@ impl<'a> EthereumSchema<'a> { })?; // Create a vector for the expected output. - let mut ops: Vec<(ETHOperation, Option)> = Vec::with_capacity(raw_ops.len()); + let mut ops: Vec = Vec::with_capacity(raw_ops.len()); // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`. for (eth_op, _, raw_op) in raw_ops { @@ -90,6 +85,7 @@ impl<'a> EthereumSchema<'a> { let eth_op = ETHOperation { id: eth_op.id, op_type, + op, nonce: eth_op.nonce.into(), last_deadline_block: eth_op.last_deadline_block as u64, last_used_gas_price, @@ -99,7 +95,7 @@ impl<'a> EthereumSchema<'a> { final_hash, }; - ops.push((eth_op, op)); + ops.push(eth_op); } Ok(ops) @@ -115,7 +111,7 @@ impl<'a> EthereumSchema<'a> { nonce: u32, gas_price: BigDecimal, raw_tx: Vec, - ) -> QueryResult<()> { + ) -> QueryResult { let operation = NewETHOperation { op_type: op_type.to_string(), nonce: i64::from(nonce), @@ -161,9 +157,10 @@ impl<'a> EthereumSchema<'a> { .execute(self.0.conn())?; } + // Update the stored stats. 
self.report_created_operation(op_type)?; - Ok(()) + Ok(eth_op_id) }) } diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index d018e8eb02..49d6a4fbaf 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -35,7 +35,7 @@ pub fn get_operation(block_number: BlockNumber) -> Operation { #[derive(Debug)] pub struct EthereumTxParams { op_type: String, - op_id: i64, + op: Operation, hash: H256, deadline_block: u64, nonce: u32, @@ -44,11 +44,12 @@ pub struct EthereumTxParams { } impl EthereumTxParams { - pub fn new(op_type: String, op_id: i64, nonce: u32) -> Self { + pub fn new(op_type: String, op: Operation, nonce: u32) -> Self { + let op_id = op.id.unwrap() as u64; Self { op_type, - op_id, - hash: H256::from_low_u64_ne(op_id as u64), + op, + hash: H256::from_low_u64_ne(op_id), deadline_block: 100, nonce, gas_price: 1000.into(), @@ -65,6 +66,7 @@ impl EthereumTxParams { ETHOperation { id: db_id, op_type, + op: Some(self.op.clone()), nonce: self.nonce.into(), last_deadline_block: self.deadline_block, last_used_gas_price, @@ -112,10 +114,10 @@ fn ethereum_storage() { let operation = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Store the Ethereum transaction. - let params = EthereumTxParams::new("commit".into(), operation.id.unwrap(), 1); + let params = EthereumTxParams::new("commit".into(), operation.clone(), 1); EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, - Some(params.op_id), + Some(params.op.id.unwrap()), params.hash, params.deadline_block, params.nonce, @@ -125,11 +127,8 @@ fn ethereum_storage() { // Check that it can be loaded. let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; - let eth_op = unconfirmed_operations[0].0.clone(); - let op = unconfirmed_operations[0] - .1 - .clone() - .expect("No Operation entry"); + let eth_op = unconfirmed_operations[0].clone(); + let op = eth_op.op.clone().expect("No Operation entry"); assert_eq!(op.id, operation.id); // Load the database ID, since we can't predict it for sure. assert_eq!(eth_op, params.to_eth_op(eth_op.id)); @@ -139,10 +138,10 @@ fn ethereum_storage() { let operation_2 = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Create one more Ethereum transaction. - let params_2 = EthereumTxParams::new("commit".into(), operation_2.id.unwrap(), 2); + let params_2 = EthereumTxParams::new("commit".into(), operation_2.clone(), 2); EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, - Some(params_2.op_id), + Some(params_2.op.id.unwrap()), params_2.hash, params_2.deadline_block, params_2.nonce, @@ -153,11 +152,8 @@ fn ethereum_storage() { // Check that we now can load two operations. 
let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; assert_eq!(unconfirmed_operations.len(), 2); - let eth_op = unconfirmed_operations[1].0.clone(); - let op = unconfirmed_operations[1] - .1 - .clone() - .expect("No Operation entry"); + let eth_op = unconfirmed_operations[1].clone(); + let op = eth_op.op.clone().expect("No Operation entry"); assert_eq!(op.id, operation_2.id); assert_eq!(eth_op, params_2.to_eth_op(eth_op.id)); From 4f95457426b042bd2ab4dbe5e5c64dc18832f57c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 12:02:50 +0300 Subject: [PATCH 084/186] Adapt eth_sender to match new storage structure --- core/models/src/ethereum.rs | 21 +- core/server/src/eth_sender/database.rs | 94 +- core/server/src/eth_sender/mod.rs | 238 +++-- core/server/src/eth_sender/tests/mock.rs | 4 +- core/server/src/eth_sender/tests/mod.rs | 988 ++++++++++----------- core/server/src/eth_sender/transactions.rs | 62 +- core/server/src/eth_sender/tx_queue/mod.rs | 39 +- 7 files changed, 708 insertions(+), 738 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index 0d46e79cbf..3e36b09d58 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -3,7 +3,7 @@ use std::str::FromStr; // External uses /// Local uses -use crate::Operation; +use crate::{Action, Operation}; use web3::types::{H256, U256}; /// Numerical identifier of the Ethereum operation. @@ -72,6 +72,25 @@ pub struct ETHOperation { pub final_hash: Option, } +impl ETHOperation { + /// Checks whether the transaction is considered "stuck". + /// "Stuck" transactions are ones that were not included into any block + /// within a desirable amount of time, and thus require re-sending with + /// increased gas amount. + pub fn is_stuck(&self, current_block: u64) -> bool { + current_block >= self.last_deadline_block + } + + /// Checks whether this object relates to the `Verify` ZK Sync operation. + pub fn is_verify(&self) -> bool { + if let Some(op) = &self.op { + matches!(op.action, Action::Verify { .. }) + } else { + false + } + } +} + impl PartialEq for ETHOperation { fn eq(&self, other: &Self) -> bool { // We assume that there will be no two different `ETHOperation`s with diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index e82e558895..c0ad74213e 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -4,23 +4,32 @@ //! database to run, which is required for tests. // Built-in deps -use std::collections::VecDeque; use std::str::FromStr; // External uses use bigdecimal::BigDecimal; -use web3::types::H256; +use web3::types::{H256, U256}; // Workspace uses +use models::ethereum::{ETHOperation, EthOpId}; use storage::ConnectionPool; // Local uses -use super::transactions::{ETHStats, OperationETHState, OperationType, TransactionETHState}; +use super::transactions::ETHStats; /// Abstract database access trait, optimized for the needs of `ETHSender`. pub(super) trait DatabaseAccess { /// Loads the unconfirmed operations from the database. - fn restore_state(&self) -> Result, failure::Error>; + fn restore_state(&self) -> Result, failure::Error>; - /// Saves an unconfirmed operation to the database. - fn save_unconfirmed_operation(&self, tx: &TransactionETHState) -> Result<(), failure::Error>; + /// Saves a new unconfirmed operation to the database. + fn save_new_eth_tx(&self, op: ÐOperation) -> Result; + + /// Adds a new tx info to the previously started Ethereum operation. 
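+    /// The hash is appended to the operation's list in `eth_tx_hashes`, while the deadline block and the gas price replace the previously stored values.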
+ fn update_eth_tx( + &self, + eth_op_id: EthOpId, + hash: &H256, + new_deadline_block: i64, + new_gas_value: U256, + ) -> Result<(), failure::Error>; /// Marks an operation as completed in the database. fn confirm_operation(&self, hash: &H256) -> Result<(), failure::Error>; @@ -30,17 +39,6 @@ pub(super) trait DatabaseAccess { /// Loads the stored Ethereum operations stats. fn load_stats(&self) -> Result; - - /// Updates the stats counter with the new operation reported. - /// This method should be called once **per operation**. It means that if transaction - /// for some operation was stuck, and another transaction was created for it, this method - /// **should not** be invoked. - /// - /// This method expects the database to be initially prepared with inserting the actual - /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that - /// and it's invoked within `db-reset` subcommand. - fn report_created_operation(&self, operation_type: OperationType) - -> Result<(), failure::Error>; } /// The actual database wrapper. @@ -57,33 +55,49 @@ impl Database { } impl DatabaseAccess for Database { - fn restore_state(&self) -> Result, failure::Error> { + fn restore_state(&self) -> Result, failure::Error> { let storage = self .db_pool .access_storage() .expect("Failed to access storage"); - let unconfirmed_ops = storage - .ethereum_schema() - .load_unconfirmed_operations()? - .into_iter() - .map(|(operation, txs)| OperationETHState { - operation, - txs: txs.into_iter().map(|tx| tx.into()).collect(), - }) - .collect(); + let unconfirmed_ops = storage.ethereum_schema().load_unconfirmed_operations()?; Ok(unconfirmed_ops) } - fn save_unconfirmed_operation(&self, tx: &TransactionETHState) -> Result<(), failure::Error> { + fn save_new_eth_tx(&self, op: ÐOperation) -> Result { let storage = self.db_pool.access_storage()?; - Ok(storage.ethereum_schema().save_operation_eth_tx( - tx.op_id, - tx.signed_tx.hash, - tx.deadline_block, - tx.signed_tx.nonce.as_u32(), - BigDecimal::from_str(&tx.signed_tx.gas_price.to_string()).unwrap(), - tx.signed_tx.raw_tx.clone(), + + assert_eq!( + op.used_tx_hashes.len(), + 1, + "For the new operation there should be exactly one tx hash" + ); + let tx_hash = op.used_tx_hashes[0]; + Ok(storage.ethereum_schema().save_new_eth_tx( + op.op_type.clone(), + op.op.clone().map(|op| op.id.unwrap()), + tx_hash, + op.last_deadline_block, + op.nonce.as_u32(), + BigDecimal::from_str(&op.last_used_gas_price.to_string()).unwrap(), + op.encoded_tx_data.clone(), + )?) + } + + fn update_eth_tx( + &self, + eth_op_id: EthOpId, + hash: &H256, + new_deadline_block: i64, + new_gas_value: U256, + ) -> Result<(), failure::Error> { + let storage = self.db_pool.access_storage()?; + Ok(storage.ethereum_schema().update_eth_tx( + eth_op_id, + hash, + new_deadline_block, + BigDecimal::from_str(&new_gas_value.to_string()).unwrap(), )?) } @@ -102,14 +116,4 @@ impl DatabaseAccess for Database { let stats = storage.ethereum_schema().load_stats()?; Ok(stats.into()) } - - fn report_created_operation( - &self, - operation_type: OperationType, - ) -> Result<(), failure::Error> { - let storage = self.db_pool.access_storage()?; - Ok(storage - .ethereum_schema() - .report_created_operation(operation_type)?) 
- } } diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 1fa8588578..69631a45b0 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -11,17 +11,23 @@ use futures::channel::mpsc; use tokio::runtime::Runtime; use tokio::time; use web3::contract::Options; -use web3::types::{TransactionReceipt, U256}; +use web3::types::{TransactionReceipt, H256, U256}; // Workspace uses -use models::config_options::{ConfigurationOptions, ThreadPanicNotify}; -use models::node::config; -use models::{Action, ActionType, Operation}; +use eth_client::SignedCallResult; +use models::{ + config_options::{ConfigurationOptions, ThreadPanicNotify}, + ethereum::{ETHOperation, OperationType}, + node::config, + Action, Operation, +}; use storage::ConnectionPool; // Local uses -use self::database::{Database, DatabaseAccess}; -use self::ethereum_interface::{EthereumHttpClient, EthereumInterface}; -use self::transactions::*; -use self::tx_queue::{TxData, TxQueue, TxQueueBuilder}; +use self::{ + database::{Database, DatabaseAccess}, + ethereum_interface::{EthereumHttpClient, EthereumInterface}, + transactions::*, + tx_queue::{TxData, TxQueue, TxQueueBuilder}, +}; mod database; mod ethereum_interface; @@ -71,7 +77,7 @@ const WAIT_CONFIRMATIONS: u64 = 1; /// which can be changed if needed. struct ETHSender { /// Ongoing operations queue. - ongoing_ops: VecDeque, + ongoing_ops: VecDeque, /// Connection to the database. db: DB, /// Ethereum intermediator. @@ -93,9 +99,11 @@ impl ETHSender { ) -> Self { const MAX_TXS_IN_FLIGHT: usize = 1; // TODO: Should be configurable. - let ongoing_ops = db + let ongoing_ops: VecDeque<_> = db .restore_state() - .expect("Failed loading unconfirmed operations from the storage"); + .expect("Failed loading unconfirmed operations from the storage") + .into_iter() + .collect(); let stats = db .load_stats() @@ -156,50 +164,24 @@ impl ETHSender { let current_block = self.ethereum.block_number()?; let deadline_block = self.get_deadline_block(current_block); - if let Some(operation) = tx.operation { - let mut eth_op = OperationETHState { - operation, - txs: Vec::new(), - }; - - let new_tx = - self.sign_raw_tx(eth_op.operation.id.unwrap(), tx.raw, deadline_block, None)?; - - self.db.save_unconfirmed_operation(&new_tx)?; - self.db.report_created_operation( - self.operation_type_for_action(ð_op.operation.action), - )?; - - eth_op.txs.push(new_tx.clone()); - info!( - "Sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", - new_tx.op_id, new_tx.signed_tx.hash, new_tx.signed_tx.nonce, - ); - self.ethereum.send_tx(&new_tx.signed_tx)?; - - self.ongoing_ops.push_back(eth_op); - } else { - let mut options = Options::default(); - let nonce = self.db.next_nonce()?; - options.nonce = Some(nonce.into()); + let (mut new_tx, signed_tx) = + self.sign_new_tx(tx.op_type, tx.operation, tx.raw, deadline_block)?; - let tx = self - .ethereum - .sign_prepared_tx(tx.raw, options) - .map_err(|e| failure::format_err!("Failed to sign a prepared tx: {}", e))?; + let op_id = self.db.save_new_eth_tx(&new_tx)?; + new_tx.id = op_id; - // TODO: Operations w/o `Operation` field (e.g. withdrawals) should be stored to the DB as well. 
- // self.db - // .report_created_operation(self.operation_type_for_action(&op.operation.action))?; + info!( + "Sending ETH tx: ETH Operation {} ({:?}), ZKSync Operation {:?}", + new_tx.id, new_tx.op_type, new_tx.op, + ); + self.ethereum.send_tx(&signed_tx)?; - info!("Sending tx with hash: {:#?}", tx.hash); - self.ethereum.send_tx(&tx)?; - } + self.ongoing_ops.push_back(new_tx); Ok(()) } - fn try_commit(&mut self, mut operation: OperationETHState) { + fn try_commit(&mut self, mut operation: ETHOperation) { // Check the transactions associated with the operation, and send a new one if required. // We perform a commitment step here. In case of error, we suppose that this is some @@ -217,19 +199,17 @@ impl ETHSender { match result { OperationCommitment::Committed => { info!( - "Operation {}, {} block: {}, confirmed on ETH", - operation.operation.id.unwrap(), - operation.operation.action.to_string(), - operation.operation.block.block_number, + "Confirmed: ETH Operation {} ({:?}), ZKSync Operation {:?}", + operation.id, operation.op_type, operation.op, ); // Free a slot for the next tx in the queue. self.tx_queue.report_commitment(); - if operation.operation.action.get_type() == ActionType::VERIFY { + if operation.is_verify() { // We notify about verify only when commit is confirmed on the Ethereum. self.op_notify - .try_send(operation.operation) + .try_send(operation.op.expect("Should be verify operation")) .map_err(|e| warn!("Failed notify about verify op confirmation: {}", e)) .unwrap_or_default(); @@ -246,47 +226,42 @@ impl ETHSender { fn perform_commitment_step( &mut self, - op: &mut OperationETHState, + op: &mut ETHOperation, ) -> Result { assert!( - !op.txs.is_empty(), + !op.used_tx_hashes.is_empty(), "OperationETHState should have at least one transaction" ); let current_block = self.ethereum.block_number()?; // Check statuses of existing transactions. - let mut last_stuck_tx: Option<&TransactionETHState> = None; - // Go through every transaction in a loop. We will exit this method early // if there will be discovered a pending or successfully committed transaction. - for tx in &op.txs { - match self.check_transaction_state(tx, current_block)? { + for tx_hash in &op.used_tx_hashes { + match self.check_transaction_state(op, tx_hash, current_block)? { TxCheckOutcome::Pending => { // Transaction is pending, nothing to do yet. return Ok(OperationCommitment::Pending); } TxCheckOutcome::Committed => { info!( - "Operation {}, {} block: {}, committed, tx: {:#x}", - op.operation.id.unwrap(), - op.operation.action.to_string(), - op.operation.block.block_number, - tx.signed_tx.hash, + "Eth operation {}, ZKSync operation {:?}, committed, tx: {:#x}", + op.id, op.op, tx_hash, ); - self.db.confirm_operation(&tx.signed_tx.hash)?; + self.db.confirm_operation(tx_hash)?; return Ok(OperationCommitment::Committed); } TxCheckOutcome::Stuck => { - // Update the last stuck transaction. If we won't exit the loop early, - // it will be used to create a new transaction with higher gas limit. - last_stuck_tx = Some(tx); + // We do nothing for a stuck transaction. If this will be + // the last entry of the list, a new tx will be sent. } TxCheckOutcome::Failed(receipt) => { warn!( - "ETH transaction failed: tx: {:#x}, operation_id: {}; tx_receipt: {:#?} ", - tx.signed_tx.hash, - op.operation.id.unwrap(), + "ETH transaction failed: tx: {:#x}, op_type: {:?}, op: {:?}; tx_receipt: {:#?} ", + tx_hash, + op.op_type, + op.op, receipt, ); // Process the failure according to the chosen policy. 
@@ -297,41 +272,23 @@ impl ETHSender { // Reaching this point will mean that the latest transaction got stuck. // We should create another tx based on it, and send it. - assert!( - last_stuck_tx.is_some(), - "Loop didn't exit without a stuck tx" - ); let deadline_block = self.get_deadline_block(current_block); // Raw tx contents are the same for every transaction, so we just - // clone them from the first tx. - let raw_tx = op.txs[0].signed_tx.raw_tx.clone(); - let new_tx = self.sign_raw_tx( - op.operation.id.unwrap(), - raw_tx, - deadline_block, - last_stuck_tx, - )?; + // create a new one from the old one with updated parameters. + let new_tx = self.create_supplement_tx(deadline_block, op)?; // New transaction should be persisted in the DB *before* sending it. - self.db.save_unconfirmed_operation(&new_tx)?; - // Since we're processing the stuck operation, no need to invoke `report_created_operation`. + self.db + .update_eth_tx(op.id, &new_tx.hash, deadline_block as i64, new_tx.gas_price)?; - op.txs.push(new_tx.clone()); info!( - "Stuck tx processing: sending tx for op, op_id: {} tx_hash: {:#x}, nonce: {}", - new_tx.op_id, new_tx.signed_tx.hash, new_tx.signed_tx.nonce, + "Stuck tx processing: sending tx for op, eth_op_id: {} tx_hash: {:#x}, nonce: {}", + op.id, new_tx.hash, new_tx.nonce, ); - self.ethereum.send_tx(&new_tx.signed_tx)?; + self.ethereum.send_tx(&new_tx)?; Ok(OperationCommitment::Pending) } - fn operation_type_for_action(&self, action: &Action) -> OperationType { - match action { - Action::Commit => OperationType::Commit, - Action::Verify { .. } => OperationType::Verify, - } - } - /// Handles a transaction execution failure by reporting the issue to the log /// and terminating the node. fn failure_handler(&self, receipt: &TransactionReceipt) -> ! { @@ -351,10 +308,11 @@ impl ETHSender { /// and reduces it to the simpler `TxCheckOutcome` report. fn check_transaction_state( &self, - tx: &TransactionETHState, + op: ÐOperation, + tx_hash: &H256, current_block: u64, ) -> Result { - let status = self.ethereum.get_tx_status(&tx.signed_tx.hash)?; + let status = self.ethereum.get_tx_status(tx_hash)?; let outcome = match status { // Successful execution. @@ -378,7 +336,7 @@ impl ETHSender { TxCheckOutcome::Failed(Box::new(status.receipt.unwrap())) } // Stuck transaction. - None if tx.is_stuck(current_block) => TxCheckOutcome::Stuck, + None if op.is_stuck(current_block) => TxCheckOutcome::Stuck, // No status and not stuck yet, thus considered pending. None => TxCheckOutcome::Pending, }; @@ -386,30 +344,54 @@ impl ETHSender { Ok(outcome) } - /// Creates a new transaction. If stuck tx is provided, the new transaction will be - /// and updated version of it; otherwise a brand new transaction will be created. - fn sign_raw_tx( + /// Creates a new Ethereum operation. + fn sign_new_tx( &self, - op_id: i64, + op_type: OperationType, + op: Option, raw_tx: Vec, deadline_block: u64, - stuck_tx: Option<&TransactionETHState>, - ) -> Result { - let tx_options = if let Some(stuck_tx) = stuck_tx { - self.tx_options_from_stuck_tx(stuck_tx)? - } else { + ) -> Result<(ETHOperation, SignedCallResult), failure::Error> { + let tx_options = { let mut options = Options::default(); let nonce = self.db.next_nonce()?; options.nonce = Some(nonce.into()); options }; + let signed_tx = self.ethereum.sign_prepared_tx(raw_tx.clone(), tx_options)?; + let state = ETHOperation { + id: 0, // Will be initialized later. 
+ op_type, + op, + nonce: signed_tx.nonce, + last_deadline_block: deadline_block, + last_used_gas_price: signed_tx.gas_price, + used_tx_hashes: vec![signed_tx.hash], + encoded_tx_data: raw_tx, + confirmed: false, + final_hash: None, + }; + Ok((state, signed_tx)) + } + + /// Creates a new transaction for the existing Ethereum operation. + /// This method is used to create supplement transactions instead of the stuck one. + fn create_supplement_tx( + &self, + deadline_block: u64, + stuck_tx: &mut ETHOperation, + ) -> Result { + let tx_options = self.tx_options_from_stuck_tx(stuck_tx)?; + + let raw_tx = stuck_tx.encoded_tx_data.clone(); let signed_tx = self.ethereum.sign_prepared_tx(raw_tx, tx_options)?; - Ok(TransactionETHState { - op_id, - deadline_block, - signed_tx, - }) + + stuck_tx.last_deadline_block = deadline_block; + stuck_tx.last_used_gas_price = signed_tx.gas_price; + stuck_tx.used_tx_hashes.push(signed_tx.hash.clone()); + + Ok(signed_tx) } // Calculates a new gas amount for the replacement of the stuck tx. @@ -422,19 +404,18 @@ impl ETHSender { /// Creates a new tx options from a stuck transaction, with updated gas amount /// and nonce. - fn tx_options_from_stuck_tx( - &self, - stuck_tx: &TransactionETHState, - ) -> Result { - let old_tx_gas_price = - U256::from_dec_str(&stuck_tx.signed_tx.gas_price.to_string()).unwrap(); + fn tx_options_from_stuck_tx(&self, stuck_tx: ÐOperation) -> Result { + let old_tx_gas_price = stuck_tx.last_used_gas_price; let new_gas_price = self.scale_gas(old_tx_gas_price)?; - let nonce = stuck_tx.signed_tx.nonce; + let nonce = stuck_tx.nonce; info!( "Replacing tx: hash: {:#x}, old_gas: {}, new_gas: {}, used nonce: {}", - stuck_tx.signed_tx.hash, old_tx_gas_price, new_gas_price, nonce + stuck_tx.used_tx_hashes.last().unwrap(), + old_tx_gas_price, + new_gas_price, + nonce ); Ok(Options::with(move |opt| { @@ -490,15 +471,18 @@ impl ETHSender { match &op.action { Action::Commit => { - self.tx_queue - .add_commit_operation(TxData::from_operation(op, raw_tx)); + self.tx_queue.add_commit_operation(TxData::from_operation( + OperationType::Commit, + op, + raw_tx, + )); } - Action::Verify { proof } => { + Action::Verify { .. 
} => { let block_number = op.block.block_number; self.tx_queue.add_verify_operation( block_number as usize, - TxData::from_operation(op, raw_tx), + TxData::from_operation(OperationType::Verify, op, raw_tx), ); } } @@ -512,7 +496,7 @@ impl ETHSender { ); self.tx_queue - .add_withdraw_operation(TxData::from_raw(raw_tx)); + .add_withdraw_operation(TxData::from_raw(OperationType::Withdraw, raw_tx)); } } diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 85a3e9a427..3e0e162e93 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -9,7 +9,7 @@ use web3::contract::{tokens::Tokenize, Options}; use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; -use models::Operation; +use models::{ethereum::ETHOperation, Operation}; // Local uses use super::ETHSender; use crate::eth_sender::database::DatabaseAccess; @@ -91,7 +91,7 @@ impl MockDatabase { } impl DatabaseAccess for MockDatabase { - fn restore_state(&self) -> Result, failure::Error> { + fn restore_state(&self) -> Result)>, failure::Error> { Ok(self.restore_state.clone()) } diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 60e20ff8c4..e3f056e59a 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -1,518 +1,518 @@ -// External uses -use web3::contract::Options; -// Local uses -use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; -use super::{ - database::DatabaseAccess, - ethereum_interface::EthereumInterface, - transactions::{ - ETHStats, ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome, - }, - ETHSender, -}; - -mod mock; -mod test_data; - -/// Basic test that `ETHSender` creation does not panic and initializes correctly. -#[test] -fn basic_test() { - let (eth_sender, _, _) = default_eth_sender(); - - // Check that there are no unconfirmed operations by default. - assert!(eth_sender.ongoing_ops.is_empty()); -} - -/// Check for the gas scaling: gas is expected to be increased by 15% or set equal -/// to gas cost suggested by Ethereum (if it's greater). -#[test] -fn scale_gas() { - let (mut eth_sender, _, _) = default_eth_sender(); - - // Set the gas price in Ethereum to 1000. - eth_sender.ethereum.gas_price = 1000.into(); - - // Check that gas price of 1000 is increased to 1150. - let scaled_gas = eth_sender.scale_gas(1000.into()).unwrap(); - assert_eq!(scaled_gas, 1150.into()); - - // Check that gas price of 100 is increased to 1000 (price in Ethereum object). - let scaled_gas = eth_sender.scale_gas(100.into()).unwrap(); - assert_eq!(scaled_gas, 1000.into()); -} - -/// Checks that deadline block is chosen according to the expected policy. -#[test] -fn deadline_block() { - let (eth_sender, _, _) = default_eth_sender(); - - assert_eq!( - eth_sender.get_deadline_block(0), - super::EXPECTED_WAIT_TIME_BLOCKS - ); - assert_eq!( - eth_sender.get_deadline_block(10), - 10 + super::EXPECTED_WAIT_TIME_BLOCKS - ); -} - -/// Checks that received transaction response is reduced to the -/// `TxCheckOutcome` correctly. -/// -/// Here we check every possible output of the `check_transaction_state` method. 
-#[test] -fn transaction_state() { - let (mut eth_sender, _, _) = default_eth_sender(); - let current_block = eth_sender.ethereum.block_number; - let deadline_block = eth_sender.get_deadline_block(current_block); - let operations: Vec = vec![ - test_data::commit_operation(0), // Will be committed. - test_data::commit_operation(1), // Will be pending because of not enough confirmations. - test_data::commit_operation(2), // Will be failed. - test_data::commit_operation(3), // Will be stuck. - test_data::commit_operation(4), // Will be pending due no response. - ] - .iter() - .enumerate() - .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) - .collect(); - - // Committed operation. - let committed_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS, - success: true, - receipt: None, - }; - eth_sender - .ethereum - .add_execution(&operations[0], &committed_response); - - // Pending operation. - let pending_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS - 1, - success: true, - receipt: None, - }; - eth_sender - .ethereum - .add_execution(&operations[1], &pending_response); - - // Failed operation. - let failed_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS, - success: false, - receipt: Some(Default::default()), - }; - eth_sender - .ethereum - .add_execution(&operations[2], &failed_response); - - // Checks. - - // Committed operation. - assert_eq!( - eth_sender - .check_transaction_state( - &operations[0], - current_block + committed_response.confirmations - ) - .unwrap(), - TxCheckOutcome::Committed - ); - - // Pending operation (no enough confirmations). - assert_eq!( - eth_sender - .check_transaction_state( - &operations[1], - current_block + pending_response.confirmations - ) - .unwrap(), - TxCheckOutcome::Pending - ); - - // Failed operation. - assert_eq!( - eth_sender - .check_transaction_state( - &operations[2], - current_block + failed_response.confirmations - ) - .unwrap(), - TxCheckOutcome::Failed(Default::default()) - ); - - // Stuck operation. - assert_eq!( - eth_sender - .check_transaction_state( - &operations[3], - current_block + super::EXPECTED_WAIT_TIME_BLOCKS - ) - .unwrap(), - TxCheckOutcome::Stuck - ); - - // Pending operation (no response yet). - assert_eq!( - eth_sender - .check_transaction_state( - &operations[4], - current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 - ) - .unwrap(), - TxCheckOutcome::Pending - ); -} - -/// Test for a normal `ETHSender` workflow: -/// - we send the two sequential operations (commit and verify); -/// - they are successfully committed to the Ethereum; -/// - `completeWithdrawals` tx is sent to the Ethereum; -/// - notification is sent after `verify` operation is committed. -#[test] -fn operation_commitment_workflow() { - let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); - - // In this test we will run one commit and one verify operation and should - // obtain a notification about the operation being completed in the end. - let operations = vec![ - test_data::commit_operation(0), - test_data::verify_operation(0), - ]; - - let verify_operation_id = operations[1].id; - - for (nonce, operation) in operations.iter().enumerate() { - // Send an operation to `ETHSender`. - sender.try_send(operation.clone()).unwrap(); - - // Retrieve it there and then process. - eth_sender.retrieve_operations(); - eth_sender.proceed_next_operations(); - - // Now we should see that transaction is stored in the database and sent to the Ethereum. 
- let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); - eth_sender.db.assert_stored(&expected_tx); - eth_sender.ethereum.assert_sent(&expected_tx); - - // No confirmation should be done yet. - assert!(receiver.try_next().is_err()); - - // Increment block, make the transaction look successfully executed, and process the - // operation again. - eth_sender - .ethereum - .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operations(); - - // Check that operation is confirmed. - eth_sender.db.assert_confirmed(&expected_tx); - } - - // Process the next operation and check that `completeWithdrawals` transaction is sent. - eth_sender.proceed_next_operations(); - let mut options = Options::default(); - let nonce = operations.len().into(); - options.nonce = Some(nonce); - let raw_tx = eth_sender.ethereum.encode_tx_data( - "completeWithdrawals", - models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - ); - let tx = eth_sender - .ethereum - .sign_prepared_tx(raw_tx, options) - .unwrap(); - eth_sender.ethereum.assert_sent_by_hash(&tx.hash); - - // We should be notified about verify operation being completed. - assert_eq!( - receiver.try_next().unwrap().unwrap().id, - verify_operation_id - ); -} - -/// A simple scenario for a stuck transaction: -/// - A transaction is sent to the Ethereum. -/// - It is not processed after some blocks. -/// - `ETHSender` creates a new transaction with increased gas. -/// - This transaction is completed successfully. -#[test] -fn stuck_transaction() { - let (mut eth_sender, mut sender, _) = default_eth_sender(); - - // Workflow for the test is similar to `operation_commitment_workflow`. - let operation = test_data::commit_operation(0); - sender.try_send(operation.clone()).unwrap(); - - eth_sender.retrieve_operations(); - eth_sender.proceed_next_operations(); - - let nonce = 0; - let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - - // Skip some blocks and expect sender to send a new tx. - eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; - eth_sender.proceed_next_operations(); - - // Check that new transaction is sent (and created based on the previous stuck tx). - let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); - let expected_tx = eth_sender - .sign_raw_tx( - stuck_tx.op_id, - raw_tx, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - Some(&stuck_tx), - ) - .unwrap(); - eth_sender.db.assert_stored(&expected_tx); - eth_sender.ethereum.assert_sent(&expected_tx); - - // Increment block, make the transaction look successfully executed, and process the - // operation again. - eth_sender - .ethereum - .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operations(); - - // Check that operation is confirmed. - eth_sender.db.assert_confirmed(&expected_tx); -} - -// TODO: Restore once withdraw operations are fixed in `eth_sender`. -// Currently this test is too hard to implement, since withdraw txs are not stored in the database. -// /// This test verifies that with multiple operations received all-together, -// /// their order is respected and no processing of the next operation is started until -// /// the previous one is committed. 
+// // External uses +// use web3::contract::Options; +// // Local uses +// use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; +// use super::{ +// database::DatabaseAccess, +// ethereum_interface::EthereumInterface, +// transactions::{ +// ETHStats, ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome, +// }, +// ETHSender, +// }; + +// mod mock; +// mod test_data; + +// /// Basic test that `ETHSender` creation does not panic and initializes correctly. // #[test] -// fn operations_order() { -// let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); +// fn basic_test() { +// let (eth_sender, _, _) = default_eth_sender(); -// // We send multiple the operations at once to the channel. -// let operations_count = 3; -// let mut operations = Vec::new(); -// let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; -// let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; -// operations.extend_from_slice(commit_operations); -// operations.extend_from_slice(verify_operations); +// // Check that there are no unconfirmed operations by default. +// assert!(eth_sender.ongoing_ops.is_empty()); +// } -// // Also we create the list of expected transactions. -// let mut expected_txs = Vec::new(); +// /// Check for the gas scaling: gas is expected to be increased by 15% or set equal +// /// to gas cost suggested by Ethereum (if it's greater). +// #[test] +// fn scale_gas() { +// let (mut eth_sender, _, _) = default_eth_sender(); -// // Create expected txs from all the operations. -// for (idx, (commit_operation, verify_operation)) in -// commit_operations.iter().zip(verify_operations).enumerate() -// { -// // Create the commit operation. -// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; -// let deadline_block = eth_sender.get_deadline_block(start_block); -// let nonce = idx * 3; +// // Set the gas price in Ethereum to 1000. +// eth_sender.ethereum.gas_price = 1000.into(); -// let commit_op_tx = -// create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); +// // Check that gas price of 1000 is increased to 1150. +// let scaled_gas = eth_sender.scale_gas(1000.into()).unwrap(); +// assert_eq!(scaled_gas, 1150.into()); -// expected_txs.push(commit_op_tx); +// // Check that gas price of 100 is increased to 1000 (price in Ethereum object). +// let scaled_gas = eth_sender.scale_gas(100.into()).unwrap(); +// assert_eq!(scaled_gas, 1000.into()); +// } + +// /// Checks that deadline block is chosen according to the expected policy. +// #[test] +// fn deadline_block() { +// let (eth_sender, _, _) = default_eth_sender(); + +// assert_eq!( +// eth_sender.get_deadline_block(0), +// super::EXPECTED_WAIT_TIME_BLOCKS +// ); +// assert_eq!( +// eth_sender.get_deadline_block(10), +// 10 + super::EXPECTED_WAIT_TIME_BLOCKS +// ); +// } + +// /// Checks that received transaction response is reduced to the +// /// `TxCheckOutcome` correctly. +// /// +// /// Here we check every possible output of the `check_transaction_state` method. +// #[test] +// fn transaction_state() { +// let (mut eth_sender, _, _) = default_eth_sender(); +// let current_block = eth_sender.ethereum.block_number; +// let deadline_block = eth_sender.get_deadline_block(current_block); +// let operations: Vec = vec![ +// test_data::commit_operation(0), // Will be committed. +// test_data::commit_operation(1), // Will be pending because of not enough confirmations. +// test_data::commit_operation(2), // Will be failed. 
+// test_data::commit_operation(3), // Will be stuck. +// test_data::commit_operation(4), // Will be pending due no response. +// ] +// .iter() +// .enumerate() +// .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) +// .collect(); + +// // Committed operation. +// let committed_response = ExecutedTxStatus { +// confirmations: super::WAIT_CONFIRMATIONS, +// success: true, +// receipt: None, +// }; +// eth_sender +// .ethereum +// .add_execution(&operations[0], &committed_response); + +// // Pending operation. +// let pending_response = ExecutedTxStatus { +// confirmations: super::WAIT_CONFIRMATIONS - 1, +// success: true, +// receipt: None, +// }; +// eth_sender +// .ethereum +// .add_execution(&operations[1], &pending_response); + +// // Failed operation. +// let failed_response = ExecutedTxStatus { +// confirmations: super::WAIT_CONFIRMATIONS, +// success: false, +// receipt: Some(Default::default()), +// }; +// eth_sender +// .ethereum +// .add_execution(&operations[2], &failed_response); + +// // Checks. + +// // Committed operation. +// assert_eq!( +// eth_sender +// .check_transaction_state( +// &operations[0], +// current_block + committed_response.confirmations +// ) +// .unwrap(), +// TxCheckOutcome::Committed +// ); + +// // Pending operation (no enough confirmations). +// assert_eq!( +// eth_sender +// .check_transaction_state( +// &operations[1], +// current_block + pending_response.confirmations +// ) +// .unwrap(), +// TxCheckOutcome::Pending +// ); + +// // Failed operation. +// assert_eq!( +// eth_sender +// .check_transaction_state( +// &operations[2], +// current_block + failed_response.confirmations +// ) +// .unwrap(), +// TxCheckOutcome::Failed(Default::default()) +// ); + +// // Stuck operation. +// assert_eq!( +// eth_sender +// .check_transaction_state( +// &operations[3], +// current_block + super::EXPECTED_WAIT_TIME_BLOCKS +// ) +// .unwrap(), +// TxCheckOutcome::Stuck +// ); + +// // Pending operation (no response yet). +// assert_eq!( +// eth_sender +// .check_transaction_state( +// &operations[4], +// current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 +// ) +// .unwrap(), +// TxCheckOutcome::Pending +// ); +// } -// // Create the verify operation, as by priority it will be processed right after `commit`. -// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; -// let deadline_block = eth_sender.get_deadline_block(start_block); -// let nonce = idx * 3 + 1; +// /// Test for a normal `ETHSender` workflow: +// /// - we send the two sequential operations (commit and verify); +// /// - they are successfully committed to the Ethereum; +// /// - `completeWithdrawals` tx is sent to the Ethereum; +// /// - notification is sent after `verify` operation is committed. +// #[test] +// fn operation_commitment_workflow() { +// let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); -// let verify_op_tx = -// create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); +// // In this test we will run one commit and one verify operation and should +// // obtain a notification about the operation being completed in the end. +// let operations = vec![ +// test_data::commit_operation(0), +// test_data::verify_operation(0), +// ]; -// expected_txs.push(verify_op_tx); -// } +// let verify_operation_id = operations[1].id; -// for operation in operations.iter() { +// for (nonce, operation) in operations.iter().enumerate() { +// // Send an operation to `ETHSender`. 
// sender.try_send(operation.clone()).unwrap(); -// } -// eth_sender.retrieve_operations(); -// // Then we go through the operations and check that the order of operations is preserved. -// for (idx, tx) in expected_txs.iter().enumerate() { +// // Retrieve it there and then process. +// eth_sender.retrieve_operations(); // eth_sender.proceed_next_operations(); -// // Check that current expected tx is stored, but the next ones are not. -// eth_sender.db.assert_stored(tx); -// eth_sender.ethereum.assert_sent(tx); +// // Now we should see that transaction is stored in the database and sent to the Ethereum. +// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); +// let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); +// eth_sender.db.assert_stored(&expected_tx); +// eth_sender.ethereum.assert_sent(&expected_tx); -// for following_tx in expected_txs[idx + 1..].iter() { -// eth_sender.db.assert_not_stored(following_tx) -// } +// // No confirmation should be done yet. +// assert!(receiver.try_next().is_err()); +// // Increment block, make the transaction look successfully executed, and process the +// // operation again. // eth_sender // .ethereum -// .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); // eth_sender.proceed_next_operations(); -// eth_sender.db.assert_confirmed(tx); - -// if idx % 2 == 1 { -// // For every verify operation, we should also add a withdraw operation and process it. -// let raw_tx = eth_sender.ethereum.encode_tx_data( -// "completeWithdrawals", -// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, -// ); - -// let nonce = (idx / 2) * 3 + 2; -// let mut options = Options::default(); -// options.nonce = Some(nonce.into()); - -// let signed_tx = eth_sender -// .ethereum -// .sign_prepared_tx(raw_tx, options) -// .unwrap(); - -// eth_sender -// .ethereum -// .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); -// eth_sender.proceed_next_operations(); -// } + +// // Check that operation is confirmed. +// eth_sender.db.assert_confirmed(&expected_tx); // } -// // We should be notified about all the verify operations being completed. -// for _ in 0..operations_count { -// assert!(receiver.try_next().unwrap().is_some()); +// // Process the next operation and check that `completeWithdrawals` transaction is sent. +// eth_sender.proceed_next_operations(); +// let mut options = Options::default(); +// let nonce = operations.len().into(); +// options.nonce = Some(nonce); +// let raw_tx = eth_sender.ethereum.encode_tx_data( +// "completeWithdrawals", +// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, +// ); +// let tx = eth_sender +// .ethereum +// .sign_prepared_tx(raw_tx, options) +// .unwrap(); +// eth_sender.ethereum.assert_sent_by_hash(&tx.hash); + +// // We should be notified about verify operation being completed. +// assert_eq!( +// receiver.try_next().unwrap().unwrap().id, +// verify_operation_id +// ); +// } + +// /// A simple scenario for a stuck transaction: +// /// - A transaction is sent to the Ethereum. +// /// - It is not processed after some blocks. +// /// - `ETHSender` creates a new transaction with increased gas. +// /// - This transaction is completed successfully. 
+// #[test] +// fn stuck_transaction() { +// let (mut eth_sender, mut sender, _) = default_eth_sender(); + +// // Workflow for the test is similar to `operation_commitment_workflow`. +// let operation = test_data::commit_operation(0); +// sender.try_send(operation.clone()).unwrap(); + +// eth_sender.retrieve_operations(); +// eth_sender.proceed_next_operations(); + +// let nonce = 0; +// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); +// let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + +// // Skip some blocks and expect sender to send a new tx. +// eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; +// eth_sender.proceed_next_operations(); + +// // Check that new transaction is sent (and created based on the previous stuck tx). +// let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); +// let expected_tx = eth_sender +// .sign_raw_tx( +// stuck_tx.op_id, +// raw_tx, +// eth_sender.get_deadline_block(eth_sender.ethereum.block_number), +// Some(&stuck_tx), +// ) +// .unwrap(); +// eth_sender.db.assert_stored(&expected_tx); +// eth_sender.ethereum.assert_sent(&expected_tx); + +// // Increment block, make the transaction look successfully executed, and process the +// // operation again. +// eth_sender +// .ethereum +// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); + +// // Check that operation is confirmed. +// eth_sender.db.assert_confirmed(&expected_tx); +// } + +// // TODO: Restore once withdraw operations are fixed in `eth_sender`. +// // Currently this test is too hard to implement, since withdraw txs are not stored in the database. +// // /// This test verifies that with multiple operations received all-together, +// // /// their order is respected and no processing of the next operation is started until +// // /// the previous one is committed. +// // #[test] +// // fn operations_order() { +// // let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); + +// // // We send multiple the operations at once to the channel. +// // let operations_count = 3; +// // let mut operations = Vec::new(); +// // let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; +// // let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; +// // operations.extend_from_slice(commit_operations); +// // operations.extend_from_slice(verify_operations); + +// // // Also we create the list of expected transactions. +// // let mut expected_txs = Vec::new(); + +// // // Create expected txs from all the operations. +// // for (idx, (commit_operation, verify_operation)) in +// // commit_operations.iter().zip(verify_operations).enumerate() +// // { +// // // Create the commit operation. +// // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; +// // let deadline_block = eth_sender.get_deadline_block(start_block); +// // let nonce = idx * 3; + +// // let commit_op_tx = +// // create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); + +// // expected_txs.push(commit_op_tx); + +// // // Create the verify operation, as by priority it will be processed right after `commit`. 
+// // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; +// // let deadline_block = eth_sender.get_deadline_block(start_block); +// // let nonce = idx * 3 + 1; + +// // let verify_op_tx = +// // create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); + +// // expected_txs.push(verify_op_tx); +// // } + +// // for operation in operations.iter() { +// // sender.try_send(operation.clone()).unwrap(); +// // } +// // eth_sender.retrieve_operations(); + +// // // Then we go through the operations and check that the order of operations is preserved. +// // for (idx, tx) in expected_txs.iter().enumerate() { +// // eth_sender.proceed_next_operations(); + +// // // Check that current expected tx is stored, but the next ones are not. +// // eth_sender.db.assert_stored(tx); +// // eth_sender.ethereum.assert_sent(tx); + +// // for following_tx in expected_txs[idx + 1..].iter() { +// // eth_sender.db.assert_not_stored(following_tx) +// // } + +// // eth_sender +// // .ethereum +// // .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// // eth_sender.proceed_next_operations(); +// // eth_sender.db.assert_confirmed(tx); + +// // if idx % 2 == 1 { +// // // For every verify operation, we should also add a withdraw operation and process it. +// // let raw_tx = eth_sender.ethereum.encode_tx_data( +// // "completeWithdrawals", +// // models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, +// // ); + +// // let nonce = (idx / 2) * 3 + 2; +// // let mut options = Options::default(); +// // options.nonce = Some(nonce.into()); + +// // let signed_tx = eth_sender +// // .ethereum +// // .sign_prepared_tx(raw_tx, options) +// // .unwrap(); + +// // eth_sender +// // .ethereum +// // .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); +// // eth_sender.proceed_next_operations(); +// // eth_sender.proceed_next_operations(); +// // } +// // } + +// // // We should be notified about all the verify operations being completed. +// // for _ in 0..operations_count { +// // assert!(receiver.try_next().unwrap().is_some()); +// // } +// // } + +// /// Check that upon a transaction failure the incident causes a panic by default. +// #[test] +// #[should_panic(expected = "Cannot operate after unexpected TX failure")] +// fn transaction_failure() { +// let (mut eth_sender, mut sender, _) = default_eth_sender(); + +// // Workflow for the test is similar to `operation_commitment_workflow`. +// let operation = test_data::commit_operation(0); +// sender.try_send(operation.clone()).unwrap(); + +// let nonce = 0; +// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); +// let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + +// eth_sender.retrieve_operations(); +// eth_sender.proceed_next_operations(); + +// eth_sender +// .ethereum +// .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); +// } + +// /// Check that after recovering state with several non-processed operations +// /// they will be processed normally. +// #[test] +// fn restore_state() { +// let (operations, stored_operations) = { +// // This `eth_sender` is required to generate the input only. 
+// let (eth_sender, _, _) = default_eth_sender(); + +// let commit_op = test_data::commit_operation(0); +// let verify_op = test_data::verify_operation(0); + +// let deadline_block = eth_sender.get_deadline_block(1); +// let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); +// let deadline_block = eth_sender.get_deadline_block(2); +// let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); + +// let operations = vec![commit_op.clone(), verify_op.clone()]; + +// // Create `OperationETHState` objects from operations and restore state +// let stored_operations = vec![ +// OperationETHState { +// operation: commit_op, +// txs: vec![commit_op_tx], +// }, +// OperationETHState { +// operation: verify_op, +// txs: vec![verify_op_tx], +// }, +// ]; + +// (operations, stored_operations) +// }; + +// let stats = ETHStats { +// commit_ops: 1, +// verify_ops: 1, +// withdraw_ops: 1, +// }; +// let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); + +// // We have to store txs in the database, since we've used them for the data restore. +// eth_sender +// .db +// .save_unconfirmed_operation(&stored_operations[0].txs[0]) +// .unwrap(); +// eth_sender +// .db +// .save_unconfirmed_operation(&stored_operations[1].txs[0]) +// .unwrap(); + +// for (nonce, operation) in operations.iter().enumerate() { +// // Note that we DO NOT send an operation to `ETHSender` and neither receive it. + +// // We do process operations restored from the DB though. +// // The rest of this test is the same as in `operation_commitment_workflow`. +// eth_sender.proceed_next_operations(); + +// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); +// let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + +// eth_sender +// .ethereum +// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); +// eth_sender.db.assert_confirmed(&expected_tx); // } + +// assert!(receiver.try_next().unwrap().is_some()); // } -/// Check that upon a transaction failure the incident causes a panic by default. -#[test] -#[should_panic(expected = "Cannot operate after unexpected TX failure")] -fn transaction_failure() { - let (mut eth_sender, mut sender, _) = default_eth_sender(); - - // Workflow for the test is similar to `operation_commitment_workflow`. - let operation = test_data::commit_operation(0); - sender.try_send(operation.clone()).unwrap(); - - let nonce = 0; - let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - - eth_sender.retrieve_operations(); - eth_sender.proceed_next_operations(); - - eth_sender - .ethereum - .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operations(); -} - -/// Check that after recovering state with several non-processed operations -/// they will be processed normally. -#[test] -fn restore_state() { - let (operations, stored_operations) = { - // This `eth_sender` is required to generate the input only. 
- let (eth_sender, _, _) = default_eth_sender(); - - let commit_op = test_data::commit_operation(0); - let verify_op = test_data::verify_operation(0); - - let deadline_block = eth_sender.get_deadline_block(1); - let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); - let deadline_block = eth_sender.get_deadline_block(2); - let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); - - let operations = vec![commit_op.clone(), verify_op.clone()]; - - // Create `OperationETHState` objects from operations and restore state - let stored_operations = vec![ - OperationETHState { - operation: commit_op, - txs: vec![commit_op_tx], - }, - OperationETHState { - operation: verify_op, - txs: vec![verify_op_tx], - }, - ]; - - (operations, stored_operations) - }; - - let stats = ETHStats { - commit_ops: 1, - verify_ops: 1, - withdraw_ops: 1, - }; - let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); - - // We have to store txs in the database, since we've used them for the data restore. - eth_sender - .db - .save_unconfirmed_operation(&stored_operations[0].txs[0]) - .unwrap(); - eth_sender - .db - .save_unconfirmed_operation(&stored_operations[1].txs[0]) - .unwrap(); - - for (nonce, operation) in operations.iter().enumerate() { - // Note that we DO NOT send an operation to `ETHSender` and neither receive it. - - // We do process operations restored from the DB though. - // The rest of this test is the same as in `operation_commitment_workflow`. - eth_sender.proceed_next_operations(); - - let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); - - eth_sender - .ethereum - .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operations(); - eth_sender.db.assert_confirmed(&expected_tx); - } - - assert!(receiver.try_next().unwrap().is_some()); -} - -/// Checks that even after getting the first transaction stuck and sending the next -/// one, confirmation for the first (stuck) transaction is processed and leads -/// to the operation commitment. -#[test] -fn confirmations_independence() { - // Workflow in the test is the same as in `stuck_transaction`, except for the fact - // that confirmation is obtained for the stuck transaction instead of the latter one. - - let (mut eth_sender, mut sender, _) = default_eth_sender(); - - let operation = test_data::commit_operation(0); - sender.try_send(operation.clone()).unwrap(); - - eth_sender.retrieve_operations(); - eth_sender.proceed_next_operations(); - - let nonce = 0; - let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - - eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; - eth_sender.proceed_next_operations(); - - let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); - let next_tx = eth_sender - .sign_raw_tx( - stuck_tx.op_id, - raw_tx, - eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - Some(&stuck_tx), - ) - .unwrap(); - eth_sender.db.assert_stored(&next_tx); - eth_sender.ethereum.assert_sent(&next_tx); - - // Add a confirmation for a *stuck* transaction. - eth_sender - .ethereum - .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); - eth_sender.proceed_next_operations(); - - // Check that operation is confirmed. 
- eth_sender.db.assert_confirmed(&stuck_tx); -} +// /// Checks that even after getting the first transaction stuck and sending the next +// /// one, confirmation for the first (stuck) transaction is processed and leads +// /// to the operation commitment. +// #[test] +// fn confirmations_independence() { +// // Workflow in the test is the same as in `stuck_transaction`, except for the fact +// // that confirmation is obtained for the stuck transaction instead of the latter one. + +// let (mut eth_sender, mut sender, _) = default_eth_sender(); + +// let operation = test_data::commit_operation(0); +// sender.try_send(operation.clone()).unwrap(); + +// eth_sender.retrieve_operations(); +// eth_sender.proceed_next_operations(); + +// let nonce = 0; +// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); +// let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + +// eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; +// eth_sender.proceed_next_operations(); + +// let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); +// let next_tx = eth_sender +// .sign_raw_tx( +// stuck_tx.op_id, +// raw_tx, +// eth_sender.get_deadline_block(eth_sender.ethereum.block_number), +// Some(&stuck_tx), +// ) +// .unwrap(); +// eth_sender.db.assert_stored(&next_tx); +// eth_sender.ethereum.assert_sent(&next_tx); + +// // Add a confirmation for a *stuck* transaction. +// eth_sender +// .ethereum +// .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); + +// // Check that operation is confirmed. +// eth_sender.db.assert_confirmed(&stuck_tx); +// } diff --git a/core/server/src/eth_sender/transactions.rs b/core/server/src/eth_sender/transactions.rs index 13e4fbfe24..539e100208 100644 --- a/core/server/src/eth_sender/transactions.rs +++ b/core/server/src/eth_sender/transactions.rs @@ -4,15 +4,10 @@ //! ZKSync and Ethereum blockchains synchronization. // Built-in deps -use std::str::FromStr; // External uses -use web3::types::{TransactionReceipt, H256, U256}; +use web3::types::TransactionReceipt; // Workspace uses -use eth_client::SignedCallResult; -use models::Operation; -use storage::ethereum::records::{ETHStats as StorageETHStats, StorageETHOperation}; - -pub use storage::ethereum::OperationType; +use storage::ethereum::records::ETHStats as StorageETHStats; /// Collected statistics of the amount of operations sent to the Ethereum. /// This structure represents the count of **operations**, and not transactions. @@ -38,59 +33,6 @@ impl From for ETHStats { } } -/// An intermediate state of the operation to be stored on -/// the Ethereum chain. -#[derive(Debug, Clone)] -pub(super) struct OperationETHState { - /// ZKSync operation to be stored. - pub operation: Operation, - /// List of sent Ethereum transactions that persist the - /// ZKSync operation. - /// It is empty at the beginning, and if everything goes - /// smoothly, it will not be extended more than once. - /// However, transactions can "stuck" and not be included in - /// the block, so `ETHSender` may try to send more transactions - /// to resolve the situation. - pub txs: Vec, -} - -/// Representation of the transaction sent to the Ethereum chain. -#[derive(Debug, Clone, PartialEq)] -pub struct TransactionETHState { - /// ZKSync operation identifier. - pub op_id: i64, - /// Block until which transaction should be committed. - /// Exceeding this limit will make the transaction considered to be stuck. 
- pub deadline_block: u64, - /// Raw Ethereum transaction with additional meta-information. - pub signed_tx: SignedCallResult, -} - -impl From for TransactionETHState { - fn from(stored: StorageETHOperation) -> Self { - TransactionETHState { - op_id: stored.op_id, - deadline_block: stored.deadline_block as u64, - signed_tx: SignedCallResult { - raw_tx: stored.raw_tx, - gas_price: U256::from_str(&stored.gas_price.to_string()).unwrap(), - nonce: U256::from(stored.nonce as u128), - hash: H256::from_slice(&stored.tx_hash), - }, - } - } -} - -impl TransactionETHState { - /// Checks whether the transaction is considered "stuck". - /// "Stuck" transactions are ones that were not included into any block - /// within a desirable amount of time, and thus require re-sending with - /// increased gas amount. - pub fn is_stuck(&self, current_block: u64) -> bool { - current_block >= self.deadline_block - } -} - /// State of the executed Ethereum transaction. #[derive(Debug, Clone)] pub(super) struct ExecutedTxStatus { diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 5e202d9217..e1e0ca942d 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -1,5 +1,5 @@ // Workspace imports -use models::Operation; +use models::{ethereum::OperationType, Operation}; // Local imports use self::{counter_queue::CounterQueue, sparse_queue::SparseQueue}; @@ -10,6 +10,7 @@ pub type RawTxData = Vec; #[derive(Debug)] pub struct TxData { + pub op_type: OperationType, pub raw: RawTxData, pub operation: Option, } @@ -21,15 +22,17 @@ impl PartialEq for TxData { } impl TxData { - pub fn from_operation(operation: Operation, raw: RawTxData) -> Self { + pub fn from_operation(op_type: OperationType, operation: Operation, raw: RawTxData) -> Self { Self { + op_type, raw, operation: Some(operation), } } - pub fn from_raw(raw: RawTxData) -> Self { + pub fn from_raw(op_type: OperationType, raw: RawTxData) -> Self { Self { + op_type, raw, operation: None, } @@ -238,12 +241,30 @@ mod tests { let mut queue = TxQueue::new(MAX_IN_FLY); // Add 2 commit, 2 verify and 2 withdraw operations. - queue.add_commit_operation(TxData::from_raw(vec![COMMIT_MARK, 0])); - queue.add_commit_operation(TxData::from_raw(vec![COMMIT_MARK, 1])); - queue.add_verify_operation(0, TxData::from_raw(vec![VERIFY_MARK, 0])); - queue.add_verify_operation(1, TxData::from_raw(vec![VERIFY_MARK, 1])); - queue.add_withdraw_operation(TxData::from_raw(vec![WITHDRAW_MARK, 0])); - queue.add_withdraw_operation(TxData::from_raw(vec![WITHDRAW_MARK, 1])); + queue.add_commit_operation(TxData::from_raw( + OperationType::Commit, + vec![COMMIT_MARK, 0], + )); + queue.add_commit_operation(TxData::from_raw( + OperationType::Commit, + vec![COMMIT_MARK, 1], + )); + queue.add_verify_operation( + 0, + TxData::from_raw(OperationType::Verify, vec![VERIFY_MARK, 0]), + ); + queue.add_verify_operation( + 1, + TxData::from_raw(OperationType::Verify, vec![VERIFY_MARK, 1]), + ); + queue.add_withdraw_operation(TxData::from_raw( + OperationType::Withdraw, + vec![WITHDRAW_MARK, 0], + )); + queue.add_withdraw_operation(TxData::from_raw( + OperationType::Withdraw, + vec![WITHDRAW_MARK, 1], + )); // Retrieve the next {MAX_IN_FLY} operations. 
From 4b917c78c0954997545b4643af637c23eed420a9 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 24 Mar 2020 11:11:03 +0200 Subject: [PATCH 085/186] CleaningUp => Preparation --- contracts/contracts/Events.sol | 4 ++-- contracts/contracts/UpgradeGatekeeper.sol | 24 +++++++++---------- contracts/scripts/test-upgrade-franklin.ts | 2 +- .../test/unit_tests/upgradeGatekeeper_test.ts | 10 ++++---- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index 65a51d322e..4b18a1615c 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -69,8 +69,8 @@ contract UpgradeEvents { uint64 version ); - /// @notice Upgrade mode cleaning up status event - event UpgradeModeCleaningUpStatusActivated( + /// @notice Upgrade mode preparation status event + event UpgradeModePreparationStatusActivated( address proxyAddress, uint64 version ); diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index e0909d7bd7..23f0acd702 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -8,7 +8,7 @@ import "./Ownable.sol"; /// @author Matter Labs contract UpgradeGatekeeper is UpgradeEvents, Ownable { - /// @notice Notice period before activation cleaning up status of upgrade mode (in seconds) + /// @notice Notice period before activation preparation status of upgrade mode (in seconds) uint256 constant NOTICE_PERIOD = 2 weeks; /// @notice Versions of proxy contracts @@ -21,7 +21,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { enum UpgradeStatus { Idle, NoticePeriod, - CleaningUp + Preparation } /// @notice Info for upgrade proxy @@ -37,7 +37,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { address nextTarget; /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade - /// @dev Will store zero in case of not active upgrade mode or not active cleaning up status of upgrade mode + /// @dev Will store zero in case of not active upgrade mode or not active preparation status of upgrade mode uint64 priorityOperationsToProcessBeforeUpgrade; } @@ -80,18 +80,18 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { emit UpgradeCanceled(proxyAddress, version[proxyAddress]); } - /// @notice Checks that cleaning up status is active and activates it if needed + /// @notice Checks that preparation status is active and activates it if needed /// @param proxyAddress Address of proxy to process - /// @return Bool flag indicating that cleaning up status is active after this call - function activateCleaningUpStatusOfUpgrade(address proxyAddress) public returns (bool) { - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "uaf11"); // uaf11 - unable to activate cleaning up status in case of not active upgrade mode + /// @return Bool flag indicating that preparation status is active after this call + function startPreparation(address proxyAddress) public returns (bool) { + require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "uaf11"); // uaf11 - unable to activate preparation status in case of not active upgrade mode - if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.CleaningUp) { + if (upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation) { return true; } if (now >= upgradeInfo[proxyAddress].activationTime + NOTICE_PERIOD) { - 
upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.CleaningUp; + upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Preparation; (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("totalRegisteredPriorityOperations()") @@ -100,7 +100,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { uint64 totalRegisteredPriorityOperations = abi.decode(encodedResult, (uint64)); upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; - emit UpgradeModeCleaningUpStatusActivated(proxyAddress, version[proxyAddress]); + emit UpgradeModePreparationStatusActivated(proxyAddress, version[proxyAddress]); return true; } else { return false; @@ -112,7 +112,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @param newTargetInitializationParameters New target initialization parameters function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.CleaningUp, "umf11"); // umf11 - unable to finish upgrade without cleaning up status active + require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation, "umf11"); // umf11 - unable to finish upgrade without preparation status active (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("totalVerifiedPriorityOperations()") @@ -120,7 +120,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { require(mainContractCallSuccess, "umf12"); // umf12 - main contract static call failed uint64 totalVerifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - require(totalVerifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of cleaning up status + require(totalVerifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of preparation status (bool proxyUpgradeCallSuccess, ) = proxyAddress.call( abi.encodeWithSignature("upgradeTarget(address,bytes)", upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters) diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index e9bc12534f..a2e7293f61 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -57,7 +57,7 @@ async function main() { await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); // finish upgrade - await (await upgradeGatekeeper.activateCleaningUpStatusOfUpgrade(proxyContract.address)).wait(); + await (await upgradeGatekeeper.startPreparation(proxyContract.address)).wait(); await (await upgradeGatekeeper.finishProxyUpgrade(proxyContract.address, [])).wait(); await expect(await proxyContract.getTarget()) diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 919ae66a51..1eacecee6d 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -50,7 +50,7 @@ describe("UpgradeGatekeeper unit tests", function () { it("checking UpgradeGatekeeper reverts; 
activation and cancelation upgrade", async () => { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address) )).revertReason).equal("uaf11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startPreparation(proxyTestContract.address) )).revertReason).equal("uaf11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address)) @@ -72,7 +72,7 @@ describe("UpgradeGatekeeper unit tests", function () { let activated_time = performance.now(); - // wait and activate cleaning up status + // wait and activate preparation status let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_NOTICE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { @@ -86,10 +86,10 @@ describe("UpgradeGatekeeper unit tests", function () { } if (step != 3) { - await UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address); + await UpgradeGatekeeperContract.startPreparation(proxyTestContract.address); } else { - await expect(UpgradeGatekeeperContract.activateCleaningUpStatusOfUpgrade(proxyTestContract.address)) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModeCleaningUpStatusActivated') + await expect(UpgradeGatekeeperContract.startPreparation(proxyTestContract.address)) + .to.emit(UpgradeGatekeeperContract, 'UpgradeModePreparationStatusActivated') .withArgs(proxyTestContract.address, 0) } } From 72a60ac91d07e8d5f7c5610567a87cd2311d8aa2 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 12:26:46 +0300 Subject: [PATCH 086/186] Make mock structures compilable again --- core/server/src/eth_sender/tests/mock.rs | 220 +++-- core/server/src/eth_sender/tests/mod.rs | 990 ++++++++++++----------- 2 files changed, 605 insertions(+), 605 deletions(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 3e0e162e93..87b48da8aa 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -2,118 +2,149 @@ // Built-in deps use std::cell::{Cell, RefCell}; -use std::collections::{HashMap, VecDeque}; +use std::collections::HashMap; // External uses use futures::channel::mpsc; use web3::contract::{tokens::Tokenize, Options}; use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; -use models::{ethereum::ETHOperation, Operation}; +use models::{ + ethereum::{ETHOperation, EthOpId}, + Operation, +}; // Local uses use super::ETHSender; use crate::eth_sender::database::DatabaseAccess; use crate::eth_sender::ethereum_interface::EthereumInterface; -use crate::eth_sender::transactions::{ - ETHStats, ExecutedTxStatus, OperationETHState, OperationType, TransactionETHState, -}; +use crate::eth_sender::transactions::{ETHStats, ExecutedTxStatus}; const CHANNEL_CAPACITY: usize = 16; /// Mock database is capable of recording all the incoming requests for the further analysis. 
#[derive(Debug, Default)] pub(super) struct MockDatabase { - restore_state: VecDeque, - unconfirmed_operations: RefCell>, - confirmed_operations: RefCell>, + restore_state: Vec, + unconfirmed_operations: RefCell>, + confirmed_operations: RefCell>, nonce: Cell, + pending_op_id: Cell, stats: RefCell, } impl MockDatabase { /// Creates a database with emulation of previously stored uncommitted requests. pub fn with_restorable_state( - restore_state: impl IntoIterator, + restore_state: impl IntoIterator, stats: ETHStats, ) -> Self { - let restore_state: VecDeque<_> = restore_state.into_iter().collect(); - let nonce = restore_state.iter().fold(0, |acc, op| acc + op.txs.len()); + let restore_state: Vec<_> = restore_state.into_iter().collect(); + let nonce = restore_state + .iter() + .fold(0, |acc, op| acc + op.used_tx_hashes.len()); + let pending_op_id = restore_state.len(); Self { restore_state, nonce: Cell::new(nonce as i64), + pending_op_id: Cell::new(pending_op_id as EthOpId), stats: RefCell::new(stats), ..Default::default() } } /// Ensures that the provided transaction is stored in the database and not confirmed yet. - pub fn assert_stored(&self, tx: &TransactionETHState) { - assert_eq!( - self.unconfirmed_operations.borrow().get(&tx.signed_tx.hash), - Some(tx) - ); + pub fn assert_stored(&self, id: i64, tx: ÐOperation) { + assert_eq!(self.unconfirmed_operations.borrow().get(&id), Some(tx)); - assert!(self - .confirmed_operations - .borrow() - .get(&tx.signed_tx.hash) - .is_none()); + assert!(self.confirmed_operations.borrow().get(&id).is_none()); } /// Ensures that the provided transaction is not stored in the database. - pub fn assert_not_stored(&self, tx: &TransactionETHState) { - assert!(self - .confirmed_operations - .borrow() - .get(&tx.signed_tx.hash) - .is_none()); + pub fn assert_not_stored(&self, id: i64, tx: ÐOperation) { + assert!(self.confirmed_operations.borrow().get(&id).is_none()); - assert!(self - .unconfirmed_operations - .borrow() - .get(&tx.signed_tx.hash) - .is_none()); + assert!(self.unconfirmed_operations.borrow().get(&id).is_none()); } /// Ensures that the provided transaction is stored as confirmed. 
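
// A minimal, self-contained sketch of the restart bookkeeping performed by
// `with_restorable_state` above: the first free nonce equals the total number
// of transactions ever sent (one per entry in `used_tx_hashes`), and the next
// operation id is simply the count of restored operations. `RestoredOp` is a
// local stand-in here, not the crate's `ETHOperation`.
struct RestoredOp {
    used_tx_hashes: Vec<[u8; 32]>,
}

fn restart_bookkeeping(restored: &[RestoredOp]) -> (i64, i64) {
    let next_nonce = restored
        .iter()
        .fold(0, |acc, op| acc + op.used_tx_hashes.len()) as i64;
    let next_op_id = restored.len() as i64;
    (next_nonce, next_op_id)
}

#[test]
fn restart_bookkeeping_example() {
    // Two restored operations: the first was re-sent once (two tx hashes),
    // the second was sent once, so three nonces are already consumed.
    let restored = vec![
        RestoredOp { used_tx_hashes: vec![[0; 32], [1; 32]] },
        RestoredOp { used_tx_hashes: vec![[2; 32]] },
    ];
    assert_eq!(restart_bookkeeping(&restored), (3, 2));
}
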
- pub fn assert_confirmed(&self, tx: &TransactionETHState) { - assert_eq!( - self.confirmed_operations.borrow().get(&tx.signed_tx.hash), - Some(tx) - ); + pub fn assert_confirmed(&self, id: i64, tx: ÐOperation) { + assert_eq!(self.confirmed_operations.borrow().get(&id), Some(tx)); - assert!(self - .unconfirmed_operations - .borrow() - .get(&tx.signed_tx.hash) - .is_none()); + assert!(self.unconfirmed_operations.borrow().get(&id).is_none()); } } impl DatabaseAccess for MockDatabase { - fn restore_state(&self) -> Result)>, failure::Error> { + fn restore_state(&self) -> Result, failure::Error> { Ok(self.restore_state.clone()) } - fn save_unconfirmed_operation(&self, tx: &TransactionETHState) -> Result<(), failure::Error> { + fn save_new_eth_tx(&self, op: ÐOperation) -> Result { + let id = self.pending_op_id.get(); + let new_id = id + 1; + self.pending_op_id.set(new_id); + + self.unconfirmed_operations + .borrow_mut() + .insert(id, op.clone()); + + Ok(id) + } + + fn update_eth_tx( + &self, + eth_op_id: EthOpId, + hash: &H256, + new_deadline_block: i64, + new_gas_value: U256, + ) -> Result<(), failure::Error> { + assert!( + self.unconfirmed_operations + .borrow() + .contains_key(ð_op_id), + "Attempt to update tx that is not unconfirmed" + ); + + let mut op = self + .unconfirmed_operations + .borrow() + .get(ð_op_id) + .unwrap() + .clone(); + + op.last_deadline_block = new_deadline_block as u64; + op.last_used_gas_price = new_gas_value; + op.used_tx_hashes.push(*hash); + self.unconfirmed_operations .borrow_mut() - .insert(tx.signed_tx.hash, tx.clone()); + .insert(eth_op_id, op); Ok(()) } fn confirm_operation(&self, hash: &H256) -> Result<(), failure::Error> { let mut unconfirmed_operations = self.unconfirmed_operations.borrow_mut(); + let mut op_idx: Option = None; + for operation in unconfirmed_operations.values_mut() { + if operation.used_tx_hashes.contains(hash) { + operation.confirmed = true; + operation.final_hash = Some(*hash); + op_idx = Some(operation.id); + break; + } + } + assert!( - unconfirmed_operations.contains_key(hash), + op_idx.is_some(), "Request to confirm operation that was not stored" ); + let op_idx = op_idx.unwrap(); - let operation = unconfirmed_operations.remove(hash).unwrap(); + let operation = unconfirmed_operations.remove(&op_idx).unwrap(); self.confirmed_operations .borrow_mut() - .insert(*hash, operation); + .insert(op_idx, operation); Ok(()) } @@ -129,27 +160,6 @@ impl DatabaseAccess for MockDatabase { fn load_stats(&self) -> Result { Ok(self.stats.borrow().clone()) } - - fn report_created_operation( - &self, - operation_type: OperationType, - ) -> Result<(), failure::Error> { - let mut stats = self.stats.borrow_mut(); - - match operation_type { - OperationType::Commit => { - stats.commit_ops += 1; - } - OperationType::Verify => { - stats.verify_ops += 1; - } - OperationType::Withdraw => { - stats.withdraw_ops += 1; - } - } - - Ok(()) - } } /// Mock Ethereum client is capable of recording all the incoming requests for the further analysis. @@ -188,15 +198,7 @@ impl MockEthereum { } /// Checks that there was a request to send the provided transaction. - pub fn assert_sent(&self, tx: &TransactionETHState) { - assert_eq!( - self.sent_txs.borrow().get(&tx.signed_tx.hash), - Some(&tx.signed_tx) - ); - } - - /// Checks that there was a request to send a transaction with the provided hash. 
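
// A simplified sketch of the data model the reworked mock tracks: one logical
// Ethereum operation may accumulate several transaction hashes (one per
// resend), and a confirmation for any of those hashes settles the whole
// operation while recording which hash finally landed. `Op` is a local
// stand-in reduced to the fields this bookkeeping needs, not the real
// `models::ethereum::ETHOperation`.
struct Op {
    id: i64,
    used_tx_hashes: Vec<[u8; 32]>,
    confirmed: bool,
    final_hash: Option<[u8; 32]>,
}

/// Marks the operation as confirmed if `hash` belongs to one of its sent txs.
fn confirm_by_hash(op: &mut Op, hash: [u8; 32]) -> bool {
    if op.used_tx_hashes.contains(&hash) {
        op.confirmed = true;
        op.final_hash = Some(hash);
        true
    } else {
        false
    }
}

#[test]
fn confirm_by_any_sent_hash() {
    // The operation was sent twice: the original tx and one gas-bumped resend.
    let mut op = Op {
        id: 0,
        used_tx_hashes: vec![[1; 32], [2; 32]],
        confirmed: false,
        final_hash: None,
    };
    // A confirmation for the first (stuck) tx is enough to settle it.
    assert!(confirm_by_hash(&mut op, [1; 32]));
    assert!(op.confirmed);
    assert_eq!(op.final_hash, Some([1; 32]));
}
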
- pub fn assert_sent_by_hash(&self, hash: &H256) { + pub fn assert_sent(&self, hash: &H256) { assert!( self.sent_txs.borrow().get(hash).is_some(), format!("Transaction with hash {:?} was not sent", hash), @@ -204,10 +206,8 @@ impl MockEthereum { } /// Adds an response for the sent transaction for `ETHSender` to receive. - pub fn add_execution(&mut self, tx: &TransactionETHState, status: &ExecutedTxStatus) { - self.tx_statuses - .borrow_mut() - .insert(tx.signed_tx.hash, status.clone()); + pub fn add_execution(&mut self, hash: &H256, status: &ExecutedTxStatus) { + self.tx_statuses.borrow_mut().insert(*hash, status.clone()); } /// Increments the blocks by a provided `confirmations` and marks the sent transaction @@ -224,7 +224,7 @@ impl MockEthereum { } /// Same as `add_successfull_execution`, but marks the transaction as a failure. - pub fn add_failed_execution(&mut self, tx: &TransactionETHState, confirmations: u64) { + pub fn add_failed_execution(&mut self, hash: &H256, confirmations: u64) { self.block_number += confirmations; let status = ExecutedTxStatus { @@ -232,9 +232,7 @@ impl MockEthereum { success: false, receipt: Some(Default::default()), }; - self.tx_statuses - .borrow_mut() - .insert(tx.signed_tx.hash, status); + self.tx_statuses.borrow_mut().insert(*hash, status); } } @@ -300,7 +298,7 @@ pub(super) fn default_eth_sender() -> ( /// Creates an `ETHSender` with mock Ethereum connection/database and restores its state "from DB". /// Returns the `ETHSender` itself along with communication channels to interact with it. pub(super) fn restored_eth_sender( - restore_state: impl IntoIterator, + restore_state: impl IntoIterator, stats: ETHStats, ) -> ( ETHSender, @@ -320,27 +318,27 @@ pub(super) fn restored_eth_sender( ) } -/// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. -/// This method should be used to create expected tx copies which won't affect -/// the internal `ETHSender` state. -pub(super) fn create_signed_tx( - eth_sender: ÐSender, - operation: &Operation, - deadline_block: u64, - nonce: i64, -) -> TransactionETHState { - let mut options = Options::default(); - options.nonce = Some(nonce.into()); - - let raw_tx = eth_sender.operation_to_raw_tx(&operation); - let signed_tx = eth_sender - .ethereum - .sign_prepared_tx(raw_tx, options) - .unwrap(); - - TransactionETHState { - op_id: operation.id.unwrap(), - deadline_block, - signed_tx, - } -} +// /// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. +// /// This method should be used to create expected tx copies which won't affect +// /// the internal `ETHSender` state. 
+// pub(super) fn create_signed_tx( +// eth_sender: ÐSender, +// operation: &Operation, +// deadline_block: u64, +// nonce: i64, +// ) -> ETHOperation { +// let mut options = Options::default(); +// options.nonce = Some(nonce.into()); + +// let raw_tx = eth_sender.operation_to_raw_tx(&operation); +// let signed_tx = eth_sender +// .ethereum +// .sign_prepared_tx(raw_tx, options) +// .unwrap(); + +// TransactionETHState { +// op_id: operation.id.unwrap(), +// deadline_block, +// signed_tx, +// } +// } diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index e3f056e59a..e2adf0092f 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -1,518 +1,520 @@ -// // External uses -// use web3::contract::Options; -// // Local uses -// use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; -// use super::{ -// database::DatabaseAccess, -// ethereum_interface::EthereumInterface, -// transactions::{ -// ETHStats, ExecutedTxStatus, OperationETHState, TransactionETHState, TxCheckOutcome, -// }, -// ETHSender, -// }; - -// mod mock; -// mod test_data; - -// /// Basic test that `ETHSender` creation does not panic and initializes correctly. +// External uses +use web3::contract::Options; +// Local uses +use self::mock::{default_eth_sender, restored_eth_sender}; +use super::{ + database::DatabaseAccess, + ethereum_interface::EthereumInterface, + transactions::{ETHStats, ExecutedTxStatus, TxCheckOutcome}, + ETHSender, +}; + +mod mock; +mod test_data; + +/* + +/// Basic test that `ETHSender` creation does not panic and initializes correctly. +#[test] +fn basic_test() { + let (eth_sender, _, _) = default_eth_sender(); + + // Check that there are no unconfirmed operations by default. + assert!(eth_sender.ongoing_ops.is_empty()); +} + +/// Check for the gas scaling: gas is expected to be increased by 15% or set equal +/// to gas cost suggested by Ethereum (if it's greater). +#[test] +fn scale_gas() { + let (mut eth_sender, _, _) = default_eth_sender(); + + // Set the gas price in Ethereum to 1000. + eth_sender.ethereum.gas_price = 1000.into(); + + // Check that gas price of 1000 is increased to 1150. + let scaled_gas = eth_sender.scale_gas(1000.into()).unwrap(); + assert_eq!(scaled_gas, 1150.into()); + + // Check that gas price of 100 is increased to 1000 (price in Ethereum object). + let scaled_gas = eth_sender.scale_gas(100.into()).unwrap(); + assert_eq!(scaled_gas, 1000.into()); +} + +/// Checks that deadline block is chosen according to the expected policy. +#[test] +fn deadline_block() { + let (eth_sender, _, _) = default_eth_sender(); + + assert_eq!( + eth_sender.get_deadline_block(0), + super::EXPECTED_WAIT_TIME_BLOCKS + ); + assert_eq!( + eth_sender.get_deadline_block(10), + 10 + super::EXPECTED_WAIT_TIME_BLOCKS + ); +} + +/// Checks that received transaction response is reduced to the +/// `TxCheckOutcome` correctly. +/// +/// Here we check every possible output of the `check_transaction_state` method. +#[test] +fn transaction_state() { + let (mut eth_sender, _, _) = default_eth_sender(); + let current_block = eth_sender.ethereum.block_number; + let deadline_block = eth_sender.get_deadline_block(current_block); + let operations: Vec = vec![ + test_data::commit_operation(0), // Will be committed. + test_data::commit_operation(1), // Will be pending because of not enough confirmations. + test_data::commit_operation(2), // Will be failed. + test_data::commit_operation(3), // Will be stuck. 
+ test_data::commit_operation(4), // Will be pending due no response. + ] + .iter() + .enumerate() + .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) + .collect(); + + // Committed operation. + let committed_response = ExecutedTxStatus { + confirmations: super::WAIT_CONFIRMATIONS, + success: true, + receipt: None, + }; + eth_sender + .ethereum + .add_execution(&operations[0], &committed_response); + + // Pending operation. + let pending_response = ExecutedTxStatus { + confirmations: super::WAIT_CONFIRMATIONS - 1, + success: true, + receipt: None, + }; + eth_sender + .ethereum + .add_execution(&operations[1], &pending_response); + + // Failed operation. + let failed_response = ExecutedTxStatus { + confirmations: super::WAIT_CONFIRMATIONS, + success: false, + receipt: Some(Default::default()), + }; + eth_sender + .ethereum + .add_execution(&operations[2], &failed_response); + + // Checks. + + // Committed operation. + assert_eq!( + eth_sender + .check_transaction_state( + &operations[0], + current_block + committed_response.confirmations + ) + .unwrap(), + TxCheckOutcome::Committed + ); + + // Pending operation (no enough confirmations). + assert_eq!( + eth_sender + .check_transaction_state( + &operations[1], + current_block + pending_response.confirmations + ) + .unwrap(), + TxCheckOutcome::Pending + ); + + // Failed operation. + assert_eq!( + eth_sender + .check_transaction_state( + &operations[2], + current_block + failed_response.confirmations + ) + .unwrap(), + TxCheckOutcome::Failed(Default::default()) + ); + + // Stuck operation. + assert_eq!( + eth_sender + .check_transaction_state( + &operations[3], + current_block + super::EXPECTED_WAIT_TIME_BLOCKS + ) + .unwrap(), + TxCheckOutcome::Stuck + ); + + // Pending operation (no response yet). + assert_eq!( + eth_sender + .check_transaction_state( + &operations[4], + current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 + ) + .unwrap(), + TxCheckOutcome::Pending + ); +} + +/// Test for a normal `ETHSender` workflow: +/// - we send the two sequential operations (commit and verify); +/// - they are successfully committed to the Ethereum; +/// - `completeWithdrawals` tx is sent to the Ethereum; +/// - notification is sent after `verify` operation is committed. +#[test] +fn operation_commitment_workflow() { + let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); + + // In this test we will run one commit and one verify operation and should + // obtain a notification about the operation being completed in the end. + let operations = vec![ + test_data::commit_operation(0), + test_data::verify_operation(0), + ]; + + let verify_operation_id = operations[1].id; + + for (nonce, operation) in operations.iter().enumerate() { + // Send an operation to `ETHSender`. + sender.try_send(operation.clone()).unwrap(); + + // Retrieve it there and then process. + eth_sender.retrieve_operations(); + eth_sender.proceed_next_operations(); + + // Now we should see that transaction is stored in the database and sent to the Ethereum. + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + eth_sender.db.assert_stored(&expected_tx); + eth_sender.ethereum.assert_sent(&expected_tx); + + // No confirmation should be done yet. + assert!(receiver.try_next().is_err()); + + // Increment block, make the transaction look successfully executed, and process the + // operation again. 
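
// The `scale_gas` expectations exercised earlier in this module (1000 -> 1150,
// 100 -> 1000) amount to "bump the previous gas price by 15%, but never go
// below the network-suggested price". A stand-in sketch with plain u64 values,
// leaving out the U256 arithmetic and error handling of the real method:
fn scale_gas_sketch(previous_gas_price: u64, network_gas_price: u64) -> u64 {
    let bumped = previous_gas_price * 115 / 100;
    bumped.max(network_gas_price)
}

#[test]
fn scale_gas_sketch_matches_expectations() {
    // With the network price at 1000: 1000 is bumped to 1150,
    // while 100 is raised straight to the network price.
    assert_eq!(scale_gas_sketch(1000, 1000), 1150);
    assert_eq!(scale_gas_sketch(100, 1000), 1000);
}
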
+ eth_sender + .ethereum + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + + // Check that operation is confirmed. + eth_sender.db.assert_confirmed(&expected_tx); + } + + // Process the next operation and check that `completeWithdrawals` transaction is sent. + eth_sender.proceed_next_operations(); + let mut options = Options::default(); + let nonce = operations.len().into(); + options.nonce = Some(nonce); + let raw_tx = eth_sender.ethereum.encode_tx_data( + "completeWithdrawals", + models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, + ); + let tx = eth_sender + .ethereum + .sign_prepared_tx(raw_tx, options) + .unwrap(); + eth_sender.ethereum.assert_sent_by_hash(&tx.hash); + + // We should be notified about verify operation being completed. + assert_eq!( + receiver.try_next().unwrap().unwrap().id, + verify_operation_id + ); +} + +/// A simple scenario for a stuck transaction: +/// - A transaction is sent to the Ethereum. +/// - It is not processed after some blocks. +/// - `ETHSender` creates a new transaction with increased gas. +/// - This transaction is completed successfully. +#[test] +fn stuck_transaction() { + let (mut eth_sender, mut sender, _) = default_eth_sender(); + + // Workflow for the test is similar to `operation_commitment_workflow`. + let operation = test_data::commit_operation(0); + sender.try_send(operation.clone()).unwrap(); + + eth_sender.retrieve_operations(); + eth_sender.proceed_next_operations(); + + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + + // Skip some blocks and expect sender to send a new tx. + eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; + eth_sender.proceed_next_operations(); + + // Check that new transaction is sent (and created based on the previous stuck tx). + let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); + let expected_tx = eth_sender + .sign_raw_tx( + stuck_tx.op_id, + raw_tx, + eth_sender.get_deadline_block(eth_sender.ethereum.block_number), + Some(&stuck_tx), + ) + .unwrap(); + eth_sender.db.assert_stored(&expected_tx); + eth_sender.ethereum.assert_sent(&expected_tx); + + // Increment block, make the transaction look successfully executed, and process the + // operation again. + eth_sender + .ethereum + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + + // Check that operation is confirmed. + eth_sender.db.assert_confirmed(&expected_tx); +} + +// TODO: Restore once withdraw operations are fixed in `eth_sender`. +// Currently this test is too hard to implement, since withdraw txs are not stored in the database. +// /// This test verifies that with multiple operations received all-together, +// /// their order is respected and no processing of the next operation is started until +// /// the previous one is committed. // #[test] -// fn basic_test() { -// let (eth_sender, _, _) = default_eth_sender(); - -// // Check that there are no unconfirmed operations by default. -// assert!(eth_sender.ongoing_ops.is_empty()); -// } - -// /// Check for the gas scaling: gas is expected to be increased by 15% or set equal -// /// to gas cost suggested by Ethereum (if it's greater). -// #[test] -// fn scale_gas() { -// let (mut eth_sender, _, _) = default_eth_sender(); - -// // Set the gas price in Ethereum to 1000. 
-// eth_sender.ethereum.gas_price = 1000.into(); - -// // Check that gas price of 1000 is increased to 1150. -// let scaled_gas = eth_sender.scale_gas(1000.into()).unwrap(); -// assert_eq!(scaled_gas, 1150.into()); - -// // Check that gas price of 100 is increased to 1000 (price in Ethereum object). -// let scaled_gas = eth_sender.scale_gas(100.into()).unwrap(); -// assert_eq!(scaled_gas, 1000.into()); -// } - -// /// Checks that deadline block is chosen according to the expected policy. -// #[test] -// fn deadline_block() { -// let (eth_sender, _, _) = default_eth_sender(); - -// assert_eq!( -// eth_sender.get_deadline_block(0), -// super::EXPECTED_WAIT_TIME_BLOCKS -// ); -// assert_eq!( -// eth_sender.get_deadline_block(10), -// 10 + super::EXPECTED_WAIT_TIME_BLOCKS -// ); -// } - -// /// Checks that received transaction response is reduced to the -// /// `TxCheckOutcome` correctly. -// /// -// /// Here we check every possible output of the `check_transaction_state` method. -// #[test] -// fn transaction_state() { -// let (mut eth_sender, _, _) = default_eth_sender(); -// let current_block = eth_sender.ethereum.block_number; -// let deadline_block = eth_sender.get_deadline_block(current_block); -// let operations: Vec = vec![ -// test_data::commit_operation(0), // Will be committed. -// test_data::commit_operation(1), // Will be pending because of not enough confirmations. -// test_data::commit_operation(2), // Will be failed. -// test_data::commit_operation(3), // Will be stuck. -// test_data::commit_operation(4), // Will be pending due no response. -// ] -// .iter() -// .enumerate() -// .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) -// .collect(); - -// // Committed operation. -// let committed_response = ExecutedTxStatus { -// confirmations: super::WAIT_CONFIRMATIONS, -// success: true, -// receipt: None, -// }; -// eth_sender -// .ethereum -// .add_execution(&operations[0], &committed_response); - -// // Pending operation. -// let pending_response = ExecutedTxStatus { -// confirmations: super::WAIT_CONFIRMATIONS - 1, -// success: true, -// receipt: None, -// }; -// eth_sender -// .ethereum -// .add_execution(&operations[1], &pending_response); - -// // Failed operation. -// let failed_response = ExecutedTxStatus { -// confirmations: super::WAIT_CONFIRMATIONS, -// success: false, -// receipt: Some(Default::default()), -// }; -// eth_sender -// .ethereum -// .add_execution(&operations[2], &failed_response); - -// // Checks. - -// // Committed operation. -// assert_eq!( -// eth_sender -// .check_transaction_state( -// &operations[0], -// current_block + committed_response.confirmations -// ) -// .unwrap(), -// TxCheckOutcome::Committed -// ); - -// // Pending operation (no enough confirmations). -// assert_eq!( -// eth_sender -// .check_transaction_state( -// &operations[1], -// current_block + pending_response.confirmations -// ) -// .unwrap(), -// TxCheckOutcome::Pending -// ); - -// // Failed operation. -// assert_eq!( -// eth_sender -// .check_transaction_state( -// &operations[2], -// current_block + failed_response.confirmations -// ) -// .unwrap(), -// TxCheckOutcome::Failed(Default::default()) -// ); - -// // Stuck operation. -// assert_eq!( -// eth_sender -// .check_transaction_state( -// &operations[3], -// current_block + super::EXPECTED_WAIT_TIME_BLOCKS -// ) -// .unwrap(), -// TxCheckOutcome::Stuck -// ); - -// // Pending operation (no response yet). 
-// assert_eq!( -// eth_sender -// .check_transaction_state( -// &operations[4], -// current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 -// ) -// .unwrap(), -// TxCheckOutcome::Pending -// ); -// } - -// /// Test for a normal `ETHSender` workflow: -// /// - we send the two sequential operations (commit and verify); -// /// - they are successfully committed to the Ethereum; -// /// - `completeWithdrawals` tx is sent to the Ethereum; -// /// - notification is sent after `verify` operation is committed. -// #[test] -// fn operation_commitment_workflow() { +// fn operations_order() { // let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); -// // In this test we will run one commit and one verify operation and should -// // obtain a notification about the operation being completed in the end. -// let operations = vec![ -// test_data::commit_operation(0), -// test_data::verify_operation(0), -// ]; +// // We send multiple the operations at once to the channel. +// let operations_count = 3; +// let mut operations = Vec::new(); +// let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; +// let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; +// operations.extend_from_slice(commit_operations); +// operations.extend_from_slice(verify_operations); -// let verify_operation_id = operations[1].id; +// // Also we create the list of expected transactions. +// let mut expected_txs = Vec::new(); -// for (nonce, operation) in operations.iter().enumerate() { -// // Send an operation to `ETHSender`. -// sender.try_send(operation.clone()).unwrap(); +// // Create expected txs from all the operations. +// for (idx, (commit_operation, verify_operation)) in +// commit_operations.iter().zip(verify_operations).enumerate() +// { +// // Create the commit operation. +// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; +// let deadline_block = eth_sender.get_deadline_block(start_block); +// let nonce = idx * 3; -// // Retrieve it there and then process. -// eth_sender.retrieve_operations(); -// eth_sender.proceed_next_operations(); +// let commit_op_tx = +// create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); -// // Now we should see that transaction is stored in the database and sent to the Ethereum. -// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); -// let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); -// eth_sender.db.assert_stored(&expected_tx); -// eth_sender.ethereum.assert_sent(&expected_tx); +// expected_txs.push(commit_op_tx); -// // No confirmation should be done yet. -// assert!(receiver.try_next().is_err()); +// // Create the verify operation, as by priority it will be processed right after `commit`. +// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; +// let deadline_block = eth_sender.get_deadline_block(start_block); +// let nonce = idx * 3 + 1; -// // Increment block, make the transaction look successfully executed, and process the -// // operation again. -// eth_sender -// .ethereum -// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); +// let verify_op_tx = +// create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); -// // Check that operation is confirmed. 
-// eth_sender.db.assert_confirmed(&expected_tx); +// expected_txs.push(verify_op_tx); // } -// // Process the next operation and check that `completeWithdrawals` transaction is sent. -// eth_sender.proceed_next_operations(); -// let mut options = Options::default(); -// let nonce = operations.len().into(); -// options.nonce = Some(nonce); -// let raw_tx = eth_sender.ethereum.encode_tx_data( -// "completeWithdrawals", -// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, -// ); -// let tx = eth_sender -// .ethereum -// .sign_prepared_tx(raw_tx, options) -// .unwrap(); -// eth_sender.ethereum.assert_sent_by_hash(&tx.hash); - -// // We should be notified about verify operation being completed. -// assert_eq!( -// receiver.try_next().unwrap().unwrap().id, -// verify_operation_id -// ); -// } - -// /// A simple scenario for a stuck transaction: -// /// - A transaction is sent to the Ethereum. -// /// - It is not processed after some blocks. -// /// - `ETHSender` creates a new transaction with increased gas. -// /// - This transaction is completed successfully. -// #[test] -// fn stuck_transaction() { -// let (mut eth_sender, mut sender, _) = default_eth_sender(); - -// // Workflow for the test is similar to `operation_commitment_workflow`. -// let operation = test_data::commit_operation(0); -// sender.try_send(operation.clone()).unwrap(); - -// eth_sender.retrieve_operations(); -// eth_sender.proceed_next_operations(); - -// let nonce = 0; -// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); -// let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - -// // Skip some blocks and expect sender to send a new tx. -// eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; -// eth_sender.proceed_next_operations(); - -// // Check that new transaction is sent (and created based on the previous stuck tx). -// let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); -// let expected_tx = eth_sender -// .sign_raw_tx( -// stuck_tx.op_id, -// raw_tx, -// eth_sender.get_deadline_block(eth_sender.ethereum.block_number), -// Some(&stuck_tx), -// ) -// .unwrap(); -// eth_sender.db.assert_stored(&expected_tx); -// eth_sender.ethereum.assert_sent(&expected_tx); - -// // Increment block, make the transaction look successfully executed, and process the -// // operation again. -// eth_sender -// .ethereum -// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); - -// // Check that operation is confirmed. -// eth_sender.db.assert_confirmed(&expected_tx); -// } - -// // TODO: Restore once withdraw operations are fixed in `eth_sender`. -// // Currently this test is too hard to implement, since withdraw txs are not stored in the database. -// // /// This test verifies that with multiple operations received all-together, -// // /// their order is respected and no processing of the next operation is started until -// // /// the previous one is committed. -// // #[test] -// // fn operations_order() { -// // let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); - -// // // We send multiple the operations at once to the channel. 
-// // let operations_count = 3; -// // let mut operations = Vec::new(); -// // let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; -// // let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; -// // operations.extend_from_slice(commit_operations); -// // operations.extend_from_slice(verify_operations); - -// // // Also we create the list of expected transactions. -// // let mut expected_txs = Vec::new(); - -// // // Create expected txs from all the operations. -// // for (idx, (commit_operation, verify_operation)) in -// // commit_operations.iter().zip(verify_operations).enumerate() -// // { -// // // Create the commit operation. -// // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; -// // let deadline_block = eth_sender.get_deadline_block(start_block); -// // let nonce = idx * 3; - -// // let commit_op_tx = -// // create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); - -// // expected_txs.push(commit_op_tx); - -// // // Create the verify operation, as by priority it will be processed right after `commit`. -// // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; -// // let deadline_block = eth_sender.get_deadline_block(start_block); -// // let nonce = idx * 3 + 1; - -// // let verify_op_tx = -// // create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); - -// // expected_txs.push(verify_op_tx); -// // } - -// // for operation in operations.iter() { -// // sender.try_send(operation.clone()).unwrap(); -// // } -// // eth_sender.retrieve_operations(); - -// // // Then we go through the operations and check that the order of operations is preserved. -// // for (idx, tx) in expected_txs.iter().enumerate() { -// // eth_sender.proceed_next_operations(); - -// // // Check that current expected tx is stored, but the next ones are not. -// // eth_sender.db.assert_stored(tx); -// // eth_sender.ethereum.assert_sent(tx); - -// // for following_tx in expected_txs[idx + 1..].iter() { -// // eth_sender.db.assert_not_stored(following_tx) -// // } - -// // eth_sender -// // .ethereum -// // .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); -// // eth_sender.proceed_next_operations(); -// // eth_sender.db.assert_confirmed(tx); - -// // if idx % 2 == 1 { -// // // For every verify operation, we should also add a withdraw operation and process it. -// // let raw_tx = eth_sender.ethereum.encode_tx_data( -// // "completeWithdrawals", -// // models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, -// // ); - -// // let nonce = (idx / 2) * 3 + 2; -// // let mut options = Options::default(); -// // options.nonce = Some(nonce.into()); - -// // let signed_tx = eth_sender -// // .ethereum -// // .sign_prepared_tx(raw_tx, options) -// // .unwrap(); - -// // eth_sender -// // .ethereum -// // .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); -// // eth_sender.proceed_next_operations(); -// // eth_sender.proceed_next_operations(); -// // } -// // } - -// // // We should be notified about all the verify operations being completed. -// // for _ in 0..operations_count { -// // assert!(receiver.try_next().unwrap().is_some()); -// // } -// // } - -// /// Check that upon a transaction failure the incident causes a panic by default. 
-// #[test] -// #[should_panic(expected = "Cannot operate after unexpected TX failure")] -// fn transaction_failure() { -// let (mut eth_sender, mut sender, _) = default_eth_sender(); - -// // Workflow for the test is similar to `operation_commitment_workflow`. -// let operation = test_data::commit_operation(0); -// sender.try_send(operation.clone()).unwrap(); - -// let nonce = 0; -// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); -// let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - +// for operation in operations.iter() { +// sender.try_send(operation.clone()).unwrap(); +// } // eth_sender.retrieve_operations(); -// eth_sender.proceed_next_operations(); - -// eth_sender -// .ethereum -// .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); -// } -// /// Check that after recovering state with several non-processed operations -// /// they will be processed normally. -// #[test] -// fn restore_state() { -// let (operations, stored_operations) = { -// // This `eth_sender` is required to generate the input only. -// let (eth_sender, _, _) = default_eth_sender(); - -// let commit_op = test_data::commit_operation(0); -// let verify_op = test_data::verify_operation(0); - -// let deadline_block = eth_sender.get_deadline_block(1); -// let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); -// let deadline_block = eth_sender.get_deadline_block(2); -// let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); - -// let operations = vec![commit_op.clone(), verify_op.clone()]; - -// // Create `OperationETHState` objects from operations and restore state -// let stored_operations = vec![ -// OperationETHState { -// operation: commit_op, -// txs: vec![commit_op_tx], -// }, -// OperationETHState { -// operation: verify_op, -// txs: vec![verify_op_tx], -// }, -// ]; - -// (operations, stored_operations) -// }; - -// let stats = ETHStats { -// commit_ops: 1, -// verify_ops: 1, -// withdraw_ops: 1, -// }; -// let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); - -// // We have to store txs in the database, since we've used them for the data restore. -// eth_sender -// .db -// .save_unconfirmed_operation(&stored_operations[0].txs[0]) -// .unwrap(); -// eth_sender -// .db -// .save_unconfirmed_operation(&stored_operations[1].txs[0]) -// .unwrap(); - -// for (nonce, operation) in operations.iter().enumerate() { -// // Note that we DO NOT send an operation to `ETHSender` and neither receive it. - -// // We do process operations restored from the DB though. -// // The rest of this test is the same as in `operation_commitment_workflow`. +// // Then we go through the operations and check that the order of operations is preserved. +// for (idx, tx) in expected_txs.iter().enumerate() { // eth_sender.proceed_next_operations(); -// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); -// let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); +// // Check that current expected tx is stored, but the next ones are not. 
+// eth_sender.db.assert_stored(tx); +// eth_sender.ethereum.assert_sent(tx); + +// for following_tx in expected_txs[idx + 1..].iter() { +// eth_sender.db.assert_not_stored(following_tx) +// } // eth_sender // .ethereum -// .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); +// .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); // eth_sender.proceed_next_operations(); -// eth_sender.db.assert_confirmed(&expected_tx); +// eth_sender.db.assert_confirmed(tx); + +// if idx % 2 == 1 { +// // For every verify operation, we should also add a withdraw operation and process it. +// let raw_tx = eth_sender.ethereum.encode_tx_data( +// "completeWithdrawals", +// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, +// ); + +// let nonce = (idx / 2) * 3 + 2; +// let mut options = Options::default(); +// options.nonce = Some(nonce.into()); + +// let signed_tx = eth_sender +// .ethereum +// .sign_prepared_tx(raw_tx, options) +// .unwrap(); + +// eth_sender +// .ethereum +// .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); +// eth_sender.proceed_next_operations(); +// eth_sender.proceed_next_operations(); +// } // } -// assert!(receiver.try_next().unwrap().is_some()); +// // We should be notified about all the verify operations being completed. +// for _ in 0..operations_count { +// assert!(receiver.try_next().unwrap().is_some()); +// } // } -// /// Checks that even after getting the first transaction stuck and sending the next -// /// one, confirmation for the first (stuck) transaction is processed and leads -// /// to the operation commitment. -// #[test] -// fn confirmations_independence() { -// // Workflow in the test is the same as in `stuck_transaction`, except for the fact -// // that confirmation is obtained for the stuck transaction instead of the latter one. - -// let (mut eth_sender, mut sender, _) = default_eth_sender(); - -// let operation = test_data::commit_operation(0); -// sender.try_send(operation.clone()).unwrap(); - -// eth_sender.retrieve_operations(); -// eth_sender.proceed_next_operations(); - -// let nonce = 0; -// let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); -// let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); - -// eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; -// eth_sender.proceed_next_operations(); - -// let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); -// let next_tx = eth_sender -// .sign_raw_tx( -// stuck_tx.op_id, -// raw_tx, -// eth_sender.get_deadline_block(eth_sender.ethereum.block_number), -// Some(&stuck_tx), -// ) -// .unwrap(); -// eth_sender.db.assert_stored(&next_tx); -// eth_sender.ethereum.assert_sent(&next_tx); - -// // Add a confirmation for a *stuck* transaction. -// eth_sender -// .ethereum -// .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); - -// // Check that operation is confirmed. -// eth_sender.db.assert_confirmed(&stuck_tx); -// } +/// Check that upon a transaction failure the incident causes a panic by default. +#[test] +#[should_panic(expected = "Cannot operate after unexpected TX failure")] +fn transaction_failure() { + let (mut eth_sender, mut sender, _) = default_eth_sender(); + + // Workflow for the test is similar to `operation_commitment_workflow`. 
+ let operation = test_data::commit_operation(0); + sender.try_send(operation.clone()).unwrap(); + + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + + eth_sender.retrieve_operations(); + eth_sender.proceed_next_operations(); + + eth_sender + .ethereum + .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); +} + +/// Check that after recovering state with several non-processed operations +/// they will be processed normally. +#[test] +fn restore_state() { + let (operations, stored_operations) = { + // This `eth_sender` is required to generate the input only. + let (eth_sender, _, _) = default_eth_sender(); + + let commit_op = test_data::commit_operation(0); + let verify_op = test_data::verify_operation(0); + + let deadline_block = eth_sender.get_deadline_block(1); + let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); + let deadline_block = eth_sender.get_deadline_block(2); + let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); + + let operations = vec![commit_op.clone(), verify_op.clone()]; + + // Create `OperationETHState` objects from operations and restore state + let stored_operations = vec![ + OperationETHState { + operation: commit_op, + txs: vec![commit_op_tx], + }, + OperationETHState { + operation: verify_op, + txs: vec![verify_op_tx], + }, + ]; + + (operations, stored_operations) + }; + + let stats = ETHStats { + commit_ops: 1, + verify_ops: 1, + withdraw_ops: 1, + }; + let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); + + // We have to store txs in the database, since we've used them for the data restore. + eth_sender + .db + .save_unconfirmed_operation(&stored_operations[0].txs[0]) + .unwrap(); + eth_sender + .db + .save_unconfirmed_operation(&stored_operations[1].txs[0]) + .unwrap(); + + for (nonce, operation) in operations.iter().enumerate() { + // Note that we DO NOT send an operation to `ETHSender` and neither receive it. + + // We do process operations restored from the DB though. + // The rest of this test is the same as in `operation_commitment_workflow`. + eth_sender.proceed_next_operations(); + + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + + eth_sender + .ethereum + .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + eth_sender.db.assert_confirmed(&expected_tx); + } + + assert!(receiver.try_next().unwrap().is_some()); +} + +/// Checks that even after getting the first transaction stuck and sending the next +/// one, confirmation for the first (stuck) transaction is processed and leads +/// to the operation commitment. +#[test] +fn confirmations_independence() { + // Workflow in the test is the same as in `stuck_transaction`, except for the fact + // that confirmation is obtained for the stuck transaction instead of the latter one. 
+ + let (mut eth_sender, mut sender, _) = default_eth_sender(); + + let operation = test_data::commit_operation(0); + sender.try_send(operation.clone()).unwrap(); + + eth_sender.retrieve_operations(); + eth_sender.proceed_next_operations(); + + let nonce = 0; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + + eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; + eth_sender.proceed_next_operations(); + + let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); + let next_tx = eth_sender + .sign_raw_tx( + stuck_tx.op_id, + raw_tx, + eth_sender.get_deadline_block(eth_sender.ethereum.block_number), + Some(&stuck_tx), + ) + .unwrap(); + eth_sender.db.assert_stored(&next_tx); + eth_sender.ethereum.assert_sent(&next_tx); + + // Add a confirmation for a *stuck* transaction. + eth_sender + .ethereum + .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + + // Check that operation is confirmed. + eth_sender.db.assert_confirmed(&stuck_tx); +} + +*/ From 1c273e6c4851c4dc151fbd29ef58d5ad8bcab38d Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 13:36:00 +0300 Subject: [PATCH 087/186] Fix most of tests --- core/server/src/eth_sender/mod.rs | 33 ++++++- core/server/src/eth_sender/tests/mock.rs | 86 ++++++++++-------- core/server/src/eth_sender/tests/mod.rs | 106 +++++++++++++++-------- 3 files changed, 148 insertions(+), 77 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 69631a45b0..d03695cd08 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -41,6 +41,20 @@ const EXPECTED_WAIT_TIME_BLOCKS: u64 = 30; const TX_POLL_PERIOD: Duration = Duration::from_secs(5); const WAIT_CONFIRMATIONS: u64 = 1; +/// `TxCheckMode` enum determines the policy on the obtaining the tx status. +/// The latest sent transaction can be pending (we're still waiting for it), +/// but if there is more than one tx for some Ethereum operation, it means that we +/// already know that these transactions were considered stuck. Thus, lack of +/// response (either successful or unsuccessful) for any of the old txs means +/// that this transaction is still stuck. +#[derive(Debug, Clone, PartialEq)] +enum TxCheckMode { + /// Mode for the latest sent tx (pending state is allowed). + Latest, + /// Mode for the latest sent tx (pending state is not allowed). + Old, +} + /// `ETHSender` is a structure capable of anchoring /// the ZKSync operations to the Ethereum blockchain. /// @@ -238,8 +252,14 @@ impl ETHSender { // Check statuses of existing transactions. // Go through every transaction in a loop. We will exit this method early // if there will be discovered a pending or successfully committed transaction. - for tx_hash in &op.used_tx_hashes { - match self.check_transaction_state(op, tx_hash, current_block)? { + for (idx, tx_hash) in op.used_tx_hashes.iter().enumerate() { + let mode = if idx == op.used_tx_hashes.len() - 1 { + TxCheckMode::Latest + } else { + TxCheckMode::Old + }; + + match self.check_transaction_state(mode, op, tx_hash, current_block)? { TxCheckOutcome::Pending => { // Transaction is pending, nothing to do yet. return Ok(OperationCommitment::Pending); @@ -308,6 +328,7 @@ impl ETHSender { /// and reduces it to the simpler `TxCheckOutcome` report. 
fn check_transaction_state( &self, + mode: TxCheckMode, op: ÐOperation, tx_hash: &H256, current_block: u64, @@ -337,8 +358,12 @@ impl ETHSender { } // Stuck transaction. None if op.is_stuck(current_block) => TxCheckOutcome::Stuck, - // No status and not stuck yet, thus considered pending. - None => TxCheckOutcome::Pending, + // No status yet. If this is a latest transaction, it's pending. + // For an old tx it means that it's still stuck. + None => match mode { + TxCheckMode::Latest => TxCheckOutcome::Pending, + TxCheckMode::Old => TxCheckOutcome::Stuck, + }, }; Ok(outcome) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 87b48da8aa..45090ea7a2 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -10,8 +10,8 @@ use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; use models::{ - ethereum::{ETHOperation, EthOpId}, - Operation, + ethereum::{ETHOperation, EthOpId, OperationType}, + Action, Operation, }; // Local uses use super::ETHSender; @@ -53,24 +53,24 @@ impl MockDatabase { } /// Ensures that the provided transaction is stored in the database and not confirmed yet. - pub fn assert_stored(&self, id: i64, tx: ÐOperation) { - assert_eq!(self.unconfirmed_operations.borrow().get(&id), Some(tx)); + pub fn assert_stored(&self, tx: ÐOperation) { + assert_eq!(self.unconfirmed_operations.borrow().get(&tx.id), Some(tx)); - assert!(self.confirmed_operations.borrow().get(&id).is_none()); + assert!(self.confirmed_operations.borrow().get(&tx.id).is_none()); } /// Ensures that the provided transaction is not stored in the database. - pub fn assert_not_stored(&self, id: i64, tx: ÐOperation) { - assert!(self.confirmed_operations.borrow().get(&id).is_none()); + pub fn assert_not_stored(&self, tx: ÐOperation) { + assert!(self.confirmed_operations.borrow().get(&tx.id).is_none()); - assert!(self.unconfirmed_operations.borrow().get(&id).is_none()); + assert!(self.unconfirmed_operations.borrow().get(&tx.id).is_none()); } /// Ensures that the provided transaction is stored as confirmed. - pub fn assert_confirmed(&self, id: i64, tx: ÐOperation) { - assert_eq!(self.confirmed_operations.borrow().get(&id), Some(tx)); + pub fn assert_confirmed(&self, tx: ÐOperation) { + assert_eq!(self.confirmed_operations.borrow().get(&tx.id), Some(tx)); - assert!(self.unconfirmed_operations.borrow().get(&id).is_none()); + assert!(self.unconfirmed_operations.borrow().get(&tx.id).is_none()); } } @@ -84,6 +84,10 @@ impl DatabaseAccess for MockDatabase { let new_id = id + 1; self.pending_op_id.set(new_id); + // Store with the assigned ID. + let mut op = op.clone(); + op.id = id; + self.unconfirmed_operations .borrow_mut() .insert(id, op.clone()); @@ -318,27 +322,39 @@ pub(super) fn restored_eth_sender( ) } -// /// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. -// /// This method should be used to create expected tx copies which won't affect -// /// the internal `ETHSender` state. 
-// pub(super) fn create_signed_tx( -// eth_sender: ÐSender, -// operation: &Operation, -// deadline_block: u64, -// nonce: i64, -// ) -> ETHOperation { -// let mut options = Options::default(); -// options.nonce = Some(nonce.into()); - -// let raw_tx = eth_sender.operation_to_raw_tx(&operation); -// let signed_tx = eth_sender -// .ethereum -// .sign_prepared_tx(raw_tx, options) -// .unwrap(); - -// TransactionETHState { -// op_id: operation.id.unwrap(), -// deadline_block, -// signed_tx, -// } -// } +/// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. +/// This method should be used to create expected tx copies which won't affect +/// the internal `ETHSender` state. +pub(super) fn create_signed_tx( + eth_sender: ÐSender, + operation: &Operation, + deadline_block: u64, + nonce: i64, +) -> ETHOperation { + let mut options = Options::default(); + options.nonce = Some(nonce.into()); + + let raw_tx = eth_sender.operation_to_raw_tx(&operation); + let signed_tx = eth_sender + .ethereum + .sign_prepared_tx(raw_tx.clone(), options) + .unwrap(); + + let op_type = match operation.action { + Action::Commit => OperationType::Commit, + Action::Verify { .. } => OperationType::Verify, + }; + + ETHOperation { + id: 0, // Will be initialized later. + op_type, + op: Some(operation.clone()), + nonce: signed_tx.nonce, + last_deadline_block: deadline_block, + last_used_gas_price: signed_tx.gas_price, + used_tx_hashes: vec![signed_tx.hash], + encoded_tx_data: raw_tx, + confirmed: false, + final_hash: None, + } +} diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index e2adf0092f..80d1f6eabf 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -1,19 +1,19 @@ // External uses use web3::contract::Options; +// Workspace uses +use models::ethereum::ETHOperation; // Local uses -use self::mock::{default_eth_sender, restored_eth_sender}; +use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; use super::{ database::DatabaseAccess, ethereum_interface::EthereumInterface, transactions::{ETHStats, ExecutedTxStatus, TxCheckOutcome}, - ETHSender, + ETHSender, TxCheckMode, }; mod mock; mod test_data; -/* - /// Basic test that `ETHSender` creation does not panic and initializes correctly. #[test] fn basic_test() { @@ -65,7 +65,7 @@ fn transaction_state() { let (mut eth_sender, _, _) = default_eth_sender(); let current_block = eth_sender.ethereum.block_number; let deadline_block = eth_sender.get_deadline_block(current_block); - let operations: Vec = vec![ + let operations: Vec = vec![ test_data::commit_operation(0), // Will be committed. test_data::commit_operation(1), // Will be pending because of not enough confirmations. test_data::commit_operation(2), // Will be failed. @@ -85,7 +85,7 @@ fn transaction_state() { }; eth_sender .ethereum - .add_execution(&operations[0], &committed_response); + .add_execution(&operations[0].used_tx_hashes[0], &committed_response); // Pending operation. let pending_response = ExecutedTxStatus { @@ -95,7 +95,7 @@ fn transaction_state() { }; eth_sender .ethereum - .add_execution(&operations[1], &pending_response); + .add_execution(&operations[1].used_tx_hashes[0], &pending_response); // Failed operation. let failed_response = ExecutedTxStatus { @@ -105,7 +105,7 @@ fn transaction_state() { }; eth_sender .ethereum - .add_execution(&operations[2], &failed_response); + .add_execution(&operations[2].used_tx_hashes[0], &failed_response); // Checks. 
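
// The `TxCheckMode` policy this patch introduces, condensed into a stand-in
// decision table: a receipt with enough confirmations means the tx is
// committed (or failed, if it reverted); too few confirmations means pending;
// and a transaction with no status at all is pending only when it is the
// latest one sent: for an older, superseded tx the missing status means it is
// still stuck, as is any tx past its deadline block. Local enums and a plain
// struct stand in for the crate's types; the real method also carries the
// failure receipt.
enum Mode { Latest, Old }

#[derive(Debug, PartialEq)]
enum Outcome { Committed, Failed, Pending, Stuck }

struct Status { confirmations: u64, success: bool }

fn outcome(mode: Mode, status: Option<Status>, wait_confirmations: u64, deadline_passed: bool) -> Outcome {
    match status {
        Some(s) if s.confirmations >= wait_confirmations && s.success => Outcome::Committed,
        Some(s) if s.confirmations >= wait_confirmations => Outcome::Failed,
        Some(_) => Outcome::Pending,
        None if deadline_passed => Outcome::Stuck,
        None => match mode {
            Mode::Latest => Outcome::Pending,
            Mode::Old => Outcome::Stuck,
        },
    }
}

#[test]
fn missing_status_depends_on_mode() {
    // A successful receipt with enough confirmations commits regardless of mode.
    let ok = Status { confirmations: 1, success: true };
    assert_eq!(outcome(Mode::Old, Some(ok), 1, false), Outcome::Committed);
    // Before the deadline, "no response yet" keeps the latest tx pending...
    assert_eq!(outcome(Mode::Latest, None, 1, false), Outcome::Pending);
    // ...but a superseded tx with no response is already considered stuck.
    assert_eq!(outcome(Mode::Old, None, 1, false), Outcome::Stuck);
}
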
@@ -113,7 +113,9 @@ fn transaction_state() { assert_eq!( eth_sender .check_transaction_state( + TxCheckMode::Latest, &operations[0], + &operations[0].used_tx_hashes[0], current_block + committed_response.confirmations ) .unwrap(), @@ -124,7 +126,9 @@ fn transaction_state() { assert_eq!( eth_sender .check_transaction_state( + TxCheckMode::Latest, &operations[1], + &operations[1].used_tx_hashes[0], current_block + pending_response.confirmations ) .unwrap(), @@ -135,7 +139,9 @@ fn transaction_state() { assert_eq!( eth_sender .check_transaction_state( + TxCheckMode::Latest, &operations[2], + &operations[2].used_tx_hashes[0], current_block + failed_response.confirmations ) .unwrap(), @@ -146,7 +152,9 @@ fn transaction_state() { assert_eq!( eth_sender .check_transaction_state( + TxCheckMode::Latest, &operations[3], + &operations[3].used_tx_hashes[0], current_block + super::EXPECTED_WAIT_TIME_BLOCKS ) .unwrap(), @@ -157,12 +165,27 @@ fn transaction_state() { assert_eq!( eth_sender .check_transaction_state( + TxCheckMode::Latest, &operations[4], + &operations[4].used_tx_hashes[0], current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 ) .unwrap(), TxCheckOutcome::Pending ); + + // Pending old operation should be considered stuck. + assert_eq!( + eth_sender + .check_transaction_state( + TxCheckMode::Old, + &operations[4], + &operations[4].used_tx_hashes[0], + current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 + ) + .unwrap(), + TxCheckOutcome::Stuck + ); } /// Test for a normal `ETHSender` workflow: @@ -183,7 +206,9 @@ fn operation_commitment_workflow() { let verify_operation_id = operations[1].id; - for (nonce, operation) in operations.iter().enumerate() { + for (eth_op_id, operation) in operations.iter().enumerate() { + let nonce = eth_op_id; + // Send an operation to `ETHSender`. sender.try_send(operation.clone()).unwrap(); @@ -193,9 +218,14 @@ fn operation_commitment_workflow() { // Now we should see that transaction is stored in the database and sent to the Ethereum. let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let expected_tx = create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + let mut expected_tx = + create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + expected_tx.id = eth_op_id as i64; // We have to set the ID manually. + eth_sender.db.assert_stored(&expected_tx); - eth_sender.ethereum.assert_sent(&expected_tx); + eth_sender + .ethereum + .assert_sent(&expected_tx.used_tx_hashes[0]); // No confirmation should be done yet. assert!(receiver.try_next().is_err()); @@ -204,10 +234,12 @@ fn operation_commitment_workflow() { // operation again. eth_sender .ethereum - .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(expected_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Check that operation is confirmed. + expected_tx.confirmed = true; + expected_tx.final_hash = Some(expected_tx.used_tx_hashes[0]); eth_sender.db.assert_confirmed(&expected_tx); } @@ -224,7 +256,7 @@ fn operation_commitment_workflow() { .ethereum .sign_prepared_tx(raw_tx, options) .unwrap(); - eth_sender.ethereum.assert_sent_by_hash(&tx.hash); + eth_sender.ethereum.assert_sent(&tx.hash); // We should be notified about verify operation being completed. 
assert_eq!( @@ -251,34 +283,33 @@ fn stuck_transaction() { let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + let mut stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); // Skip some blocks and expect sender to send a new tx. eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operations(); // Check that new transaction is sent (and created based on the previous stuck tx). - let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); - let expected_tx = eth_sender - .sign_raw_tx( - stuck_tx.op_id, - raw_tx, + let expected_sent_tx = eth_sender + .create_supplement_tx( eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - Some(&stuck_tx), + &mut stuck_tx, ) .unwrap(); - eth_sender.db.assert_stored(&expected_tx); - eth_sender.ethereum.assert_sent(&expected_tx); + eth_sender.db.assert_stored(&stuck_tx); + eth_sender.ethereum.assert_sent(&expected_sent_tx.hash); // Increment block, make the transaction look successfully executed, and process the // operation again. eth_sender .ethereum - .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(stuck_tx.used_tx_hashes[1], super::WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); - // Check that operation is confirmed. - eth_sender.db.assert_confirmed(&expected_tx); + // Check that operation is confirmed (we set the final hash to the second sent tx). + stuck_tx.confirmed = true; + stuck_tx.final_hash = Some(stuck_tx.used_tx_hashes[1]); + eth_sender.db.assert_confirmed(&stuck_tx); } // TODO: Restore once withdraw operations are fixed in `eth_sender`. @@ -398,10 +429,11 @@ fn transaction_failure() { eth_sender .ethereum - .add_failed_execution(&failing_tx, super::WAIT_CONFIRMATIONS); + .add_failed_execution(&failing_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); } +/* /// Check that after recovering state with several non-processed operations /// they will be processed normally. #[test] @@ -471,6 +503,7 @@ fn restore_state() { assert!(receiver.try_next().unwrap().is_some()); } +*/ /// Checks that even after getting the first transaction stuck and sending the next /// one, confirmation for the first (stuck) transaction is processed and leads @@ -490,31 +523,28 @@ fn confirmations_independence() { let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + let mut stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operations(); - let raw_tx = stuck_tx.signed_tx.raw_tx.clone(); let next_tx = eth_sender - .sign_raw_tx( - stuck_tx.op_id, - raw_tx, + .create_supplement_tx( eth_sender.get_deadline_block(eth_sender.ethereum.block_number), - Some(&stuck_tx), + &mut stuck_tx, ) .unwrap(); - eth_sender.db.assert_stored(&next_tx); - eth_sender.ethereum.assert_sent(&next_tx); + eth_sender.db.assert_stored(&stuck_tx); + eth_sender.ethereum.assert_sent(&next_tx.hash); // Add a confirmation for a *stuck* transaction. 
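    // (Even though a replacement tx with a new hash was sent above, it is the
    // first hash in `used_tx_hashes` that gets confirmed here, so the operation
    // must end up confirmed with that original hash as its `final_hash`.)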
eth_sender .ethereum - .add_successfull_execution(stuck_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(stuck_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); - // Check that operation is confirmed. + // Check that operation is confirmed (we set the final hash to the *first* sent tx). + stuck_tx.confirmed = true; + stuck_tx.final_hash = Some(stuck_tx.used_tx_hashes[0]); eth_sender.db.assert_confirmed(&stuck_tx); } - -*/ From 6eef2263997ebfaa39d86878ae09268e0707ccb7 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 14:01:04 +0300 Subject: [PATCH 088/186] Fix restore_state test --- core/server/src/eth_sender/tests/mock.rs | 5 +++ core/server/src/eth_sender/tests/mod.rs | 45 +++++++++--------------- 2 files changed, 21 insertions(+), 29 deletions(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 45090ea7a2..f72801f556 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -43,11 +43,16 @@ impl MockDatabase { .iter() .fold(0, |acc, op| acc + op.used_tx_hashes.len()); let pending_op_id = restore_state.len(); + + let unconfirmed_operations: HashMap = + restore_state.iter().map(|op| (op.id, op.clone())).collect(); + Self { restore_state, nonce: Cell::new(nonce as i64), pending_op_id: Cell::new(pending_op_id as EthOpId), stats: RefCell::new(stats), + unconfirmed_operations: RefCell::new(unconfirmed_operations), ..Default::default() } } diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 80d1f6eabf..c4c8d21f19 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -433,7 +433,6 @@ fn transaction_failure() { eth_sender.proceed_next_operations(); } -/* /// Check that after recovering state with several non-processed operations /// they will be processed normally. #[test] @@ -447,22 +446,13 @@ fn restore_state() { let deadline_block = eth_sender.get_deadline_block(1); let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); + let deadline_block = eth_sender.get_deadline_block(2); - let verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); + let mut verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); + verify_op_tx.id = 1; let operations = vec![commit_op.clone(), verify_op.clone()]; - - // Create `OperationETHState` objects from operations and restore state - let stored_operations = vec![ - OperationETHState { - operation: commit_op, - txs: vec![commit_op_tx], - }, - OperationETHState { - operation: verify_op, - txs: vec![verify_op_tx], - }, - ]; + let stored_operations = vec![commit_op_tx, verify_op_tx]; (operations, stored_operations) }; @@ -470,21 +460,11 @@ fn restore_state() { let stats = ETHStats { commit_ops: 1, verify_ops: 1, - withdraw_ops: 1, + withdraw_ops: 0, }; let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats); - // We have to store txs in the database, since we've used them for the data restore. - eth_sender - .db - .save_unconfirmed_operation(&stored_operations[0].txs[0]) - .unwrap(); - eth_sender - .db - .save_unconfirmed_operation(&stored_operations[1].txs[0]) - .unwrap(); - - for (nonce, operation) in operations.iter().enumerate() { + for (eth_op_id, operation) in operations.iter().enumerate() { // Note that we DO NOT send an operation to `ETHSender` and neither receive it. 
         // We do process operations restored from the DB though.
@@ -492,18 +472,25 @@ fn restore_state() {
         eth_sender.proceed_next_operations();
 
         let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number);
-        let expected_tx = create_signed_tx(&eth_sender, operation, deadline_block, nonce as i64);
+        let nonce = eth_op_id;
+        let mut expected_tx =
+            create_signed_tx(&eth_sender, operation, deadline_block, nonce as i64);
+        expected_tx.id = eth_op_id as i64;
+
+        eth_sender.db.assert_stored(&expected_tx);
 
         eth_sender
             .ethereum
-            .add_successfull_execution(expected_tx.signed_tx.hash, super::WAIT_CONFIRMATIONS);
+            .add_successfull_execution(expected_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS);
         eth_sender.proceed_next_operations();
+
+        expected_tx.confirmed = true;
+        expected_tx.final_hash = Some(expected_tx.used_tx_hashes[0]);
         eth_sender.db.assert_confirmed(&expected_tx);
     }
 
     assert!(receiver.try_next().unwrap().is_some());
 }
-*/

From ad276040fa3f5076f20ca54c302c64681f11b480 Mon Sep 17 00:00:00 2001
From: Igor Aleksanov
Date: Tue, 24 Mar 2020 17:14:45 +0300
Subject: [PATCH 089/186] Fix several bugs in eth_sender

---
 core/server/src/eth_sender/mod.rs          | 34 ++++++++---
 core/server/src/eth_sender/tests/mock.rs   | 38 +++++++++++-
 core/server/src/eth_sender/tests/mod.rs    | 67 ++++++++++++++++------
 core/server/src/eth_sender/tx_queue/mod.rs | 13 +++--
 core/storage/src/ethereum/mod.rs           |  4 +-
 5 files changed, 121 insertions(+), 35 deletions(-)

diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs
index d03695cd08..1e096f5758 100644
--- a/core/server/src/eth_sender/mod.rs
+++ b/core/server/src/eth_sender/mod.rs
@@ -156,6 +156,12 @@ impl ETHSender {
 
     fn retrieve_operations(&mut self) {
         while let Ok(Some(operation)) = self.rx_for_eth.try_next() {
+            info!(
+                "Adding ZKSync operation <id {}; action: {}; block: {}> to queue",
+                operation.id.expect("ID must be set"),
+                operation.action.to_string(),
+                operation.block.block_number
+            );
             self.add_operation_to_queue(operation);
         }
     }
@@ -185,8 +191,8 @@ impl ETHSender {
         new_tx.id = op_id;
 
         info!(
-            "Sending ETH tx: ETH Operation {} ({:?}), ZKSync Operation {:?}",
-            new_tx.id, new_tx.op_type, new_tx.op,
+            "Sending new tx: [ETH Operation <ID: {}, type: {:?}>. Tx hash: <{:#x}>. ZKSync operation: {}]",
+            new_tx.id, new_tx.op_type, new_tx.used_tx_hashes[0], self.zksync_operation_description(&new_tx),
         );
         self.ethereum.send_tx(&signed_tx)?;
 
@@ -212,11 +218,6 @@ impl ETHSender {
         // Check if we've completed the commitment.
         match result {
             OperationCommitment::Committed => {
-                info!(
-                    "Confirmed: ETH Operation {} ({:?}), ZKSync Operation {:?}",
-                    operation.id, operation.op_type, operation.op,
-                );
-
                 // Free a slot for the next tx in the queue.
                 self.tx_queue.report_commitment();
 
@@ -238,6 +239,19 @@ impl ETHSender {
         }
     }
 
+    fn zksync_operation_description(&self, operation: &ETHOperation) -> String {
+        if let Some(op) = &operation.op {
+            format!(
+                "<id {}; action: {}; block: {}>",
+                op.id.expect("ID must be set"),
+                op.action.to_string(),
+                op.block.block_number
+            )
+        } else {
+            "<not applicable>".into()
+        }
+    }
+
     fn perform_commitment_step(
         &mut self,
         op: &mut ETHOperation,
@@ -266,8 +280,8 @@ impl ETHSender {
             }
             TxCheckOutcome::Committed => {
                 info!(
-                    "Eth operation {}, ZKSync operation {:?}, committed, tx: {:#x}",
-                    op.id, op.op, tx_hash,
+                    "Confirmed: [ETH Operation <ID: {}, type: {:?}>. Tx hash: <{:#x}>. 
ZKSync operation: {}]", + op.id, op.op_type, tx_hash, self.zksync_operation_description(op), ); self.db.confirm_operation(tx_hash)?; return Ok(OperationCommitment::Committed); @@ -520,6 +534,8 @@ impl ETHSender { config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, ); + info!("Adding withdraw operation to queue"); + self.tx_queue .add_withdraw_operation(TxData::from_raw(OperationType::Withdraw, raw_tx)); } diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index f72801f556..267f96d3cf 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -331,6 +331,7 @@ pub(super) fn restored_eth_sender( /// This method should be used to create expected tx copies which won't affect /// the internal `ETHSender` state. pub(super) fn create_signed_tx( + id: i64, eth_sender: ÐSender, operation: &Operation, deadline_block: u64, @@ -351,7 +352,7 @@ pub(super) fn create_signed_tx( }; ETHOperation { - id: 0, // Will be initialized later. + id, op_type, op: Some(operation.clone()), nonce: signed_tx.nonce, @@ -363,3 +364,38 @@ pub(super) fn create_signed_tx( final_hash: None, } } + +/// Creates an `ETHOperation` object for a withdraw operation. +pub(super) fn create_signed_withdraw_tx( + id: i64, + eth_sender: ÐSender, + deadline_block: u64, + nonce: i64, +) -> ETHOperation { + let mut options = Options::default(); + options.nonce = Some(nonce.into()); + + let raw_tx = eth_sender.ethereum.encode_tx_data( + "completeWithdrawals", + models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, + ); + let signed_tx = eth_sender + .ethereum + .sign_prepared_tx(raw_tx.clone(), options) + .unwrap(); + + let op_type = OperationType::Withdraw; + + ETHOperation { + id, + op_type, + op: None, + nonce: signed_tx.nonce, + last_deadline_block: deadline_block, + last_used_gas_price: signed_tx.gas_price, + used_tx_hashes: vec![signed_tx.hash], + encoded_tx_data: raw_tx, + confirmed: false, + final_hash: None, + } +} diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index c4c8d21f19..c50e911f12 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -74,7 +74,10 @@ fn transaction_state() { ] .iter() .enumerate() - .map(|(nonce, op)| create_signed_tx(ð_sender, op, deadline_block, nonce as i64)) + .map(|(eth_op_id, op)| { + let nonce = eth_op_id as i64; + create_signed_tx(eth_op_id as i64, ð_sender, op, deadline_block, nonce) + }) .collect(); // Committed operation. @@ -207,7 +210,7 @@ fn operation_commitment_workflow() { let verify_operation_id = operations[1].id; for (eth_op_id, operation) in operations.iter().enumerate() { - let nonce = eth_op_id; + let nonce = eth_op_id as i64; // Send an operation to `ETHSender`. sender.try_send(operation.clone()).unwrap(); @@ -218,8 +221,13 @@ fn operation_commitment_workflow() { // Now we should see that transaction is stored in the database and sent to the Ethereum. let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let mut expected_tx = - create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + let mut expected_tx = create_signed_tx( + eth_op_id as i64, + ð_sender, + operation, + deadline_block, + nonce, + ); expected_tx.id = eth_op_id as i64; // We have to set the ID manually. 
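        // (The mock database is assumed to assign `ETHOperation` IDs sequentially
        // from zero in the order operations are saved, which is why `eth_op_id`
        // can double as both the expected ID and the nonce in this loop.)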
eth_sender.db.assert_stored(&expected_tx); @@ -281,9 +289,10 @@ fn stuck_transaction() { eth_sender.retrieve_operations(); eth_sender.proceed_next_operations(); + let eth_op_id = 0; let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let mut stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + let mut stuck_tx = create_signed_tx(eth_op_id, ð_sender, &operation, deadline_block, nonce); // Skip some blocks and expect sender to send a new tx. eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; @@ -339,22 +348,36 @@ fn stuck_transaction() { // // Create the commit operation. // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; // let deadline_block = eth_sender.get_deadline_block(start_block); -// let nonce = idx * 3; +// let eth_op_idx = (idx * 3) as i64; +// let nonce = eth_op_idx; -// let commit_op_tx = -// create_signed_tx(ð_sender, commit_operation, deadline_block, nonce as i64); +// let mut commit_op_tx = create_signed_tx( +// eth_op_idx, +// ð_sender, +// commit_operation, +// deadline_block, +// nonce, +// ); // expected_txs.push(commit_op_tx); // // Create the verify operation, as by priority it will be processed right after `commit`. // let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; // let deadline_block = eth_sender.get_deadline_block(start_block); -// let nonce = idx * 3 + 1; +// let eth_op_idx = (idx * 3 + 1) as i64; +// let nonce = eth_op_idx; -// let verify_op_tx = -// create_signed_tx(ð_sender, verify_operation, deadline_block, nonce as i64); +// let mut verify_op_tx = create_signed_tx( +// eth_op_idx, +// ð_sender, +// verify_operation, +// deadline_block, +// nonce, +// ); // expected_txs.push(verify_op_tx); + +// // Create the withdraw operation. 
// } // for operation in operations.iter() { @@ -420,9 +443,10 @@ fn transaction_failure() { let operation = test_data::commit_operation(0); sender.try_send(operation.clone()).unwrap(); + let eth_op_id = 0; let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let failing_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + let failing_tx = create_signed_tx(eth_op_id, ð_sender, &operation, deadline_block, nonce); eth_sender.retrieve_operations(); eth_sender.proceed_next_operations(); @@ -445,11 +469,10 @@ fn restore_state() { let verify_op = test_data::verify_operation(0); let deadline_block = eth_sender.get_deadline_block(1); - let commit_op_tx = create_signed_tx(ð_sender, &commit_op, deadline_block, 0); + let commit_op_tx = create_signed_tx(0, ð_sender, &commit_op, deadline_block, 0); let deadline_block = eth_sender.get_deadline_block(2); - let mut verify_op_tx = create_signed_tx(ð_sender, &verify_op, deadline_block, 1); - verify_op_tx.id = 1; + let verify_op_tx = create_signed_tx(1, ð_sender, &verify_op, deadline_block, 1); let operations = vec![commit_op.clone(), verify_op.clone()]; let stored_operations = vec![commit_op_tx, verify_op_tx]; @@ -472,9 +495,14 @@ fn restore_state() { eth_sender.proceed_next_operations(); let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let nonce = eth_op_id; - let mut expected_tx = - create_signed_tx(ð_sender, operation, deadline_block, nonce as i64); + let nonce = eth_op_id as i64; + let mut expected_tx = create_signed_tx( + eth_op_id as i64, + ð_sender, + operation, + deadline_block, + nonce, + ); expected_tx.id = eth_op_id as i64; eth_sender.db.assert_stored(&expected_tx); @@ -508,9 +536,10 @@ fn confirmations_independence() { eth_sender.retrieve_operations(); eth_sender.proceed_next_operations(); + let eth_op_id = 0; let nonce = 0; let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); - let mut stuck_tx = create_signed_tx(ð_sender, &operation, deadline_block, nonce); + let mut stuck_tx = create_signed_tx(eth_op_id, ð_sender, &operation, deadline_block, nonce); eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operations(); diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index e1e0ca942d..17571aefd8 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -99,12 +99,15 @@ impl TxQueueBuilder { /// Finishes the queue building process. pub fn build(self) -> TxQueue { + // Block numbers are indexed starting from 1, so we have to increment. + let verify_operations_next_block = self.verify_operations_count + 1; + TxQueue { max_pending_txs: self.max_pending_txs, sent_pending_txs: self.sent_pending_txs, commit_operations: CounterQueue::new_with_count(self.commit_operations_count), - verify_operations: SparseQueue::new_from(self.verify_operations_count), + verify_operations: SparseQueue::new_from(verify_operations_next_block), withdraw_operations: CounterQueue::new_with_count(self.withdraw_operations_count), } } @@ -187,16 +190,16 @@ impl TxQueue { // If we've committed a corresponding `Commit` operation, and // there is a pending `verify` operation, chose it. 
let next_verify_op_id = self.verify_operations.next_id(); - if next_verify_op_id < self.commit_operations.get_count() - && self.verify_operations.has_next() - { + let next_commit_op_id = self.commit_operations.get_count() + 1; + if next_verify_op_id < next_commit_op_id && self.verify_operations.has_next() { return Some(self.verify_operations.pop_front().unwrap()); } // 2. After verify operations we should process withdraw operation. // We don't want to be ahead of the last verify operation. - if self.withdraw_operations.get_count() < next_verify_op_id { + let next_withdraw_op_id = self.withdraw_operations.get_count() + 1; + if next_withdraw_op_id < next_verify_op_id { if let Some(withdraw_operation) = self.withdraw_operations.pop_front() { return Some(withdraw_operation); } diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 463702453a..f85a21c790 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -55,9 +55,11 @@ impl<'a> EthereumSchema<'a> { // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`. for (eth_op, _, raw_op) in raw_ops { - // Load the stored txs hashes. + // Load the stored txs hashes ordered by their ID, + // so the latest added hash will be the last one in the list. let eth_tx_hashes: Vec = eth_tx_hashes::table .filter(eth_tx_hashes::eth_op_id.eq(eth_op.id)) + .order_by(eth_tx_hashes::id.asc()) .load(self.0.conn())?; assert!( eth_tx_hashes.len() >= 1, From 76ddfebff881b9b0ec24d2de779a531a330fb526 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 24 Mar 2020 17:42:19 +0300 Subject: [PATCH 090/186] Fix queue and tests --- core/server/src/eth_sender/mod.rs | 86 ++++++++++--------- core/server/src/eth_sender/tests/test_data.rs | 8 +- core/server/src/eth_sender/tx_queue/mod.rs | 6 +- 3 files changed, 51 insertions(+), 49 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 1e096f5758..1b40ac5f36 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -173,11 +173,51 @@ impl ETHSender { }); } - // Commit the next operation (if any). - // TODO: should not be `if let`, but rather `while let`. - if let Some(current_op) = self.ongoing_ops.pop_front() { - self.try_commit(current_op); + // Commit the next operations (if any). + let mut new_ongoing_ops = VecDeque::new(); + + while let Some(mut current_op) = self.ongoing_ops.pop_front() { + // We perform a commitment step here. In case of error, we suppose that this is some + // network issue which won't appear the next time, so we report the situation to the + // log and consider the operation pending (meaning that we won't process it on this + // step, but will try to do so on the next one). + let commitment = self + .perform_commitment_step(&mut current_op) + .map_err(|e| { + warn!("Error while trying to complete uncommitted op: {}", e); + }) + .unwrap_or(OperationCommitment::Pending); + + match commitment { + OperationCommitment::Committed => { + // Free a slot for the next tx in the queue. + self.tx_queue.report_commitment(); + + if current_op.is_verify() { + // We notify about verify only when it's confirmed on the Ethereum. + self.op_notify + .try_send(current_op.op.expect("Should be verify operation")) + .map_err(|e| warn!("Failed notify about verify op confirmation: {}", e)) + .unwrap_or_default(); + + // Complete pending withdrawals after each verify. 
+ self.add_complete_withdrawals_to_queue(); + } + } + OperationCommitment::Pending => { + // Poll this operation on the next iteration. + new_ongoing_ops.push_back(current_op); + } + } } + + assert!( + self.ongoing_ops.is_empty(), + "Ongoing ops queue should be empty after draining" + ); + + // Store the ongoing operations for the next round. + self.ongoing_ops = new_ongoing_ops; } fn initialize_operation(&mut self, tx: TxData) -> Result<(), failure::Error> { @@ -201,44 +241,6 @@ impl ETHSender { Ok(()) } - fn try_commit(&mut self, mut operation: ETHOperation) { - // Check the transactions associated with the operation, and send a new one if required. - - // We perform a commitment step here. In case of error, we suppose that this is some - // network issue which won't appear the next time, so we report the situation to the - // log and consider the operation pending (meaning that we won't process it on this - // step, but will try to do so on the next one). - let result = self - .perform_commitment_step(&mut operation) - .map_err(|e| { - warn!("Error while trying to complete uncommitted op: {}", e); - }) - .unwrap_or(OperationCommitment::Pending); - - // Check if we've completed the commitment. - match result { - OperationCommitment::Committed => { - // Free a slot for the next tx in the queue. - self.tx_queue.report_commitment(); - - if operation.is_verify() { - // We notify about verify only when commit is confirmed on the Ethereum. - self.op_notify - .try_send(operation.op.expect("Should be verify operation")) - .map_err(|e| warn!("Failed notify about verify op confirmation: {}", e)) - .unwrap_or_default(); - - // Complete pending withdrawals after each verify. - self.add_complete_withdrawals_to_queue(); - } - } - OperationCommitment::Pending => { - // Retry the operation again the next time. - self.ongoing_ops.push_front(operation); - } - } - } - fn zksync_operation_description(&self, operation: ÐOperation) -> String { if let Some(op) = &operation.op { format!( diff --git a/core/server/src/eth_sender/tests/test_data.rs b/core/server/src/eth_sender/tests/test_data.rs index f81d36a95c..cd015bb790 100644 --- a/core/server/src/eth_sender/tests/test_data.rs +++ b/core/server/src/eth_sender/tests/test_data.rs @@ -23,13 +23,13 @@ fn get_operation(id: i64, block_number: u32, action: Action) -> Operation { } lazy_static! { - pub static ref COMMIT_OPERATIONS: Vec = (1..7) - .map(|id| get_operation(id, (id - 1) as u32, Action::Commit)) + pub static ref COMMIT_OPERATIONS: Vec = (1..10) + .map(|id| get_operation(id, id as u32, Action::Commit)) .collect(); - pub static ref VERIFY_OPERATIONS: Vec = (7..14) + pub static ref VERIFY_OPERATIONS: Vec = (11..20) .map(|id| get_operation( id, - (id - 7) as u32, + (id - 10) as u32, Action::Verify { proof: Default::default() } diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 17571aefd8..423c9da466 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -145,7 +145,7 @@ impl TxQueue { sent_pending_txs: 0, commit_operations: CounterQueue::new(), - verify_operations: SparseQueue::new(), + verify_operations: SparseQueue::new_from(1), // Blocks are starting from the index 1. 
withdraw_operations: CounterQueue::new(), } } @@ -253,11 +253,11 @@ mod tests { vec![COMMIT_MARK, 1], )); queue.add_verify_operation( - 0, + 1, TxData::from_raw(OperationType::Verify, vec![VERIFY_MARK, 0]), ); queue.add_verify_operation( - 1, + 2, TxData::from_raw(OperationType::Verify, vec![VERIFY_MARK, 1]), ); queue.add_withdraw_operation(TxData::from_raw( From be7aeceaf2dc8e54a7a37292ebc2fc583c621b5d Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Tue, 24 Mar 2020 18:22:09 +0200 Subject: [PATCH 091/186] Simultaneous upgrade of all proxies --- bin/prepare-test-contracts.sh | 7 +- contracts/contracts/Events.sol | 23 ++- contracts/contracts/UpgradeGatekeeper.sol | 172 ++++++++++++------ contracts/scripts/test-upgrade-franklin.ts | 17 +- contracts/src.ts/deploy.ts | 3 + .../test/unit_tests/upgradeGatekeeper_test.ts | 45 ++--- 6 files changed, 161 insertions(+), 106 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 42ea69de16..22398570b9 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -19,6 +19,7 @@ cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol cp $IN_DIR/Events.sol $OUT_DIR/Events.sol cp $IN_DIR/Operations.sol $OUT_DIR/Operations.sol cp $IN_DIR/VerificationKey.sol $OUT_DIR/VerificationKey.sol +cp $IN_DIR/Governance.sol $OUT_DIR/GovernanceTestNoInit.sol cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTestNoInit.sol # Change dependencies @@ -31,7 +32,8 @@ ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/UpgradeGatekeeper/UpgradeGatekeeperTest/' -i $OUT_DIR/*.sol -# Renaming contract in FranklinTestNoInit.sol +# Renaming no init contracts +ssed 's/contract GovernanceTest/contract GovernanceTestNoInit/' -i $OUT_DIR/GovernanceTestNoInit.sol ssed 's/contract FranklinTest/contract FranklinTestNoInit/' -i $OUT_DIR/FranklinTestNoInit.sol @@ -58,5 +60,6 @@ create_constant_getter NOTICE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol -# Make initialize function in FranklinTestNoInit to do nothing +# Make initialize function in no init contracts to do nothing +ssed -E "s/ function initialize(.*)/ function initialize\1\n return;/" -i $OUT_DIR/GovernanceTestNoInit.sol ssed -E "s/ function initialize(.*)/ function initialize\1\n return;/" -i $OUT_DIR/FranklinTestNoInit.sol diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index 4b18a1615c..d813aef9ce 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -57,23 +57,22 @@ contract Events { /// @author Matter Labs contract UpgradeEvents { - /// @notice Upgrade mode enter event - event UpgradeModeActivated( - address proxyAddress, - uint64 version + /// @notice Event emitted when new proxy is added to upgrade gatekeeper + event ProxyAdded( + address proxyAddress ); + /// @notice Event emitted when list of proxies managed by the upgrade gatekeeper is cleared + event ProxyListCleared(); + + /// @notice Upgrade mode enter event + event UpgradeModeActivated(); + /// @notice Upgrade mode cancel event - event UpgradeCanceled( - address proxyAddress, - uint64 version - ); + event UpgradeCanceled(); /// @notice Upgrade mode preparation status event - event UpgradeModePreparationStatusActivated( - address proxyAddress, - uint64 version - ); + event UpgradeModePreparationStatusActivated(); /// @notice Upgrade mode complete event event UpgradeCompleted( diff --git a/contracts/contracts/UpgradeGatekeeper.sol 
b/contracts/contracts/UpgradeGatekeeper.sol index 23f0acd702..9ca93ad0dc 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -2,6 +2,7 @@ pragma solidity 0.5.16; import "./Events.sol"; import "./Ownable.sol"; +import "./Bytes.sol"; /// @title Upgrade Gatekeeper Contract @@ -9,7 +10,7 @@ import "./Ownable.sol"; contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Notice period before activation preparation status of upgrade mode (in seconds) - uint256 constant NOTICE_PERIOD = 2 weeks; + uint constant NOTICE_PERIOD = 2 weeks; /// @notice Versions of proxy contracts mapping(address => uint64) public version; @@ -17,6 +18,12 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Contract which processes priority operations address public mainContractAddress; + /// @notice Number of proxy contracts managed by the gatekeeper + uint64 public numberOfProxies; + + /// @notice Addresses of proxy contracts managed by the gatekeeper + mapping(uint64 => address) public proxyAddress; + /// @notice Upgrade mode statuses enum UpgradeStatus { Idle, @@ -24,25 +31,19 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { Preparation } - /// @notice Info for upgrade proxy - struct UpgradeInfo { - UpgradeStatus upgradeStatus; - - /// @notice Notice period activation timestamp (in seconds) - /// @dev Will be equal to zero in case of not active mode - uint256 activationTime; + UpgradeStatus upgradeStatus; - /// @notice Address of the next version of the contract to be upgraded - /// @dev Will store zero in case of not active upgrade mode - address nextTarget; + /// @notice Notice period activation timestamp (in seconds) + /// @dev Will be equal to zero in case of not active mode + uint activationTime; - /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade - /// @dev Will store zero in case of not active upgrade mode or not active preparation status of upgrade mode - uint64 priorityOperationsToProcessBeforeUpgrade; - } + /// @notice Address of the next version of the contract to be upgraded per each proxy + /// @dev Will store zero in case of not active upgrade mode + mapping(address => address) nextTarget; - /// @notice UpgradeInfo per each proxy - mapping(address => UpgradeInfo) public upgradeInfo; + /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade + /// @dev Will store zero in case of not active upgrade mode or not active preparation status of upgrade mode + uint64 priorityOperationsToProcessBeforeUpgrade; /// @notice Contract constructor /// @param _mainContractAddress Address of contract which processes priority operations @@ -51,56 +52,88 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { mainContractAddress = _mainContractAddress; } - /// @notice Starts upgrade (activates notice period) - /// @param proxyAddress Address of proxy to process - /// @param newTarget New target - function startProxyUpgrade(address proxyAddress, address newTarget) external { + /// @notice Clears list of proxies managed by the gatekeeper (for case of mistake when adding new proxies to the gatekeeper) + function clearProxyList() external { + requireMaster(msg.sender); + + upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; + activationTime = 0; + for (uint64 i = 0; i < numberOfProxies; i++) { + address proxy = proxyAddress[i]; + nextTarget[proxy] = address(0); + } + priorityOperationsToProcessBeforeUpgrade = 0; + 
+ numberOfProxies = 0; + emit ProxyListCleared(); + } + + /// @notice Adds a new proxy to the list of contracts managed by the gatekeeper + /// @param proxy Address of proxy to add + function addProxyContract(address proxy) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "upa11"); // upa11 - unable to activate active upgrade mode + require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "apc11"); /// apc11 - proxy can't be added during upgrade + + proxyAddress[numberOfProxies] = proxy; + numberOfProxies++; - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NoticePeriod; - upgradeInfo[proxyAddress].activationTime = now; - upgradeInfo[proxyAddress].nextTarget = newTarget; - upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; + emit ProxyAdded(proxy); + } + + /// @notice Starts upgrade (activates notice period) + /// @param newTargets New proxies targets + function startProxyUpgrade(address[] calldata newTargets) external { + requireMaster(msg.sender); + require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "spu11"); // spu11 - unable to activate active upgrade mode + require(newTargets.length == numberOfProxies, "spu12"); // spu12 - number of new targets must be equal to the number of proxies + + upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NoticePeriod; + activationTime = now; + for (uint64 i = 0; i < numberOfProxies; i++) { + address proxy = proxyAddress[i]; + nextTarget[proxy] = newTargets[i]; + } + priorityOperationsToProcessBeforeUpgrade = 0; - emit UpgradeModeActivated(proxyAddress, version[proxyAddress]); + emit UpgradeModeActivated(); } /// @notice Cancels upgrade - /// @param proxyAddress Address of proxy to process - function cancelProxyUpgrade(address proxyAddress) external { + function cancelProxyUpgrade() external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "umc11"); // umc11 - unable to cancel not active upgrade mode + require(upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "cpu11"); // cpu11 - unable to cancel not active upgrade mode - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; - upgradeInfo[proxyAddress].activationTime = 0; - upgradeInfo[proxyAddress].nextTarget = address(0); - upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; + upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; + activationTime = 0; + for (uint64 i = 0; i < numberOfProxies; i++) { + address proxy = proxyAddress[i]; + nextTarget[proxy] = address(0); + } + priorityOperationsToProcessBeforeUpgrade = 0; - emit UpgradeCanceled(proxyAddress, version[proxyAddress]); + emit UpgradeCanceled(); } /// @notice Checks that preparation status is active and activates it if needed - /// @param proxyAddress Address of proxy to process /// @return Bool flag indicating that preparation status is active after this call - function startPreparation(address proxyAddress) public returns (bool) { - require(upgradeInfo[proxyAddress].upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "uaf11"); // uaf11 - unable to activate preparation status in case of not active upgrade mode + function startPreparation() public returns (bool) { + require(upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "ugp11"); // ugp11 - unable to activate preparation status in case of not active upgrade mode - if (upgradeInfo[proxyAddress].upgradeStatus == 
UpgradeGatekeeper.UpgradeStatus.Preparation) { + if (upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation) { return true; } - if (now >= upgradeInfo[proxyAddress].activationTime + NOTICE_PERIOD) { - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Preparation; + if (now >= activationTime + NOTICE_PERIOD) { + upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Preparation; (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("totalRegisteredPriorityOperations()") ); - require(mainContractCallSuccess, "uaf12"); // uaf12 - main contract static call failed + require(mainContractCallSuccess, "ugp12"); // ugp12 - main contract static call failed uint64 totalRegisteredPriorityOperations = abi.decode(encodedResult, (uint64)); - upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; + priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; - emit UpgradeModePreparationStatusActivated(proxyAddress, version[proxyAddress]); + emit UpgradeModePreparationStatusActivated(); return true; } else { return false; @@ -108,32 +141,51 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { } /// @notice Finishes upgrade - /// @param proxyAddress Address of proxy to process - /// @param newTargetInitializationParameters New target initialization parameters - function finishProxyUpgrade(address proxyAddress, bytes calldata newTargetInitializationParameters) external { + /// @param initParametersConcatenated New targets initialization parameters per each proxy (concatenated into one array) + /// @param sizeOfInitParameters Sizes of targets initialization parameters (in bytes) + function finishProxyUpgrade(bytes calldata initParametersConcatenated, uint[] calldata sizeOfInitParameters) external { requireMaster(msg.sender); - require(upgradeInfo[proxyAddress].upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation, "umf11"); // umf11 - unable to finish upgrade without preparation status active + require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation, "fpu11"); // fpu11 - unable to finish upgrade without preparation status active + require(sizeOfInitParameters.length == numberOfProxies, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of proxies (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( abi.encodeWithSignature("totalVerifiedPriorityOperations()") ); - require(mainContractCallSuccess, "umf12"); // umf12 - main contract static call failed + require(mainContractCallSuccess, "fpu13"); // fpu13 - main contract static call failed uint64 totalVerifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - require(totalVerifiedPriorityOperations >= upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade, "umf13"); // umf13 - can't finish upgrade before verifing all priority operations received before start of preparation status + require(totalVerifiedPriorityOperations >= priorityOperationsToProcessBeforeUpgrade, "fpu14"); // fpu14 - can't finish upgrade before verifying all priority operations received before start of preparation status - (bool proxyUpgradeCallSuccess, ) = proxyAddress.call( - abi.encodeWithSignature("upgradeTarget(address,bytes)", upgradeInfo[proxyAddress].nextTarget, newTargetInitializationParameters) - ); - require(proxyUpgradeCallSuccess, "umf14"); // umf14 - proxy contract call failed + bytes memory 
initParametersConcatenated = initParametersConcatenated; + uint processedBytes = 0; + for (uint64 i = 0; i < numberOfProxies; i++) { + address proxy = proxyAddress[i]; + bytes memory targetInitParameters; + + // TODO: remove this when Bytes.slice function will be fixed + if (sizeOfInitParameters[i] == 0){ + targetInitParameters = new bytes(0); + } else { + (processedBytes, targetInitParameters) = Bytes.read(initParametersConcatenated, processedBytes, sizeOfInitParameters[i]); + } + + (bool proxyUpgradeCallSuccess, ) = proxy.call( + abi.encodeWithSignature("upgradeTarget(address,bytes)", nextTarget[proxy], targetInitParameters) + ); + require(proxyUpgradeCallSuccess, "fpu15"); // fpu15 - proxy contract call failed - emit UpgradeCompleted(proxyAddress, version[proxyAddress], upgradeInfo[proxyAddress].nextTarget); - version[proxyAddress]++; + emit UpgradeCompleted(proxy, version[proxy], nextTarget[proxy]); + version[proxy]++; + } + require(processedBytes == initParametersConcatenated.length, "fpu16"); // fpu16 - all targets initialization parameters bytes must be processed - upgradeInfo[proxyAddress].upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; - upgradeInfo[proxyAddress].activationTime = 0; - upgradeInfo[proxyAddress].nextTarget = address(0); - upgradeInfo[proxyAddress].priorityOperationsToProcessBeforeUpgrade = 0; + upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; + activationTime = 0; + for (uint64 i = 0; i < numberOfProxies; i++) { + address proxy = proxyAddress[i]; + nextTarget[proxy] = address(0); + } + priorityOperationsToProcessBeforeUpgrade = 0; } } diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index a2e7293f61..9f7d819cc8 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -6,6 +6,7 @@ import {deployContract} from "ethereum-waffle"; const {performance} = require('perf_hooks'); const {expect} = require("chai") +export const GovernanceTestNoInitContractCode = require(`../build/GovernanceTestNoInit`); export const FranklinTestNoInitContractCode = require(`../build/FranklinTestNoInit`); async function main() { @@ -42,7 +43,13 @@ async function main() { wallet, ); - const newTarget = await deployContract( + const newTargetGovernance = await deployContract( + wallet, + GovernanceTestNoInitContractCode, + [], + {gasLimit: 6500000}, + ); + const newTargetFranklin = await deployContract( wallet, FranklinTestNoInitContractCode, [], @@ -51,17 +58,17 @@ async function main() { let notice_period = parseInt(await upgradeGatekeeper.get_NOTICE_PERIOD()); - await (await upgradeGatekeeper.startProxyUpgrade(proxyContract.address, newTarget.address)).wait(); + await (await upgradeGatekeeper.startProxyUpgrade([newTargetGovernance.address, newTargetFranklin.address])).wait(); // wait notice period await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); // finish upgrade - await (await upgradeGatekeeper.startPreparation(proxyContract.address)).wait(); - await (await upgradeGatekeeper.finishProxyUpgrade(proxyContract.address, [])).wait(); + await (await upgradeGatekeeper.startPreparation()).wait(); + await (await upgradeGatekeeper.finishProxyUpgrade([], [0, 0])).wait(); await expect(await proxyContract.getTarget()) - .to.equal(newTarget.address); + .to.equal(newTargetFranklin.address); } main(); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 1f8339cd63..abace0b77c 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -223,6 
+223,9 @@ export class Deployer { ); this.addresses.UpgradeGatekeeper = contract.address; + await (await contract.addProxyContract(this.getDeployedContract('Governance').address)).wait(); + await (await contract.addProxyContract(this.getDeployedContract('Franklin').address)).wait(); + await (await this.getDeployedContract('Governance').transferMastership(contract.address)).wait(); await (await this.getDeployedContract('Verifier').transferMastership(contract.address)).wait(); await (await this.getDeployedContract('Franklin').transferMastership(contract.address)).wait(); diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 1eacecee6d..a889102c27 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -29,7 +29,9 @@ describe("UpgradeGatekeeper unit tests", function () { UpgradeGatekeeperContract = await deployContract(wallet, require('../../build/UpgradeGatekeeperTest'), [proxyTestContract.address], { gasLimit: 6000000, }) - proxyTestContract.transferMastership(UpgradeGatekeeperContract.address); + await proxyTestContract.transferMastership(UpgradeGatekeeperContract.address); + + await UpgradeGatekeeperContract.addProxyContract(proxyTestContract.address); // check initial dummy index and storage expect(await proxyDummyInterface.get_DUMMY_INDEX()) @@ -43,32 +45,30 @@ describe("UpgradeGatekeeper unit tests", function () { it("checking that requireMaster calls present", async () => { let UpgradeGatekeeperContract_with_wallet2_signer = await UpgradeGatekeeperContract.connect(wallet2); - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startProxyUpgrade(AddressZero, AddressZero) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelProxyUpgrade(AddressZero) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade(AddressZero, []) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.clearProxyList() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startProxyUpgrade([]) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelProxyUpgrade() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade([], []) )).revertReason).equal("oro11") }); it("checking UpgradeGatekeeper reverts; activation and cancelation upgrade", async () => { - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address) )).revertReason).equal("umc11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startPreparation(proxyTestContract.address) )).revertReason).equal("uaf11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade() )).revertReason).equal("cpu11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startPreparation() )).revertReason).equal("ugp11") + expect((await getCallRevertReason( () => 
UpgradeGatekeeperContract.finishProxyUpgrade([], []) )).revertReason).equal("fpu11") - await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address)) + await expect(UpgradeGatekeeperContract.startProxyUpgrade([DummySecond.address])) .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') - .withArgs(proxyTestContract.address, 0) - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address) )).revertReason).equal("upa11") - await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startProxyUpgrade([]) )).revertReason).equal("spu11") + await expect(UpgradeGatekeeperContract.cancelProxyUpgrade()) .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') - .withArgs(proxyTestContract.address, 0) }); it("checking that the upgrade works correctly", async () => { let start_time = performance.now(); // activate - await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummySecond.address)) + await expect(UpgradeGatekeeperContract.startProxyUpgrade([DummySecond.address])) .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') - .withArgs(proxyTestContract.address, 0) let activated_time = performance.now(); @@ -86,19 +86,18 @@ describe("UpgradeGatekeeper unit tests", function () { } if (step != 3) { - await UpgradeGatekeeperContract.startPreparation(proxyTestContract.address); + await UpgradeGatekeeperContract.startPreparation(); } else { - await expect(UpgradeGatekeeperContract.startPreparation(proxyTestContract.address)) + await expect(UpgradeGatekeeperContract.startPreparation()) .to.emit(UpgradeGatekeeperContract, 'UpgradeModePreparationStatusActivated') - .withArgs(proxyTestContract.address, 0) } } // finish upgrade without verifying priority operations - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, []) )).revertReason).equal("umf13") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade([bytes[2], bytes[3]], [2]) )).revertReason).equal("fpu14") // finish upgrade await proxyDummyInterface.verifyPriorityOperation(); - await expect(UpgradeGatekeeperContract.finishProxyUpgrade(proxyTestContract.address, [bytes[2], bytes[3]])) + await expect(UpgradeGatekeeperContract.finishProxyUpgrade([bytes[2], bytes[3]], [2])) .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') .withArgs(proxyTestContract.address, 0, DummySecond.address) @@ -115,14 +114,6 @@ describe("UpgradeGatekeeper unit tests", function () { .to.equal(bytes[2]); expect(parseInt(await provider.getStorageAt(proxyTestContract.address, 3))) .to.equal(bytes[3]); - - // one more activate and cancel with version equal to 1 - await expect(UpgradeGatekeeperContract.startProxyUpgrade(proxyTestContract.address, DummyFirst.address)) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') - .withArgs(proxyTestContract.address, 1); - await expect(UpgradeGatekeeperContract.cancelProxyUpgrade(proxyTestContract.address)) - .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') - .withArgs(proxyTestContract.address, 1); }); it("checking the presence in the main contract functions that will be called from the gatekeeper", async () => { From b3e769808452704a89c1ba4f475160e29a22e533 Mon Sep 17 00:00:00 2001 From: Vitaly Drogan Date: Tue, 24 Mar 2020 19:13:02 +0200 Subject: [PATCH 092/186] Apply suggestions 
from code review comment fixes Co-Authored-By: Igor Aleksanov --- core/server/src/eth_watch.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index 2cc2e28a8d..f8ce7abcd6 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -1,9 +1,9 @@ -//! Ethereum watcher polls ethereum node for new events +//! Ethereum watcher polls the Ethereum node for new events //! such as PriorityQueue events or NewToken events. -//! New events are accepted to the ZK Sync network only after sufficient confirmations. +//! New events are accepted to the ZK Sync network once they have the sufficient amount of confirmations. //! -//! Poll interval is configured using `ETH_POLL_INTERVAL` constant. -//! Number of confirmations are configured using `CONFIRMATIONS_FOR_ETH_EVENT`. +//! Poll interval is configured using the `ETH_POLL_INTERVAL` constant. +//! Number of confirmations is configured using the `CONFIRMATIONS_FOR_ETH_EVENT` constant. //! // Built-in deps @@ -50,7 +50,7 @@ pub enum EthWatchRequest { pub struct EthWatch { gov_contract: (ethabi::Contract, Contract), zksync_contract: (ethabi::Contract, Contract), - /// The last block of the ethereum network known to ethereum watcher. + /// The last block of the Ethereum network known to the Ethereum watcher. last_ethereum_block: u64, eth_state: ETHState, web3: Web3, From 3bb636709e17b33623e25792012b454d85124d00 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 24 Mar 2020 19:30:08 +0200 Subject: [PATCH 093/186] make number of confirmations configurable --- core/models/src/config_options.rs | 4 ++++ core/models/src/params.rs | 2 -- core/server/src/bin/eth_watcher.rs | 1 + core/server/src/eth_watch.rs | 18 +++++++++++------- etc/env/dev.env.example | 2 ++ 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/core/models/src/config_options.rs b/core/models/src/config_options.rs index 4658598597..74e769c91d 100644 --- a/core/models/src/config_options.rs +++ b/core/models/src/config_options.rs @@ -35,6 +35,7 @@ pub struct ConfigurationOptions { pub gas_price_factor: usize, pub tx_batch_size: usize, pub prover_server_address: SocketAddr, + pub confirmations_for_eth_event: u64, } impl ConfigurationOptions { @@ -81,6 +82,9 @@ impl ConfigurationOptions { prover_server_address: get_env("PROVER_SERVER_BIND") .parse() .expect("Failed to parse PROVER_SERVER_BIND bind address"), + confirmations_for_eth_event: get_env("CONFIRMATIONS_FOR_ETH_EVENT") + .parse() + .expect("Failed to parse CONFIRMATIONS_FOR_ETH_EVENT"), } } } diff --git a/core/models/src/params.rs b/core/models/src/params.rs index c2aa99e255..ab2b798060 100644 --- a/core/models/src/params.rs +++ b/core/models/src/params.rs @@ -123,8 +123,6 @@ pub fn max_block_chunk_size() -> usize { /// Priority op should be executed for this number of eth blocks. pub const PRIORITY_EXPIRATION: u64 = 250; pub const FR_ADDRESS_LEN: usize = 20; -/// All ethereum events are accepted after sufficient confirmations to eliminate risk of block reorg. 
-pub const CONFIRMATIONS_FOR_ETH_EVENT: u64 = 30; pub const PAD_MSG_BEFORE_HASH_BITS_LEN: usize = 736; diff --git a/core/server/src/bin/eth_watcher.rs b/core/server/src/bin/eth_watcher.rs index cc320cabef..e923523d09 100644 --- a/core/server/src/bin/eth_watcher.rs +++ b/core/server/src/bin/eth_watcher.rs @@ -35,6 +35,7 @@ fn main() { governance_addr, //priority_queue_address, contract_address, + 0, eth_req_receiver, ); diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index f8ce7abcd6..b5f4b6b30a 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -3,8 +3,7 @@ //! New events are accepted to the ZK Sync network once they have the sufficient amount of confirmations. //! //! Poll interval is configured using the `ETH_POLL_INTERVAL` constant. -//! Number of confirmations is configured using the `CONFIRMATIONS_FOR_ETH_EVENT` constant. -//! +//! Number of confirmations is configured using the `CONFIRMATIONS_FOR_ETH_EVENT` environment variable. // Built-in deps use std::collections::HashMap; @@ -24,7 +23,7 @@ use web3::{Transport, Web3}; use models::abi::{governance_contract, zksync_contract}; use models::config_options::ConfigurationOptions; use models::node::{Nonce, PriorityOp, PubKeyHash, TokenId}; -use models::params::{CONFIRMATIONS_FOR_ETH_EVENT, PRIORITY_EXPIRATION}; +use models::params::PRIORITY_EXPIRATION; use models::TokenAddedEvent; use storage::ConnectionPool; use tokio::{runtime::Runtime, time}; @@ -56,6 +55,8 @@ pub struct EthWatch { web3: Web3, _web3_event_loop_handle: EventLoopHandle, db_pool: ConnectionPool, + /// All ethereum events are accepted after sufficient confirmations to eliminate risk of block reorg. + number_of_confirmations_for_event: u64, eth_watch_req: mpsc::Receiver, } @@ -79,6 +80,7 @@ impl EthWatch { db_pool: ConnectionPool, governance_addr: H160, zksync_contract_addr: H160, + number_of_confirmations_for_event: u64, eth_watch_req: mpsc::Receiver, ) -> Self { let gov_contract = { @@ -107,6 +109,7 @@ impl EthWatch { _web3_event_loop_handle: web3_event_loop_handle, db_pool, eth_watch_req, + number_of_confirmations_for_event, } } @@ -183,7 +186,7 @@ impl EthWatch { async fn restore_state_from_eth(&mut self, current_ethereum_block: u64) { let new_block_with_accepted_events = - current_ethereum_block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); + current_ethereum_block.saturating_sub(self.number_of_confirmations_for_event); let previous_block_with_accepted_events = new_block_with_accepted_events.saturating_sub(PRIORITY_EXPIRATION); @@ -221,9 +224,9 @@ impl EthWatch { debug_assert!(self.last_ethereum_block < current_eth_block); let previous_block_with_accepted_events = - (self.last_ethereum_block + 1).saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); + (self.last_ethereum_block + 1).saturating_sub(self.number_of_confirmations_for_event); let new_block_with_accepted_events = - current_eth_block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT); + current_eth_block.saturating_sub(self.number_of_confirmations_for_event); let new_tokens = self .get_new_token_events( @@ -322,7 +325,7 @@ impl EthWatch { .expect("Block number") .as_u64(); self.last_ethereum_block = block; - self.restore_state_from_eth(block.saturating_sub(CONFIRMATIONS_FOR_ETH_EVENT as u64)) + self.restore_state_from_eth(block.saturating_sub(self.number_of_confirmations_for_event)) .await; while let Some(request) = self.eth_watch_req.next().await { @@ -385,6 +388,7 @@ pub fn start_eth_watch( pool, config_options.governance_eth_addr, config_options.contract_eth_addr, + 
config_options.confirmations_for_eth_event, eth_req_receiver, ); runtime.spawn(eth_watch.run()); diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 1d3e3ca943..98216ec482 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -85,3 +85,5 @@ ZKSYNC_ACTION=dont_ask # Used to create promotion jobs on CI. Take from CI Home page > User settings DRONE_SERVER="" DRONE_TOKEN="" + +CONFIRMATIONS_FOR_ETH_EVENT=1 From 07b80e3e972a4995fc91fe2e0f3ceb61919bfdc5 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Wed, 25 Mar 2020 01:49:24 +0200 Subject: [PATCH 094/186] Add transactions history test --- Cargo.lock | 1 + core/models/src/node/tx.rs | 9 +- core/storage/Cargo.toml | 1 + .../storage/src/tests/chain/operations_ext.rs | 186 +++++++++++++++++- 4 files changed, 191 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 05d234e2c6..1abc658bb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2996,6 +2996,7 @@ dependencies = [ "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "testkit 0.1.0", "web3 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index 420c3b20d0..5ae5e8071d 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -116,11 +116,10 @@ impl Transfer { } pub fn verify_signature(&self) -> Option { - if let Some(pub_key) = self.signature.verify_musig_sha256(&self.get_bytes()) { - Some(PubKeyHash::from_pubkey(&pub_key)) - } else { - None - } + self.signature + .verify_musig_sha256(&self.get_bytes()) + .as_ref() + .map(PubKeyHash::from_pubkey) } } diff --git a/core/storage/Cargo.toml b/core/storage/Cargo.toml index 2a3cbc2826..5ad8db9ca2 100644 --- a/core/storage/Cargo.toml +++ b/core/storage/Cargo.toml @@ -26,3 +26,4 @@ hex = "0.4" [dev-dependencies] env_logger = "0.6" +testkit = { path = "../testkit" } diff --git a/core/storage/src/tests/chain/operations_ext.rs b/core/storage/src/tests/chain/operations_ext.rs index 17d66291ac..67c5a36355 100644 --- a/core/storage/src/tests/chain/operations_ext.rs +++ b/core/storage/src/tests/chain/operations_ext.rs @@ -1,12 +1,196 @@ // Built-in imports +use std::collections::HashMap; // External imports +use bigdecimal::BigDecimal; // Workspace imports +use crypto_exports::franklin_crypto::bellman::pairing::ff::Field; +use models::node::block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; +use models::node::operations::FranklinOp; +use models::node::priority_ops::PriorityOp; +use models::node::{Deposit, DepositOp, Fr, TransferOp, WithdrawOp}; +use testkit::zksync_account::ZksyncAccount; // Local imports +use crate::tests::db_test; +use crate::StorageProcessor; /// Here we take the account transactions using `get_account_transactions` and /// check `get_account_transactions_history` to match obtained results. #[test] #[cfg_attr(not(feature = "db_test"), ignore)] fn get_account_transactions_history() { - // TODO: Add the actual test. 
+ let from_zksync_account = ZksyncAccount::rand(); + let from_account_id = 0xbabe; + let from_account_address = from_zksync_account.address; + let from_account_address_string = format!("{:?}", &from_account_address); + + let to_zksync_account = ZksyncAccount::rand(); + let to_account_id = 0xdcba; + let to_account_address = to_zksync_account.address; + let to_account_address_string = format!("{:?}", &to_account_address); + + let token = 0; + let amount = BigDecimal::from(1); + + let executed_deposit_op = { + let deposit_op = FranklinOp::Deposit(Box::new(DepositOp { + priority_op: Deposit { + from: from_account_address, + token, + amount: amount.clone(), + to: to_account_address, + }, + account_id: from_account_id, + })); + + let executed_op = ExecutedPriorityOp { + priority_op: PriorityOp { + serial_id: 0, + data: deposit_op.try_get_priority_op().unwrap(), + deadline_block: 0, + eth_fee: 0.into(), + eth_hash: b"1234567890".to_vec(), + }, + op: deposit_op, + block_index: 31, + }; + + ExecutedOperations::PriorityOp(Box::new(executed_op)) + }; + + let executed_transfer_op = { + let transfer_op = FranklinOp::Transfer(Box::new(TransferOp { + tx: from_zksync_account.sign_transfer( + token, + amount.clone(), + BigDecimal::from(0), + &to_account_address, + None, + true, + ), + from: from_account_id, + to: to_account_id, + })); + + let executed_transfer_op = ExecutedTx { + tx: transfer_op.try_get_tx().unwrap(), + success: true, + op: Some(transfer_op), + fail_reason: None, + block_index: None, + }; + + ExecutedOperations::Tx(Box::new(executed_transfer_op)) + }; + + let executed_withdraw_op = { + let withdraw_op = FranklinOp::Withdraw(Box::new(WithdrawOp { + tx: from_zksync_account.sign_withdraw( + token, + amount.clone(), + BigDecimal::from(0), + &from_account_address, + None, + true, + ), + account_id: from_account_id, + })); + + let executed_withdraw_op = ExecutedTx { + tx: withdraw_op.try_get_tx().unwrap(), + success: true, + op: Some(withdraw_op), + fail_reason: None, + block_index: None, + }; + + ExecutedOperations::Tx(Box::new(executed_withdraw_op)) + }; + + let block = Block { + block_number: 1, + new_root_hash: Fr::zero(), + fee_account: 0, + block_transactions: vec![ + executed_deposit_op, + executed_transfer_op, + executed_withdraw_op, + ], + processed_priority_ops: (0, 0), // Not important + }; + + let expected_behavior = { + let mut expected_behavior = HashMap::new(); + expected_behavior.insert( + "Deposit", + ( + Some(from_account_address_string.as_str()), + Some(to_account_address_string.as_str()), + Some(&token), + Some(amount.to_string()), + ), + ); + expected_behavior.insert( + "Transfer", + ( + Some(from_account_address_string.as_str()), + Some(to_account_address_string.as_str()), + Some(&token), + Some(amount.to_string()), + ), + ); + expected_behavior.insert( + "Withdraw", + ( + Some(from_account_address_string.as_str()), + Some(from_account_address_string.as_str()), + Some(&token), + Some(amount.to_string()), + ), + ); + expected_behavior + }; + + // execute_operation + let conn = StorageProcessor::establish_connection().unwrap(); + db_test(conn.conn(), || { + conn.chain().block_schema().save_block_transactions(block)?; + + let from_history = conn + .chain() + .operations_ext_schema() + .get_account_transactions_history(&from_account_address, 0, 10)?; + + for tx in &from_history { + let tx_type: &str = tx.tx["type"].as_str().expect("no tx_type"); + let (from, to, token, amount) = expected_behavior + .get(tx_type) + .expect("no expected behavior"); + + let tx_info = match tx_type 
{ + "Deposit" => tx.tx["priority_op"].clone(), + _ => tx.tx.clone(), + }; + let tx_from_addr = tx_info["from"].as_str(); + let tx_to_addr = tx_info["to"].as_str(); + let tx_token = tx_info["token"].as_u64().map(|x| x as u16); + let tx_amount = tx_info["amount"].as_str().map(String::from); + + assert!(tx.hash.is_some()); + + assert_eq!(tx_from_addr, *from); + assert_eq!(tx_to_addr, *to); + assert_eq!(tx_token, token.cloned()); + assert_eq!(tx_amount, *amount); + } + + let to_history = conn + .chain() + .operations_ext_schema() + .get_account_transactions_history(&to_account_address, 0, 10)?; + + assert_eq!(from_history.len(), 3); + assert_eq!(to_history.len(), 2); + + Ok(()) + }); } From 22842b7f76b4aa85f4cb626fe8390f506c0f01ef Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Wed, 25 Mar 2020 01:50:04 +0200 Subject: [PATCH 095/186] Remove '\' from sql queries to calm down cargo fmt --- core/storage/src/chain/block/mod.rs | 114 ++++++++++++++-------------- 1 file changed, 57 insertions(+), 57 deletions(-) diff --git a/core/storage/src/chain/block/mod.rs b/core/storage/src/chain/block/mod.rs index 61d9a1dc64..8c2fac09c9 100644 --- a/core/storage/src/chain/block/mod.rs +++ b/core/storage/src/chain/block/mod.rs @@ -231,33 +231,33 @@ impl<'a> BlockSchema<'a> { // and verified operations; // - collects the {limit} blocks in the descending order with the data gathered above. let query = format!( - " \ - with eth_ops as ( \ - select \ - operations.block_number, \ - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ - operations.action_type, \ - operations.created_at \ - from operations \ - left join eth_operations on eth_operations.op_id = operations.id \ - ) \ - select \ - blocks.number as block_number, \ - blocks.root_hash as new_state_root, \ - blocks.block_size as block_size, \ - committed.tx_hash as commit_tx_hash, \ - verified.tx_hash as verify_tx_hash, \ - committed.created_at as committed_at, \ - verified.created_at as verified_at \ - from blocks \ - inner join eth_ops committed on \ - committed.block_number = blocks.number and committed.action_type = 'COMMIT' \ - left join eth_ops verified on \ - verified.block_number = blocks.number and verified.action_type = 'VERIFY' \ - where \ - blocks.number <= {max_block} \ - order by blocks.number desc \ - limit {limit}; \ + " + with eth_ops as ( + select + operations.block_number, + '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, + operations.action_type, + operations.created_at + from operations + left join eth_operations on eth_operations.op_id = operations.id + ) + select + blocks.number as block_number, + blocks.root_hash as new_state_root, + blocks.block_size as block_size, + committed.tx_hash as commit_tx_hash, + verified.tx_hash as verify_tx_hash, + committed.created_at as committed_at, + verified.created_at as verified_at + from blocks + inner join eth_ops committed on + committed.block_number = blocks.number and committed.action_type = 'COMMIT' + left join eth_ops verified on + verified.block_number = blocks.number and verified.action_type = 'VERIFY' + where + blocks.number <= {max_block} + order by blocks.number desc + limit {limit}; ", max_block = i64::from(max_block), limit = i64::from(limit) @@ -286,36 +286,36 @@ impl<'a> BlockSchema<'a> { // + query equals to the state hash obtained in the block (in form of `sync-bl:00{..}00`); // + query equals to the number of the block. 
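[Editor's note] The backslashes dropped by this patch are Rust string-continuation escapes: with them the SQL literal is folded onto a single line, without them the newlines stay in the string, which Postgres accepts just as well. A small self-contained illustration of the difference (not project code):

    fn main() {
        // A trailing backslash removes the newline and the next line's leading
        // whitespace from the literal; without it the line breaks are kept.
        let folded = "select 1 \
                      + 2;";
        let multiline = "select 1
                      + 2;";
        assert_eq!(folded, "select 1 + 2;");
        assert!(multiline.contains('\n'));
        assert_ne!(folded, multiline);
    }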
let sql_query = format!( - " \ - with eth_ops as ( \ - select \ - operations.block_number, \ - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ - operations.action_type, \ - operations.created_at \ - from operations \ - left join eth_operations on eth_operations.op_id = operations.id \ - ) \ - select \ - blocks.number as block_number, \ - blocks.root_hash as new_state_root, \ - blocks.block_size as block_size, \ - committed.tx_hash as commit_tx_hash, \ - verified.tx_hash as verify_tx_hash, \ - committed.created_at as committed_at, \ - verified.created_at as verified_at \ - from blocks \ - inner join eth_ops committed on \ - committed.block_number = blocks.number and committed.action_type = 'COMMIT' \ - left join eth_ops verified on \ - verified.block_number = blocks.number and verified.action_type = 'VERIFY' \ - where false \ - or lower(committed.tx_hash) = $1 \ - or lower(verified.tx_hash) = $1 \ - or lower(blocks.root_hash) = $1 \ - or blocks.number = {block_number} \ - order by blocks.number desc \ - limit 1; \ + " + with eth_ops as ( + select + operations.block_number, + '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, + operations.action_type, + operations.created_at + from operations + left join eth_operations on eth_operations.op_id = operations.id + ) + select + blocks.number as block_number, + blocks.root_hash as new_state_root, + blocks.block_size as block_size, + committed.tx_hash as commit_tx_hash, + verified.tx_hash as verify_tx_hash, + committed.created_at as committed_at, + verified.created_at as verified_at + from blocks + inner join eth_ops committed on + committed.block_number = blocks.number and committed.action_type = 'COMMIT' + left join eth_ops verified on + verified.block_number = blocks.number and verified.action_type = 'VERIFY' + where false + or lower(committed.tx_hash) = $1 + or lower(verified.tx_hash) = $1 + or lower(blocks.root_hash) = $1 + or blocks.number = {block_number} + order by blocks.number desc + limit 1; ", block_number = block_number ); From 971457f7efdb1aa159c8c67d934d63eac491f75c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 08:46:37 +0300 Subject: [PATCH 096/186] Make amount of txs in flight configurable & remove several unused methods --- core/server/src/eth_sender/mod.rs | 19 ++++++++--- core/server/src/eth_sender/tests/mock.rs | 16 ++++++--- .../src/eth_sender/tx_queue/counter_queue.rs | 10 ++---- core/server/src/eth_sender/tx_queue/mod.rs | 20 +++-------- .../src/eth_sender/tx_queue/sparse_queue.rs | 34 ++++++++----------- etc/env/dev.env.example | 3 ++ 6 files changed, 49 insertions(+), 53 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 1b40ac5f36..5bd99b25ef 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -5,6 +5,7 @@ // Built-in deps use std::collections::VecDeque; +use std::str::FromStr; use std::time::Duration; // External uses use futures::channel::mpsc; @@ -106,13 +107,12 @@ struct ETHSender { impl ETHSender { pub fn new( + max_txs_in_flight: usize, db: DB, ethereum: ETH, rx_for_eth: mpsc::Receiver, op_notify: mpsc::Sender, ) -> Self { - const MAX_TXS_IN_FLIGHT: usize = 1; // TODO: Should be configurable. 
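[Editor's note] The hard-coded limit removed here is replaced by a value read from the environment in `start_eth_sender` further down in this patch. A minimal sketch of that read-and-parse step, using only the standard library (the helper function itself is hypothetical, the variable name is taken from the diff):

    use std::str::FromStr;

    /// Reads `ETH_MAX_TXS_IN_FLIGHT` and parses it into a `usize`, panicking
    /// with a descriptive message if it is missing or malformed.
    fn max_txs_in_flight() -> usize {
        let raw = std::env::var("ETH_MAX_TXS_IN_FLIGHT")
            .expect("ETH_MAX_TXS_IN_FLIGHT env variable missing");
        usize::from_str(&raw).expect("ETH_MAX_TXS_IN_FLIGHT env variable has invalid value")
    }

    fn main() {
        std::env::set_var("ETH_MAX_TXS_IN_FLIGHT", "3");
        assert_eq!(max_txs_in_flight(), 3);
    }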
- let ongoing_ops: VecDeque<_> = db .restore_state() .expect("Failed loading unconfirmed operations from the storage") @@ -123,7 +123,7 @@ impl ETHSender { .load_stats() .expect("Failed loading ETH operations stats"); - let tx_queue = TxQueueBuilder::new(MAX_TXS_IN_FLIGHT) + let tx_queue = TxQueueBuilder::new(max_txs_in_flight) .with_sent_pending_txs(ongoing_ops.len()) .with_commit_operations_count(stats.commit_ops) .with_verify_operations_count(stats.verify_ops) @@ -550,6 +550,11 @@ pub fn start_eth_sender( send_requst_receiver: mpsc::Receiver, config_options: ConfigurationOptions, ) { + let max_txs_in_flight = + std::env::var("ETH_MAX_TXS_IN_FLIGHT").expect("ETH_MAX_TXS_IN_FLIGHT env variable missing"); + let max_txs_in_flight = usize::from_str(&max_txs_in_flight) + .expect("ETH_MAX_TXS_IN_FLIGHT env variable has invalid value"); + std::thread::Builder::new() .name("eth_sender".to_string()) .spawn(move || { @@ -561,7 +566,13 @@ pub fn start_eth_sender( let db = Database::new(pool); let mut runtime = Runtime::new().expect("eth-sender-runtime"); - let eth_sender = ETHSender::new(db, ethereum, send_requst_receiver, op_notify_sender); + let eth_sender = ETHSender::new( + max_txs_in_flight, + db, + ethereum, + send_requst_receiver, + op_notify_sender, + ); runtime.block_on(eth_sender.run()); }) .expect("Eth sender thread"); diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 267f96d3cf..7d1e7f5c74 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -314,17 +314,23 @@ pub(super) fn restored_eth_sender( mpsc::Sender, mpsc::Receiver, ) { + const MAX_TXS_IN_FLIGHT: usize = 1; + let ethereum = MockEthereum::default(); let db = MockDatabase::with_restorable_state(restore_state, stats); let (operation_sender, operation_receiver) = mpsc::channel(CHANNEL_CAPACITY); let (notify_sender, notify_receiver) = mpsc::channel(CHANNEL_CAPACITY); - ( - ETHSender::new(db, ethereum, operation_receiver, notify_sender), - operation_sender, - notify_receiver, - ) + let eth_sender = ETHSender::new( + MAX_TXS_IN_FLIGHT, + db, + ethereum, + operation_receiver, + notify_sender, + ); + + (eth_sender, operation_sender, notify_receiver) } /// Behaves the same as `ETHSender::sign_new_tx`, but does not affect nonce. diff --git a/core/server/src/eth_sender/tx_queue/counter_queue.rs b/core/server/src/eth_sender/tx_queue/counter_queue.rs index 0a146ce761..734735b9b5 100644 --- a/core/server/src/eth_sender/tx_queue/counter_queue.rs +++ b/core/server/src/eth_sender/tx_queue/counter_queue.rs @@ -19,14 +19,8 @@ impl Default for CounterQueue { } impl CounterQueue { - /// Creates a new empty counter queue. - pub fn new() -> Self { - Self::default() - } - /// Creates a new empty counter queue with the custom number of processed elements. - /// This method is used to restore the state of the queue. - pub fn new_with_count(counter: usize) -> Self { + pub fn new(counter: usize) -> Self { Self { counter, ..Default::default() @@ -65,7 +59,7 @@ mod tests { /// Checks the main operations of the queue: `push_back`, `pop_front` and `get_count`. #[test] fn basic_operations() { - let mut queue: CounterQueue = CounterQueue::new(); + let mut queue: CounterQueue = CounterQueue::new(0); // Check that by default the current count is 0. 
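[Editor's note] The counter queue reworked in this patch is just a FIFO that remembers how many elements have ever been popped, so the count can be seeded when state is restored from the database. A toy standalone version for illustration (names are made up, this is not the project type):

    use std::collections::VecDeque;

    struct CountingFifo<T> {
        inner: VecDeque<T>,
        popped: usize,
    }

    impl<T> CountingFifo<T> {
        /// `already_popped` seeds the counter when restoring state.
        fn new(already_popped: usize) -> Self {
            Self { inner: VecDeque::new(), popped: already_popped }
        }

        fn push_back(&mut self, item: T) {
            self.inner.push_back(item);
        }

        fn pop_front(&mut self) -> Option<T> {
            let item = self.inner.pop_front();
            if item.is_some() {
                self.popped += 1;
            }
            item
        }

        fn get_count(&self) -> usize {
            self.popped
        }
    }

    fn main() {
        // Restoring with `new(5)` mirrors seeding the counter from stored stats.
        let mut queue = CountingFifo::new(5);
        queue.push_back("commit");
        queue.pop_front();
        assert_eq!(queue.get_count(), 6);
    }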
assert_eq!(queue.get_count(), 0); diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 423c9da466..379723ae6c 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -106,9 +106,9 @@ impl TxQueueBuilder { max_pending_txs: self.max_pending_txs, sent_pending_txs: self.sent_pending_txs, - commit_operations: CounterQueue::new_with_count(self.commit_operations_count), - verify_operations: SparseQueue::new_from(verify_operations_next_block), - withdraw_operations: CounterQueue::new_with_count(self.withdraw_operations_count), + commit_operations: CounterQueue::new(self.commit_operations_count), + verify_operations: SparseQueue::new(verify_operations_next_block), + withdraw_operations: CounterQueue::new(self.withdraw_operations_count), } } } @@ -138,18 +138,6 @@ pub struct TxQueue { } impl TxQueue { - /// Creates a new empty transactions queue. - pub fn new(max_pending_txs: usize) -> Self { - Self { - max_pending_txs, - sent_pending_txs: 0, - - commit_operations: CounterQueue::new(), - verify_operations: SparseQueue::new_from(1), // Blocks are starting from the index 1. - withdraw_operations: CounterQueue::new(), - } - } - /// Adds the `commit` operation to the queue. pub fn add_commit_operation(&mut self, commit_operation: TxData) { self.commit_operations.push_back(commit_operation); @@ -241,7 +229,7 @@ mod tests { const VERIFY_MARK: u8 = 1; const WITHDRAW_MARK: u8 = 2; - let mut queue = TxQueue::new(MAX_IN_FLY); + let mut queue = TxQueueBuilder::new(MAX_IN_FLY).build(); // Add 2 commit, 2 verify and 2 withdraw operations. queue.add_commit_operation(TxData::from_raw( diff --git a/core/server/src/eth_sender/tx_queue/sparse_queue.rs b/core/server/src/eth_sender/tx_queue/sparse_queue.rs index 87f72e174d..b30947d508 100644 --- a/core/server/src/eth_sender/tx_queue/sparse_queue.rs +++ b/core/server/src/eth_sender/tx_queue/sparse_queue.rs @@ -9,30 +9,24 @@ use std::{collections::HashMap, fmt}; /// N and (N + 1) elements. #[derive(Debug)] pub struct SparseQueue { - current_idx: usize, + next_expected_idx: usize, elements: HashMap, } impl Default for SparseQueue { fn default() -> Self { Self { - current_idx: 0, + next_expected_idx: 0, elements: HashMap::new(), } } } impl SparseQueue { - /// Creates a new empty sparse queue. - pub fn new() -> Self { - Self::default() - } - - /// Creates a new empty sparse queue with the custom expected next ID. - /// This method is used to restore the state of the queue. - pub fn new_from(idx: usize) -> Self { + /// Creates a new empty sparse queue with the custom next expected element ID. + pub fn new(next_expected_idx: usize) -> Self { Self { - current_idx: idx, + next_expected_idx, ..Default::default() } } @@ -40,7 +34,7 @@ impl SparseQueue { /// Inserts an element to the queue given its index. pub fn insert(&mut self, idx: usize, element: T) { assert!( - idx >= self.current_idx, + idx >= self.next_expected_idx, "Can't insert the element with index lower than the next expected one" ); self.elements.insert(idx, element); @@ -50,9 +44,9 @@ impl SparseQueue { /// if either the queue is empty, or the next expected element is yet /// missing in the queue. 
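[Editor's note] The renamed `next_expected_idx` field is the whole trick behind the sparse queue: elements may be inserted out of order, but they are only handed out once the next expected index is present. A compact standalone sketch of that behaviour (illustrative names, simplified relative to the real type):

    use std::collections::HashMap;

    struct IndexedQueue<T> {
        next_expected_idx: usize,
        elements: HashMap<usize, T>,
    }

    impl<T> IndexedQueue<T> {
        fn new(next_expected_idx: usize) -> Self {
            Self { next_expected_idx, elements: HashMap::new() }
        }

        fn insert(&mut self, idx: usize, element: T) {
            assert!(idx >= self.next_expected_idx, "index below the next expected one");
            self.elements.insert(idx, element);
        }

        /// Returns `None` until the element with the next expected index arrives.
        fn pop_front(&mut self) -> Option<T> {
            let element = self.elements.remove(&self.next_expected_idx)?;
            self.next_expected_idx += 1;
            Some(element)
        }
    }

    fn main() {
        let mut queue = IndexedQueue::new(10);
        queue.insert(11, "eleven");
        assert_eq!(queue.pop_front(), None); // element 10 has not arrived yet
        queue.insert(10, "ten");
        assert_eq!(queue.pop_front(), Some("ten"));
        assert_eq!(queue.pop_front(), Some("eleven"));
    }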
pub fn pop_front(&mut self) -> Option { - match self.elements.remove(&self.current_idx) { + match self.elements.remove(&self.next_expected_idx) { Some(value) => { - self.current_idx += 1; + self.next_expected_idx += 1; Some(value) } None => None, @@ -63,12 +57,12 @@ impl SparseQueue { /// Returns `true` if the next expected element exists in the queue, /// and returns `false` otherwise. pub fn has_next(&self) -> bool { - self.elements.contains_key(&self.current_idx) + self.elements.contains_key(&self.next_expected_idx) } /// Returns the next expected element ID. pub fn next_id(&self) -> usize { - self.current_idx + self.next_expected_idx } } @@ -79,7 +73,7 @@ mod tests { /// Checks the main operations of the queue: `insert`, `pop_front` and `has_next`. #[test] fn basic_operations() { - let mut queue: SparseQueue = SparseQueue::new(); + let mut queue: SparseQueue = SparseQueue::new(0); // Insert the next element and obtain it. queue.insert(0, "zero".into()); @@ -99,11 +93,11 @@ mod tests { assert_eq!(queue.pop_front().unwrap(), "two"); } - /// Checks that we can use the difference `current_idx` as the custom + /// Checks that we can use the difference `next_expected_idx` as the custom /// queue start point. #[test] fn different_start_point() { - let mut queue: SparseQueue = SparseQueue::new_from(10); + let mut queue: SparseQueue = SparseQueue::new(10); // Check that by default the queue is empty. assert!(!queue.has_next()); @@ -119,7 +113,7 @@ mod tests { #[test] #[should_panic] fn add_out_of_order_element() { - let mut queue: SparseQueue = SparseQueue::new_from(10); + let mut queue: SparseQueue = SparseQueue::new(10); // Insert the element with too low index. queue.insert(0, "zero".into()); } diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 90447edb12..6875cdf792 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -35,6 +35,9 @@ DATABASE_URL=postgres://postgres@localhost/plasma DB_POOL_SIZE=10 +# The maximum amount of simultaneously sent Ethereum transactions. +ETH_MAX_TXS_IN_FLIGHT=3 + PADDING_PUB_KEY="[\"0x18936d8e5f18dc41425e85a25d7a76f63715be4b3c9fac18475d028fca64c740\", \"0x0f933c18160257e0aa54056652e6bc2b8673b31c80cda933421f99dada946bf4\"]" FROM_BLOCK=0 BLOCK_DELAY=0 From d386aebdb248b13124513f7cfe9bbc3d637628ea Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 08:57:20 +0300 Subject: [PATCH 097/186] Restore operations_order test --- core/server/src/eth_sender/tests/mock.rs | 7 - core/server/src/eth_sender/tests/mod.rs | 220 +++++++++++------------ 2 files changed, 107 insertions(+), 120 deletions(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 7d1e7f5c74..3e2ef692fa 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -64,13 +64,6 @@ impl MockDatabase { assert!(self.confirmed_operations.borrow().get(&tx.id).is_none()); } - /// Ensures that the provided transaction is not stored in the database. - pub fn assert_not_stored(&self, tx: ÐOperation) { - assert!(self.confirmed_operations.borrow().get(&tx.id).is_none()); - - assert!(self.unconfirmed_operations.borrow().get(&tx.id).is_none()); - } - /// Ensures that the provided transaction is stored as confirmed. 
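[Editor's note] The mock database assertion helpers around this point rely on interior mutability so that `&self` methods can still record and inspect state. A stripped-down standalone illustration of that pattern (not the real mock, types are simplified):

    use std::cell::RefCell;
    use std::collections::HashMap;

    #[derive(Default)]
    struct MockStore {
        confirmed: RefCell<HashMap<i64, String>>,
    }

    impl MockStore {
        /// Records a confirmed operation through a shared reference.
        fn confirm(&self, id: i64, tx_hash: &str) {
            self.confirmed.borrow_mut().insert(id, tx_hash.to_string());
        }

        /// Asserts that the operation was recorded as confirmed.
        fn assert_confirmed(&self, id: i64, tx_hash: &str) {
            assert_eq!(
                self.confirmed.borrow().get(&id).map(String::as_str),
                Some(tx_hash)
            );
        }
    }

    fn main() {
        let store = MockStore::default();
        store.confirm(1, "0xdead");
        store.assert_confirmed(1, "0xdead");
    }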
pub fn assert_confirmed(&self, tx: ÐOperation) { assert_eq!(self.confirmed_operations.borrow().get(&tx.id), Some(tx)); diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index c50e911f12..1d172b8f29 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -3,9 +3,10 @@ use web3::contract::Options; // Workspace uses use models::ethereum::ETHOperation; // Local uses -use self::mock::{create_signed_tx, default_eth_sender, restored_eth_sender}; +use self::mock::{ + create_signed_tx, create_signed_withdraw_tx, default_eth_sender, restored_eth_sender, +}; use super::{ - database::DatabaseAccess, ethereum_interface::EthereumInterface, transactions::{ETHStats, ExecutedTxStatus, TxCheckOutcome}, ETHSender, TxCheckMode, @@ -321,117 +322,110 @@ fn stuck_transaction() { eth_sender.db.assert_confirmed(&stuck_tx); } -// TODO: Restore once withdraw operations are fixed in `eth_sender`. -// Currently this test is too hard to implement, since withdraw txs are not stored in the database. -// /// This test verifies that with multiple operations received all-together, -// /// their order is respected and no processing of the next operation is started until -// /// the previous one is committed. -// #[test] -// fn operations_order() { -// let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); - -// // We send multiple the operations at once to the channel. -// let operations_count = 3; -// let mut operations = Vec::new(); -// let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; -// let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; -// operations.extend_from_slice(commit_operations); -// operations.extend_from_slice(verify_operations); - -// // Also we create the list of expected transactions. -// let mut expected_txs = Vec::new(); - -// // Create expected txs from all the operations. -// for (idx, (commit_operation, verify_operation)) in -// commit_operations.iter().zip(verify_operations).enumerate() -// { -// // Create the commit operation. -// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; -// let deadline_block = eth_sender.get_deadline_block(start_block); -// let eth_op_idx = (idx * 3) as i64; -// let nonce = eth_op_idx; - -// let mut commit_op_tx = create_signed_tx( -// eth_op_idx, -// ð_sender, -// commit_operation, -// deadline_block, -// nonce, -// ); - -// expected_txs.push(commit_op_tx); - -// // Create the verify operation, as by priority it will be processed right after `commit`. -// let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; -// let deadline_block = eth_sender.get_deadline_block(start_block); -// let eth_op_idx = (idx * 3 + 1) as i64; -// let nonce = eth_op_idx; - -// let mut verify_op_tx = create_signed_tx( -// eth_op_idx, -// ð_sender, -// verify_operation, -// deadline_block, -// nonce, -// ); - -// expected_txs.push(verify_op_tx); - -// // Create the withdraw operation. -// } - -// for operation in operations.iter() { -// sender.try_send(operation.clone()).unwrap(); -// } -// eth_sender.retrieve_operations(); - -// // Then we go through the operations and check that the order of operations is preserved. -// for (idx, tx) in expected_txs.iter().enumerate() { -// eth_sender.proceed_next_operations(); - -// // Check that current expected tx is stored, but the next ones are not. 
-// eth_sender.db.assert_stored(tx); -// eth_sender.ethereum.assert_sent(tx); - -// for following_tx in expected_txs[idx + 1..].iter() { -// eth_sender.db.assert_not_stored(following_tx) -// } - -// eth_sender -// .ethereum -// .add_successfull_execution(tx.signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); -// eth_sender.db.assert_confirmed(tx); - -// if idx % 2 == 1 { -// // For every verify operation, we should also add a withdraw operation and process it. -// let raw_tx = eth_sender.ethereum.encode_tx_data( -// "completeWithdrawals", -// models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, -// ); - -// let nonce = (idx / 2) * 3 + 2; -// let mut options = Options::default(); -// options.nonce = Some(nonce.into()); - -// let signed_tx = eth_sender -// .ethereum -// .sign_prepared_tx(raw_tx, options) -// .unwrap(); - -// eth_sender -// .ethereum -// .add_successfull_execution(signed_tx.hash, super::WAIT_CONFIRMATIONS); -// eth_sender.proceed_next_operations(); -// eth_sender.proceed_next_operations(); -// } -// } - -// // We should be notified about all the verify operations being completed. -// for _ in 0..operations_count { -// assert!(receiver.try_next().unwrap().is_some()); -// } -// } +/// This test verifies that with multiple operations received all-together, +/// their order is respected and no processing of the next operation is started until +/// the previous one is committed. +/// +/// This test includes all three operation types (commit, verify and withdraw). +#[test] +fn operations_order() { + let (mut eth_sender, mut sender, mut receiver) = default_eth_sender(); + + // We send multiple the operations at once to the channel. + let operations_count = 3; + let mut operations = Vec::new(); + let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; + let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; + operations.extend_from_slice(commit_operations); + operations.extend_from_slice(verify_operations); + + // Also we create the list of expected transactions. + let mut expected_txs = Vec::new(); + + // Create expected txs from all the operations. + // Since we create 3 operations at each cycle iteration, + // the logic of ID calculating is (i * 3), (i * 3 + 1), (i * 3 + 2). + // On the first iteration the indices 0, 1 and 2 will be taken, then it + // will be 3, 4 and 5, etc. + for (idx, (commit_operation, verify_operation)) in + commit_operations.iter().zip(verify_operations).enumerate() + { + // Create the commit operation. + let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + let eth_op_idx = (idx * 3) as i64; + let nonce = eth_op_idx; + + let commit_op_tx = create_signed_tx( + eth_op_idx, + ð_sender, + commit_operation, + deadline_block, + nonce, + ); + + expected_txs.push(commit_op_tx); + + // Create the verify operation, as by priority it will be processed right after `commit`. + let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + let eth_op_idx = (idx * 3 + 1) as i64; + let nonce = eth_op_idx; + + let verify_op_tx = create_signed_tx( + eth_op_idx, + ð_sender, + verify_operation, + deadline_block, + nonce, + ); + + expected_txs.push(verify_op_tx); + + // Create the withdraw operation. 
+ let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + let eth_op_idx = (idx * 3 + 2) as i64; + let nonce = eth_op_idx; + + let withdraw_op_tx = + create_signed_withdraw_tx(eth_op_idx, ð_sender, deadline_block, nonce); + + expected_txs.push(withdraw_op_tx); + } + + for operation in operations.iter() { + sender.try_send(operation.clone()).unwrap(); + } + eth_sender.retrieve_operations(); + + // Then we go through the operations and check that the order of operations is preserved. + for mut tx in expected_txs.into_iter() { + let current_tx_hash = tx.used_tx_hashes[0]; + + eth_sender.proceed_next_operations(); + + // Check that current expected tx is stored. + eth_sender.db.assert_stored(&tx); + eth_sender.ethereum.assert_sent(¤t_tx_hash); + + // Mark the tx as successfully + eth_sender + .ethereum + .add_successfull_execution(current_tx_hash, super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + + // Update the fields in the tx and check if it's confirmed. + tx.confirmed = true; + tx.final_hash = Some(current_tx_hash); + eth_sender.db.assert_confirmed(&tx); + } + + // We should be notified about all the verify operations being completed. + for _ in 0..operations_count { + assert!(receiver.try_next().unwrap().is_some()); + } +} /// Check that upon a transaction failure the incident causes a panic by default. #[test] From d4b7c028db9f55a9dd8d87cd886b108d2fe2d689 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 09:02:19 +0300 Subject: [PATCH 098/186] Improve operation_commitment_workflow test --- core/server/src/eth_sender/tests/mod.rs | 36 +++++++++++++++---------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 1d172b8f29..1b327ef9ed 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -1,5 +1,4 @@ // External uses -use web3::contract::Options; // Workspace uses use models::ethereum::ETHOperation; // Local uses @@ -7,7 +6,6 @@ use self::mock::{ create_signed_tx, create_signed_withdraw_tx, default_eth_sender, restored_eth_sender, }; use super::{ - ethereum_interface::EthereumInterface, transactions::{ETHStats, ExecutedTxStatus, TxCheckOutcome}, ETHSender, TxCheckMode, }; @@ -252,20 +250,30 @@ fn operation_commitment_workflow() { eth_sender.db.assert_confirmed(&expected_tx); } - // Process the next operation and check that `completeWithdrawals` transaction is sent. + // Process the next operation and check that `completeWithdrawals` transaction is stored and sent. eth_sender.proceed_next_operations(); - let mut options = Options::default(); - let nonce = operations.len().into(); - options.nonce = Some(nonce); - let raw_tx = eth_sender.ethereum.encode_tx_data( - "completeWithdrawals", - models::node::config::MAX_WITHDRAWALS_TO_COMPLETE_IN_A_CALL, - ); - let tx = eth_sender + + let eth_op_idx = operations.len() as i64; + let nonce = eth_op_idx; + let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); + let mut withdraw_op_tx = + create_signed_withdraw_tx(eth_op_idx, ð_sender, deadline_block, nonce); + + eth_sender.db.assert_stored(&withdraw_op_tx); + eth_sender .ethereum - .sign_prepared_tx(raw_tx, options) - .unwrap(); - eth_sender.ethereum.assert_sent(&tx.hash); + .assert_sent(&withdraw_op_tx.used_tx_hashes[0]); + + // Mark `completeWithdrawals` as completed. 
+ eth_sender + .ethereum + .add_successfull_execution(withdraw_op_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + eth_sender.proceed_next_operations(); + + // Check that `completeWithdrawals` is completed in the DB. + withdraw_op_tx.confirmed = true; + withdraw_op_tx.final_hash = Some(withdraw_op_tx.used_tx_hashes[0]); + eth_sender.db.assert_confirmed(&withdraw_op_tx); // We should be notified about verify operation being completed. assert_eq!( From b4e52ae90aae203acd1a5e4b565532e2e3484c31 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 09:44:43 +0300 Subject: [PATCH 099/186] Add the test for the concurrent txs sending --- core/server/src/eth_sender/tests/mock.rs | 30 ++++- core/server/src/eth_sender/tests/mod.rs | 148 ++++++++++++++++++++++- 2 files changed, 175 insertions(+), 3 deletions(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 3e2ef692fa..744cfd0151 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -294,7 +294,20 @@ pub(super) fn default_eth_sender() -> ( mpsc::Sender, mpsc::Receiver, ) { - restored_eth_sender(Vec::new(), Default::default()) + build_eth_sender(1, Vec::new(), Default::default()) +} + +/// Creates an `ETHSender` with mock Ethereum connection/database and no operations in DB +/// which supports multiple transactions in flight. +/// Returns the `ETHSender` itself along with communication channels to interact with it. +pub(super) fn concurrent_eth_sender( + max_txs_in_flight: usize, +) -> ( + ETHSender, + mpsc::Sender, + mpsc::Receiver, +) { + build_eth_sender(max_txs_in_flight, Vec::new(), Default::default()) } /// Creates an `ETHSender` with mock Ethereum connection/database and restores its state "from DB". @@ -309,6 +322,19 @@ pub(super) fn restored_eth_sender( ) { const MAX_TXS_IN_FLIGHT: usize = 1; + build_eth_sender(MAX_TXS_IN_FLIGHT, restore_state, stats) +} + +/// Helper method for configurable creation of `ETHSender`. +fn build_eth_sender( + max_txs_in_flight: usize, + restore_state: impl IntoIterator, + stats: ETHStats, +) -> ( + ETHSender, + mpsc::Sender, + mpsc::Receiver, +) { let ethereum = MockEthereum::default(); let db = MockDatabase::with_restorable_state(restore_state, stats); @@ -316,7 +342,7 @@ pub(super) fn restored_eth_sender( let (notify_sender, notify_receiver) = mpsc::channel(CHANNEL_CAPACITY); let eth_sender = ETHSender::new( - MAX_TXS_IN_FLIGHT, + max_txs_in_flight, db, ethereum, operation_receiver, diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 1b327ef9ed..70fd173bf7 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -3,7 +3,8 @@ use models::ethereum::ETHOperation; // Local uses use self::mock::{ - create_signed_tx, create_signed_withdraw_tx, default_eth_sender, restored_eth_sender, + concurrent_eth_sender, create_signed_tx, create_signed_withdraw_tx, default_eth_sender, + restored_eth_sender, }; use super::{ transactions::{ETHStats, ExecutedTxStatus, TxCheckOutcome}, @@ -566,3 +567,148 @@ fn confirmations_independence() { stuck_tx.final_hash = Some(stuck_tx.used_tx_hashes[0]); eth_sender.db.assert_confirmed(&stuck_tx); } + +/// This test is the same as `operations_order`, but configures ETH sender +/// to use 3 transactions in flight, and checks that they are being sent concurrently. 
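[Editor's note] The concurrent test below leans on the fact that every sent transaction takes the next nonce, so even with several transactions in flight at once they can only be mined in submission order. A tiny sketch of that bookkeeping (illustrative only; in the tests the nonce comes from the mock Ethereum):

    struct NonceCounter {
        next: u64,
    }

    impl NonceCounter {
        /// Hands out strictly increasing nonces, one per sent transaction.
        fn assign(&mut self) -> u64 {
            let nonce = self.next;
            self.next += 1;
            nonce
        }
    }

    fn main() {
        let mut nonces = NonceCounter { next: 0 };
        let in_flight: Vec<u64> = (0..3).map(|_| nonces.assign()).collect();
        // Three transactions in flight at once, still strictly ordered by nonce.
        assert_eq!(in_flight, vec![0, 1, 2]);
    }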
+#[test] +fn concurrent_operations_order() { + const MAX_TXS_IN_FLIGHT: usize = 3; + let (mut eth_sender, mut sender, mut receiver) = concurrent_eth_sender(MAX_TXS_IN_FLIGHT); + + // We send multiple the operations at once to the channel. + let operations_count = 3; + let commit_operations = &test_data::COMMIT_OPERATIONS[..operations_count]; + let verify_operations = &test_data::VERIFY_OPERATIONS[..operations_count]; + + // Also we create the list of expected transactions. + let mut expected_txs = Vec::new(); + + // Create expected txs from all the operations. + // Since we create 3 operations at each cycle iteration, + // the logic of ID calculating is (i * 3), (i * 3 + 1), (i * 3 + 2). + // On the first iteration the indices 0, 1 and 2 will be taken, then it + // will be 3, 4 and 5, etc. + for (idx, (commit_operation, verify_operation)) in + commit_operations.iter().zip(verify_operations).enumerate() + { + // Commit/verify transactions from one iteration will be sent concurrently, + // thus the deadline block is the same for them. + // However, withdraw operation will be sent after these txs are confirmed, + // so it will have a different deadline block, + let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + + // Create the commit operation. + let eth_op_idx = (idx * 3) as i64; + let nonce = eth_op_idx; + + let commit_op_tx = create_signed_tx( + eth_op_idx, + ð_sender, + commit_operation, + deadline_block, + nonce, + ); + + expected_txs.push(commit_op_tx); + + // Create the verify operation, as by priority it will be processed right after `commit`. + let eth_op_idx = (idx * 3 + 1) as i64; + let nonce = eth_op_idx; + + let verify_op_tx = create_signed_tx( + eth_op_idx, + ð_sender, + verify_operation, + deadline_block, + nonce, + ); + + expected_txs.push(verify_op_tx); + + // Create the withdraw operation. + let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; + let deadline_block = eth_sender.get_deadline_block(start_block); + let eth_op_idx = (idx * 3 + 2) as i64; + let nonce = eth_op_idx; + + let withdraw_op_tx = + create_signed_withdraw_tx(eth_op_idx, ð_sender, deadline_block, nonce); + + expected_txs.push(withdraw_op_tx); + } + + // Pair commit/verify operations. + let mut operations_iter = commit_operations.iter().zip(verify_operations); + + // Then we go through the operations and check that the order of operations is preserved. + // Here we take N txs at each interaction. + for txs in expected_txs.chunks(MAX_TXS_IN_FLIGHT) { + // We send operations by two, so the order will be "commit-verify-withdraw". + // If we'll send all the operations together, the order will be "commit-verify-commit-verify-withdraw", + // since withdraw is only sent after verify operation is confirmed. + let (commit_op, verify_op) = operations_iter.next().unwrap(); + sender.try_send(commit_op.clone()).unwrap(); + sender.try_send(verify_op.clone()).unwrap(); + eth_sender.retrieve_operations(); + + // Call `proceed_next_operations`. Several txs should be sent. + eth_sender.proceed_next_operations(); + + let commit_tx = &txs[0]; + let verify_tx = &txs[1]; + let mut withdraw_tx = txs[2].clone(); + + // Check that commit/verify txs are sent and add the successful execution for them. + for tx in vec![commit_tx, verify_tx] { + let current_tx_hash = tx.used_tx_hashes[0]; + + // Check that current expected tx is stored. 
+ eth_sender.db.assert_stored(&tx); + eth_sender.ethereum.assert_sent(¤t_tx_hash); + + // Mark the tx as successfully + eth_sender + .ethereum + .add_successfull_execution(current_tx_hash, super::WAIT_CONFIRMATIONS); + } + + // Call `proceed_next_operations` again. Both txs should become confirmed. + eth_sender.proceed_next_operations(); + + for tx in vec![commit_tx, verify_tx] { + let mut tx = tx.clone(); + let current_tx_hash = tx.used_tx_hashes[0]; + + // Update the fields in the tx and check if it's confirmed. + tx.confirmed = true; + tx.final_hash = Some(current_tx_hash); + eth_sender.db.assert_confirmed(&tx); + } + + // Now, the withdraw operation should be taken from the queue, and + // sent to the Ethereum. + eth_sender.proceed_next_operations(); + + let withdraw_tx_hash = withdraw_tx.used_tx_hashes[0]; + eth_sender.db.assert_stored(&withdraw_tx); + eth_sender.ethereum.assert_sent(&withdraw_tx_hash); + + // Mark the tx as successfully + eth_sender + .ethereum + .add_successfull_execution(withdraw_tx_hash, super::WAIT_CONFIRMATIONS); + + // Call `proceed_next_operations` again. Withdraw tx should become confirmed. + eth_sender.proceed_next_operations(); + // Update the fields in the tx and check if it's confirmed. + withdraw_tx.confirmed = true; + withdraw_tx.final_hash = Some(withdraw_tx_hash); + eth_sender.db.assert_confirmed(&withdraw_tx); + } + + // We should be notified about all the verify operations being completed. + for _ in 0..operations_count { + assert!(receiver.try_next().unwrap().is_some()); + } +} From 6a39db1362bd7c5ba256d9c954efa4838b5014a0 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 10:00:16 +0300 Subject: [PATCH 100/186] Add more documentation --- core/server/src/eth_sender/mod.rs | 42 ++++++++++++++++++++-- core/server/src/eth_sender/tx_queue/mod.rs | 8 +++++ 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 5bd99b25ef..0eb5507933 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -67,8 +67,11 @@ enum TxCheckMode { /// successfully included in blocks and executed. /// /// Also `ETHSender` preserves the order of operations: it guarantees that operations -/// are committed in FIFO order, meaning that until the older operation is committed -/// and has enough confirmations, no other operations will be committed. +/// are committed in FIFO order, meaning that until the older operation of certain type (e.g. +/// `commit`) will always be committed before the newer one. +/// +/// However, multiple transaction can be "in flight" at the same time, see "Concurrent transaction +/// sending" section for details. /// /// # Transaction sending policy /// @@ -78,12 +81,26 @@ enum TxCheckMode { /// transaction and watch for its confirmations. /// /// If transaction is not confirmed for a while, we increase the gas price and do the same, but we -/// keep the list of all sent transactions for one particular operations, since we can't be +/// keep the list of all sent transaction hashes for one particular operations, since we can't be /// sure which one will be committed; thus we have to track all of them. /// /// Note: make sure to save signed tx to db before sending it to ETH, this way we can be sure /// that state is always recoverable. /// +/// # Concurrent transaction sending +/// +/// `ETHSender` supports sending multiple transaction to the Ethereum at the same time. 
+/// This can be configured by the constructor `max_txs_in_flight` parameter. The order of +/// transaction is still guaranteed to be preserved, since every sent tx has the assigned nonce +/// which makes it impossible to get sent transactions committed out of order. +/// +/// Internally order of the transaction is determined by the underlying `TxQueue`, which provides +/// transactions to send for `ETHSender` according to the following priority: +/// +/// 1. Verify operations (only if the corresponding commit operation was sent) +/// 2. Withdraw operations (only if both commit/verify for the same block operations were sent). +/// 3. Commit operations. +/// /// # Failure policy /// /// By default, `ETHSender` expects no transactions to fail, and thus upon a failure it will @@ -154,6 +171,8 @@ impl ETHSender { } } + /// Gets the incoming operations from the channel and adds them to the + /// transactions queue. fn retrieve_operations(&mut self) { while let Ok(Some(operation)) = self.rx_for_eth.try_next() { info!( @@ -166,6 +185,11 @@ impl ETHSender { } } + /// This method does two main things: + /// + /// 1. Pops all the available transactions from the `TxQueue` and sends them. + /// 2. Sifts all the ongoing operations, filtering the completed ones and + /// managing the rest (e.g. by sending a supplement txs for stuck operations). fn proceed_next_operations(&mut self) { while let Some(tx) = self.tx_queue.pop_front() { self.initialize_operation(tx).unwrap_or_else(|e| { @@ -220,6 +244,7 @@ impl ETHSender { self.ongoing_ops = new_ongoing_ops; } + /// Stores the new operation in the database and sends the corresponding transaction. fn initialize_operation(&mut self, tx: TxData) -> Result<(), failure::Error> { let current_block = self.ethereum.block_number()?; let deadline_block = self.get_deadline_block(current_block); @@ -241,6 +266,8 @@ impl ETHSender { Ok(()) } + /// Helper method to obtain the string representation of the ZK Sync operation. + /// Intended to be used for log entries. fn zksync_operation_description(&self, operation: ÐOperation) -> String { if let Some(op) = &operation.op { format!( @@ -254,6 +281,12 @@ impl ETHSender { } } + /// Handles the ongoing operation by checking its state and doing the following: + /// - If the transaction is either pending or completed, stops the execution (as + /// there is nothing to do with the operation yet). + /// - If the transaction is stuck, sends a supplement transaction for it. + /// - If the transaction is failed, handles the failure according to the failure + /// processing policy. fn perform_commitment_step( &mut self, op: &mut ETHOperation, @@ -465,6 +498,7 @@ impl ETHSender { })) } + /// Encodes the operation data to the Ethereum tx payload (not signs it!). fn operation_to_raw_tx(&self, op: &Operation) -> Vec { match &op.action { Action::Commit => { @@ -507,6 +541,7 @@ impl ETHSender { } } + /// Encodes the ZK Sync operation to the tx payload and adds it to the queue. fn add_operation_to_queue(&mut self, op: Operation) { let raw_tx = self.operation_to_raw_tx(&op); @@ -529,6 +564,7 @@ impl ETHSender { } } + /// The same as `add_operation_to_queue`, but for the withdraw operation. 
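[Editor's note] The priority order spelled out in the documentation above (verify first, then withdraw, then commit) can be pictured as three sub-queues polled in that order. A simplified standalone sketch that leaves out the gating on previously sent commit/verify operations:

    use std::collections::VecDeque;

    #[derive(Debug, PartialEq)]
    enum QueuedTx {
        Commit(u32),
        Verify(u32),
        Withdraw(u32),
    }

    struct ToyTxQueue {
        commits: VecDeque<QueuedTx>,
        verifies: VecDeque<QueuedTx>,
        withdraws: VecDeque<QueuedTx>,
    }

    impl ToyTxQueue {
        /// Returns the next transaction to send according to the priority above.
        fn pop_front(&mut self) -> Option<QueuedTx> {
            self.verifies
                .pop_front()
                .or_else(|| self.withdraws.pop_front())
                .or_else(|| self.commits.pop_front())
        }
    }

    fn main() {
        let mut queue = ToyTxQueue {
            commits: VecDeque::from(vec![QueuedTx::Commit(2)]),
            verifies: VecDeque::from(vec![QueuedTx::Verify(1)]),
            withdraws: VecDeque::from(vec![QueuedTx::Withdraw(1)]),
        };
        assert_eq!(queue.pop_front(), Some(QueuedTx::Verify(1)));
        assert_eq!(queue.pop_front(), Some(QueuedTx::Withdraw(1)));
        assert_eq!(queue.pop_front(), Some(QueuedTx::Commit(2)));
    }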
fn add_complete_withdrawals_to_queue(&mut self) { // function completeWithdrawals(uint32 _n) external { let raw_tx = self.ethereum.encode_tx_data( diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index 379723ae6c..f6a8258253 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -8,10 +8,16 @@ mod sparse_queue; pub type RawTxData = Vec; +/// Representation of the transaction data stored in the queue. +/// This structure contains only essential fields required for the `eth_sender` +/// to create an actual operation. #[derive(Debug)] pub struct TxData { + /// Type of the operation. pub op_type: OperationType, + /// Not signed raw tx payload. pub raw: RawTxData, + /// Optional ZK Sync operation. pub operation: Option, } @@ -22,6 +28,7 @@ impl PartialEq for TxData { } impl TxData { + /// Creates a new `TxData` object with the associated ZK Sync operation. pub fn from_operation(op_type: OperationType, operation: Operation, raw: RawTxData) -> Self { Self { op_type, @@ -30,6 +37,7 @@ impl TxData { } } + /// Creates a new `TxData` object without associated ZK Sync operation. pub fn from_raw(op_type: OperationType, raw: RawTxData) -> Self { Self { op_type, From 00e37c3d42b62d5595da004767875e983b0ee5f6 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Wed, 25 Mar 2020 10:01:18 +0200 Subject: [PATCH 101/186] Remove usage of web3 in client --- js/client/src/main.js | 61 +++++++++++++++++++---------------- js/client/src/views/Login.vue | 2 +- 2 files changed, 35 insertions(+), 28 deletions(-) diff --git a/js/client/src/main.js b/js/client/src/main.js index 971fb63f28..bff08a2a6e 100644 --- a/js/client/src/main.js +++ b/js/client/src/main.js @@ -2,7 +2,8 @@ import Vue from "vue"; import App from "./App.vue"; import router from "./router"; import BootstrapVue from 'bootstrap-vue'; -import config from "./env-config.js" +import config from "./env-config.js"; +import { sleep } from './utils'; Vue.config.productionTip = false; @@ -41,36 +42,39 @@ new Vue({ router, render: h => h(App), async created() { - ethereum.autoRefreshOnNetworkChange = false; - const checkNetwork = () => { - window.web3.version.getNetwork((err, currentNetwork) => { - let net = ({ - '1': 'mainnet', - '4': 'rinkeby', - '9': 'localhost', - })[currentNetwork] - || 'unknown'; + window.ethereum.autoRefreshOnNetworkChange = false; + const checkNetwork = async () => { + let net = ({ + '1': 'mainnet', + '4': 'rinkeby', + '9': 'localhost', + })[window.ethereum.networkVersion] + || 'unknown'; - let networkCorrect = this.currentLocationNetworkName.toLowerCase() == net.toLowerCase(); - if (networkCorrect == false) { - if (router.currentRoute.path !== '/login') { - router.push('/login'); - } + let networkCorrect = this.currentLocationNetworkName.toLowerCase() == net.toLowerCase(); + if (!networkCorrect) { + if (router.currentRoute.path !== '/login') { + router.push('/login'); } - if (router.currentRoute.path === '/login') { - if (window.web3 == false) { - document.getElementById("change_network_alert").style.display = "none"; - document.getElementById("login_button").style.display = "none"; - } else if (networkCorrect) { - document.getElementById("change_network_alert").style.display = "none"; - document.getElementById("login_button").style.display = "inline-block"; - } else { - document.getElementById("change_network_alert").style.display = "inline-block"; - document.getElementById("login_button").style.display = "none"; - } + } + if 
(router.currentRoute.path === '/login') { + while (!document.getElementById("change_network_alert")) { + await sleep(1000); + } + + if (!window.ethereum) { + document.getElementById("change_network_alert").style.display = "none"; + document.getElementById("login_button").style.display = "none"; + } else if (networkCorrect) { + document.getElementById("change_network_alert").style.display = "none"; + document.getElementById("login_button").style.display = "inline-block"; + } else { + document.getElementById("change_network_alert").style.display = "inline-block"; + document.getElementById("login_button").style.display = "none"; } - }); + } }; + window.ethereum.on('chainIdChanged', checkNetwork); window.ethereum.on('accountsChanged', accounts => { if (router.currentRoute.path !== '/login') { router.push('/login'); @@ -78,6 +82,9 @@ new Vue({ }); checkNetwork(); // the first time + + // this isn't needed if window.ethereum.on handler works + // but it doesn't work on Metamask Plugins Beta. setInterval(checkNetwork, 1000); }, }).$mount("#app"); diff --git a/js/client/src/views/Login.vue b/js/client/src/views/Login.vue index c44e85c818..b1dc590852 100644 --- a/js/client/src/views/Login.vue +++ b/js/client/src/views/Login.vue @@ -34,7 +34,7 @@ import { WalletDecorator } from '../WalletDecorator' export default { name: 'login', computed: { - ethereumSupported: () => typeof window.web3 !== 'undefined', + ethereumSupported: () => typeof window.ethereum !== 'undefined', }, methods: { async login() { From eb220355e6bb6cde98349c78ef8d8ac7261124d5 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 11:48:25 +0300 Subject: [PATCH 102/186] Fix clippy warning --- core/models/src/ethereum.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index 3e36b09d58..7d1e298857 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -1,5 +1,6 @@ //! Common primitives for the Ethereum network interaction. // Built-in deps +use std::fmt; use std::str::FromStr; // External uses /// Local uses @@ -20,12 +21,12 @@ pub enum OperationType { Withdraw, } -impl OperationType { - pub fn to_string(&self) -> String { +impl fmt::Display for OperationType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::Commit => "commit".into(), - Self::Verify => "verify".into(), - Self::Withdraw => "withdraw".into(), + Self::Commit => write!(f, "commit"), + Self::Verify => write!(f, "verify"), + Self::Withdraw => write!(f, "withdraw"), } } } From 97b1fce7fa0b82a81173c06aa7a3f649c747b1ec Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 11:50:41 +0300 Subject: [PATCH 103/186] Fix several more clippy warnings --- core/storage/src/ethereum/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index f85a21c790..fbd403f823 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -62,7 +62,7 @@ impl<'a> EthereumSchema<'a> { .order_by(eth_tx_hashes::id.asc()) .load(self.0.conn())?; assert!( - eth_tx_hashes.len() >= 1, + !eth_tx_hashes.is_empty(), "No hashes stored for the Ethereum operation" ); @@ -104,6 +104,7 @@ impl<'a> EthereumSchema<'a> { } /// Stores the sent (but not confirmed yet) Ethereum transaction in the database. + #[allow(clippy::too_many_arguments)] // OK for this particular method. 
pub fn save_new_eth_tx( &self, op_type: OperationType, From 74f35cfdd4fad30329edb9d37cf4ab4c67008afa Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 12:11:32 +0300 Subject: [PATCH 104/186] Load unprocessed operations after relaunch --- core/server/src/eth_sender/database.rs | 17 +++++++--- core/server/src/eth_sender/mod.rs | 21 ++++++++---- core/server/src/eth_sender/tests/mock.rs | 10 +++--- core/storage/src/ethereum/mod.rs | 41 +++++++++++++++++++++--- 4 files changed, 69 insertions(+), 20 deletions(-) diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index c0ad74213e..aafe4a6739 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -4,20 +4,26 @@ //! database to run, which is required for tests. // Built-in deps +use std::collections::VecDeque; use std::str::FromStr; // External uses use bigdecimal::BigDecimal; use web3::types::{H256, U256}; // Workspace uses -use models::ethereum::{ETHOperation, EthOpId}; +use models::{ + ethereum::{ETHOperation, EthOpId}, + Operation, +}; use storage::ConnectionPool; // Local uses use super::transactions::ETHStats; /// Abstract database access trait, optimized for the needs of `ETHSender`. pub(super) trait DatabaseAccess { - /// Loads the unconfirmed operations from the database. - fn restore_state(&self) -> Result, failure::Error>; + /// Loads the unconfirmed and unprocessed operations from the database. + /// Unconfirmed operations are Ethereum operations that were started, but not confirmed yet. + /// Unprocessed operations are ZK Sync operations that were not started at all. + fn restore_state(&self) -> Result<(VecDeque, Vec), failure::Error>; /// Saves a new unconfirmed operation to the database. fn save_new_eth_tx(&self, op: ÐOperation) -> Result; @@ -55,14 +61,15 @@ impl Database { } impl DatabaseAccess for Database { - fn restore_state(&self) -> Result, failure::Error> { + fn restore_state(&self) -> Result<(VecDeque, Vec), failure::Error> { let storage = self .db_pool .access_storage() .expect("Failed to access storage"); let unconfirmed_ops = storage.ethereum_schema().load_unconfirmed_operations()?; - Ok(unconfirmed_ops) + let unprocessed_ops = storage.ethereum_schema().load_unprocessed_operations()?; + Ok((unconfirmed_ops, unprocessed_ops)) } fn save_new_eth_tx(&self, op: ÐOperation) -> Result { diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 0eb5507933..043d79c306 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -130,11 +130,7 @@ impl ETHSender { rx_for_eth: mpsc::Receiver, op_notify: mpsc::Sender, ) -> Self { - let ongoing_ops: VecDeque<_> = db - .restore_state() - .expect("Failed loading unconfirmed operations from the storage") - .into_iter() - .collect(); + let (ongoing_ops, unprocessed_ops) = db.restore_state().expect("Can't restore state"); let stats = db .load_stats() @@ -147,14 +143,27 @@ impl ETHSender { .with_withdraw_operations_count(stats.withdraw_ops) .build(); - Self { + let mut sender = Self { ethereum, ongoing_ops, db, rx_for_eth, op_notify, tx_queue, + }; + + // Add all the unprocessed operations to the queue. + for operation in unprocessed_ops { + info!( + "Adding unprocessed ZKSync operation to queue", + operation.id.expect("ID must be set"), + operation.action.to_string(), + operation.block.block_number + ); + sender.add_operation_to_queue(operation); } + + sender } /// Main routine of `ETHSender`. 
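[Editor's note] After this change, restoring state yields two collections: Ethereum operations that are already in flight and ZK Sync operations that were never started, the latter being re-queued from scratch. A toy sketch of that split (field names are illustrative; the real data comes from two separate database queries):

    use std::collections::VecDeque;

    #[derive(Clone, Debug)]
    struct Op {
        id: i64,
        sent_to_ethereum: bool,
    }

    /// Splits stored operations into the ongoing queue (already sent) and the
    /// unprocessed backlog (to be re-queued from scratch).
    fn restore_state(all: Vec<Op>) -> (VecDeque<Op>, Vec<Op>) {
        let (ongoing, unprocessed): (Vec<Op>, Vec<Op>) =
            all.into_iter().partition(|op| op.sent_to_ethereum);
        (ongoing.into(), unprocessed)
    }

    fn main() {
        let ops = vec![
            Op { id: 1, sent_to_ethereum: true },
            Op { id: 2, sent_to_ethereum: false },
        ];
        let (ongoing, unprocessed) = restore_state(ops);
        assert_eq!(ongoing.len(), 1);
        assert_eq!(unprocessed[0].id, 2);
    }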
diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs
index 744cfd0151..7a3677fe8d 100644
--- a/core/server/src/eth_sender/tests/mock.rs
+++ b/core/server/src/eth_sender/tests/mock.rs
@@ -2,7 +2,7 @@
 
 // Built-in deps
 use std::cell::{Cell, RefCell};
-use std::collections::HashMap;
+use std::collections::{HashMap, VecDeque};
 // External uses
 use futures::channel::mpsc;
 use web3::contract::{tokens::Tokenize, Options};
@@ -24,7 +24,7 @@ const CHANNEL_CAPACITY: usize = 16;
 /// Mock database is capable of recording all the incoming requests for the further analysis.
 #[derive(Debug, Default)]
 pub(super) struct MockDatabase {
-    restore_state: Vec<ETHOperation>,
+    restore_state: VecDeque<ETHOperation>,
     unconfirmed_operations: RefCell>,
     confirmed_operations: RefCell>,
     nonce: Cell,
@@ -38,7 +38,7 @@ impl MockDatabase {
         restore_state: impl IntoIterator,
         stats: ETHStats,
     ) -> Self {
-        let restore_state: Vec<_> = restore_state.into_iter().collect();
+        let restore_state: VecDeque<_> = restore_state.into_iter().collect();
         let nonce = restore_state
             .iter()
             .fold(0, |acc, op| acc + op.used_tx_hashes.len());
@@ -73,8 +73,8 @@ impl MockDatabase {
 }
 
 impl DatabaseAccess for MockDatabase {
-    fn restore_state(&self) -> Result<Vec<ETHOperation>, failure::Error> {
-        Ok(self.restore_state.clone())
+    fn restore_state(&self) -> Result<(VecDeque<ETHOperation>, Vec<Operation>), failure::Error> {
+        Ok((self.restore_state.clone(), Vec::new()))
     }
 
     fn save_new_eth_tx(&self, op: &ETHOperation) -> Result<EthOpId, failure::Error> {
diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs
index fbd403f823..05f626791b 100644
--- a/core/storage/src/ethereum/mod.rs
+++ b/core/storage/src/ethereum/mod.rs
@@ -1,4 +1,5 @@
 // Built-in deps
+use std::collections::VecDeque;
 use std::str::FromStr;
 // External imports
 use bigdecimal::BigDecimal;
@@ -6,7 +7,10 @@ use diesel::dsl::{insert_into, update};
 use diesel::prelude::*;
 use web3::types::{H256, U256};
 // Workspace imports
-use models::ethereum::{ETHOperation, OperationType};
+use models::{
+    ethereum::{ETHOperation, OperationType},
+    Operation,
+};
 // Local imports
 use self::records::{
     ETHBinding, ETHNonce, ETHStats, ETHTxHash, NewETHBinding, NewETHOperation, NewETHTxHash,
@@ -27,7 +31,7 @@ pub struct EthereumSchema<'a>(pub &'a StorageProcessor);
 impl<'a> EthereumSchema<'a> {
     /// Loads the list of operations that were not confirmed on Ethereum,
     /// each operation has a list of sent Ethereum transactions.
-    pub fn load_unconfirmed_operations(&self) -> QueryResult<Vec<ETHOperation>> {
+    pub fn load_unconfirmed_operations(&self) -> QueryResult<VecDeque<ETHOperation>> {
         // Load the operations with the associated Ethereum transactions
         // from the database.
         // Here we obtain a sequence of one-to-one mappings (ETH tx) -> (operation ID).
@@ -51,7 +55,7 @@ impl<'a> EthereumSchema<'a> {
         })?;
 
         // Create a vector for the expected output.
-        let mut ops: Vec<ETHOperation> = Vec::with_capacity(raw_ops.len());
+        let mut ops: VecDeque<ETHOperation> = VecDeque::with_capacity(raw_ops.len());
 
         // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`.
         for (eth_op, _, raw_op) in raw_ops {
@@ -97,12 +101,41 @@
                 final_hash,
             };
 
-            ops.push(eth_op);
+            ops.push_back(eth_op);
         }
 
         Ok(ops)
     }
 
+    /// Loads the operations which were stored in `operations` table, but not
+    /// in the `eth_operations`. This method is intended to be used after relaunch
+    /// to synchronize `eth_sender` state, as operations are sent to the `eth_sender`
+    /// only once.
+ pub fn load_unprocessed_operations(&self) -> QueryResult> { + let raw_ops: Vec<(StoredOperation, Option)> = + self.0.conn().transaction(|| { + operations::table + .left_join(eth_ops_binding::table.on(operations::id.eq(eth_ops_binding::op_id))) + .filter(operations::confirmed.eq(false)) + .order(operations::id.asc()) + .load(self.0.conn()) + })?; + + let operations: Vec = raw_ops + .into_iter() + .filter_map(|(raw_op, maybe_binding)| { + // We are only interested in operations unknown to `eth_operations` table. + if maybe_binding.is_some() { + None + } else { + Some(raw_op.into_op(self.0).expect("Can't convert the operation")) + } + }) + .collect(); + + Ok(operations) + } + /// Stores the sent (but not confirmed yet) Ethereum transaction in the database. #[allow(clippy::too_many_arguments)] // OK for this particular method. pub fn save_new_eth_tx( From 241a0c30a60d89acd12f62adb87a31a5ef1e0fbf Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 12:13:19 +0300 Subject: [PATCH 105/186] Load unconfirmed txs in one transaction --- core/storage/src/ethereum/mod.rs | 114 +++++++++++++++---------------- 1 file changed, 57 insertions(+), 57 deletions(-) diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index 05f626791b..e56f133797 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -39,72 +39,72 @@ impl<'a> EthereumSchema<'a> { // operation is associated with exactly one Ethereum transaction. Note that there may // be ETH transactions without an operation (e.g. `completeWithdrawals` call), but for // every operation always there is an ETH transaction. - let raw_ops: Vec<( - StorageETHOperation, - Option, - Option, - )> = self.0.conn().transaction(|| { - eth_operations::table + self.0.conn().transaction(|| { + let raw_ops: Vec<( + StorageETHOperation, + Option, + Option, + )> = eth_operations::table .left_join( eth_ops_binding::table.on(eth_operations::id.eq(eth_ops_binding::eth_op_id)), ) .left_join(operations::table.on(operations::id.eq(eth_ops_binding::op_id))) .filter(eth_operations::confirmed.eq(false)) .order(eth_operations::id.asc()) - .load(self.0.conn()) - })?; - - // Create a vector for the expected output. - let mut ops: VecDeque = VecDeque::with_capacity(raw_ops.len()); - - // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`. - for (eth_op, _, raw_op) in raw_ops { - // Load the stored txs hashes ordered by their ID, - // so the latest added hash will be the last one in the list. - let eth_tx_hashes: Vec = eth_tx_hashes::table - .filter(eth_tx_hashes::eth_op_id.eq(eth_op.id)) - .order_by(eth_tx_hashes::id.asc()) .load(self.0.conn())?; - assert!( - !eth_tx_hashes.is_empty(), - "No hashes stored for the Ethereum operation" - ); - - // If there is an operation, convert it to the `Operation` type. - let op = if let Some(raw_op) = raw_op { - Some(raw_op.into_op(self.0)?) - } else { - None - }; - - // Convert the fields into expected format. 
- let op_type = OperationType::from_str(eth_op.op_type.as_ref()) - .expect("Stored operation type must have a valid value"); - let last_used_gas_price = - U256::from_str(ð_op.last_used_gas_price.to_string()).unwrap(); - let used_tx_hashes = eth_tx_hashes - .iter() - .map(|entry| H256::from_slice(&entry.tx_hash)) - .collect(); - let final_hash = eth_op.final_hash.map(|hash| H256::from_slice(&hash)); - - let eth_op = ETHOperation { - id: eth_op.id, - op_type, - op, - nonce: eth_op.nonce.into(), - last_deadline_block: eth_op.last_deadline_block as u64, - last_used_gas_price, - used_tx_hashes, - encoded_tx_data: eth_op.raw_tx, - confirmed: eth_op.confirmed, - final_hash, - }; - ops.push_back(eth_op); - } + // Create a vector for the expected output. + let mut ops: VecDeque = VecDeque::with_capacity(raw_ops.len()); + + // Transform the `StoredOperation` to `Operation` and `StoredETHOperation` to `ETHOperation`. + for (eth_op, _, raw_op) in raw_ops { + // Load the stored txs hashes ordered by their ID, + // so the latest added hash will be the last one in the list. + let eth_tx_hashes: Vec = eth_tx_hashes::table + .filter(eth_tx_hashes::eth_op_id.eq(eth_op.id)) + .order_by(eth_tx_hashes::id.asc()) + .load(self.0.conn())?; + assert!( + !eth_tx_hashes.is_empty(), + "No hashes stored for the Ethereum operation" + ); + + // If there is an operation, convert it to the `Operation` type. + let op = if let Some(raw_op) = raw_op { + Some(raw_op.into_op(self.0)?) + } else { + None + }; + + // Convert the fields into expected format. + let op_type = OperationType::from_str(eth_op.op_type.as_ref()) + .expect("Stored operation type must have a valid value"); + let last_used_gas_price = + U256::from_str(ð_op.last_used_gas_price.to_string()).unwrap(); + let used_tx_hashes = eth_tx_hashes + .iter() + .map(|entry| H256::from_slice(&entry.tx_hash)) + .collect(); + let final_hash = eth_op.final_hash.map(|hash| H256::from_slice(&hash)); + + let eth_op = ETHOperation { + id: eth_op.id, + op_type, + op, + nonce: eth_op.nonce.into(), + last_deadline_block: eth_op.last_deadline_block as u64, + last_used_gas_price, + used_tx_hashes, + encoded_tx_data: eth_op.raw_tx, + confirmed: eth_op.confirmed, + final_hash, + }; + + ops.push_back(eth_op); + } - Ok(ops) + Ok(ops) + }) } /// Loads the operations which were stored in `operations` table, but not From d122fdae8627a86d43780ed446d3dfdc4550df45 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 25 Mar 2020 12:36:02 +0300 Subject: [PATCH 106/186] Remove redundant clones --- core/server/src/eth_sender/tests/mock.rs | 4 +--- core/server/src/eth_sender/tests/mod.rs | 8 ++++---- core/storage/src/tests/ethereum.rs | 2 +- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 7a3677fe8d..7946b213fa 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -86,9 +86,7 @@ impl DatabaseAccess for MockDatabase { let mut op = op.clone(); op.id = id; - self.unconfirmed_operations - .borrow_mut() - .insert(id, op.clone()); + self.unconfirmed_operations.borrow_mut().insert(id, op); Ok(id) } diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 70fd173bf7..1814cbacdb 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -477,7 +477,7 @@ fn restore_state() { let deadline_block = eth_sender.get_deadline_block(2); let verify_op_tx = create_signed_tx(1, 
&eth_sender, &verify_op, deadline_block, 1);
 
-    let operations = vec![commit_op.clone(), verify_op.clone()];
+    let operations = vec![commit_op, verify_op];
     let stored_operations = vec![commit_op_tx, verify_op_tx];
 
     (operations, stored_operations)
@@ -488,7 +488,7 @@ fn restore_state() {
         verify_ops: 1,
         withdraw_ops: 0,
     };
-    let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations.clone(), stats);
+    let (mut eth_sender, _, mut receiver) = restored_eth_sender(stored_operations, stats);
 
     for (eth_op_id, operation) in operations.iter().enumerate() {
         // Note that we DO NOT send an operation to `ETHSender` and neither receive it.
@@ -660,7 +660,7 @@ fn concurrent_operations_order() {
     let mut withdraw_tx = txs[2].clone();
 
     // Check that commit/verify txs are sent and add the successful execution for them.
-    for tx in vec![commit_tx, verify_tx] {
+    for tx in &[commit_tx, verify_tx] {
         let current_tx_hash = tx.used_tx_hashes[0];
 
         // Check that current expected tx is stored.
@@ -676,7 +676,7 @@ fn concurrent_operations_order() {
     // Call `proceed_next_operations` again. Both txs should become confirmed.
     eth_sender.proceed_next_operations();
 
-    for tx in vec![commit_tx, verify_tx] {
+    for &tx in &[commit_tx, verify_tx] {
         let mut tx = tx.clone();
         let current_tx_hash = tx.used_tx_hashes[0];
diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs
index 49d6a4fbaf..7a492f5adf 100644
--- a/core/storage/src/tests/ethereum.rs
+++ b/core/storage/src/tests/ethereum.rs
@@ -61,7 +61,7 @@ impl EthereumTxParams {
         let op_type = OperationType::from_str(self.op_type.as_ref())
             .expect("Stored operation type must have a valid value");
         let last_used_gas_price = U256::from_str(&self.gas_price.to_string()).unwrap();
-        let used_tx_hashes = vec![self.hash.clone()];
+        let used_tx_hashes = vec![self.hash];
 
         ETHOperation {
             id: db_id,
From 3cf3441198613088e9b8884829dd1cffac26d7f6 Mon Sep 17 00:00:00 2001
From: furkhat 
Date: Wed, 25 Mar 2020 14:15:16 +0200
Subject: [PATCH 107/186] promote to testnet

---
 .drone.yml          | 46 +++++++++++++++++++++++++++++++++++++++++++++
 Makefile            |  2 +-
 etc/kube/nginx.yaml |  2 +-
 3 files changed, 48 insertions(+), 2 deletions(-)

diff --git a/.drone.yml b/.drone.yml
index 072e12abb7..1a84ea866a 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -344,3 +344,49 @@ steps:
   - nginx-image-publish
   - server-image-publish
   - prover-image-publish
+
+---
+# This pipeline updates testnet
+kind: pipeline
+type: docker
+name: update-testnet
+clone:
+  depth: 1
+
+volumes:
+  - name: docker-sock
+    host:
+      path: /var/run/docker.sock
+
+trigger:
+  target:
+  - testnet
+  event:
+  - promote
+
+steps:
+- name: check-images-exist
+  image: matterlabs/ci
+  environment:
+    ENV_BASE64:
+      from_secret: testnet_env_base64
+  commands:
+  - export ZKSYNC_HOME=`pwd`
+  - export PATH=$ZKSYNC_HOME/bin:$PATH
+  - echo -n $ENV_BASE64 | base64 --decode > $ZKSYNC_HOME/etc/env/testnet.env
+  - zksync env testnet
+  - docker pull matterlabs/server:$(f echo -n $IMAGE_TAG)
+  - docker pull matterlabs/prover:$(f echo -n $IMAGE_TAG)
+  - docker pull matterlabs/nginx:$(f echo -n $IMAGE_TAG)
+- name: testnet-deploy
+  image: matterlabs/ci
+  environment:
+    KUBECONFIG_BASE64:
+      from_secret: stage_kubeconfig_base64
+  commands:
+  - export ZKSYNC_HOME=`pwd`
+  - export PATH=$ZKSYNC_HOME/bin:$PATH
+  - echo -n $KUBECONFIG_BASE64 | base64 --decode > `pwd`/kubeconfig.yaml
+  - python3 $ZKSYNC_HOME/bin/replace-env-variable.py $ZKSYNC_HOME/etc/env/testnet.env KUBECONFIG=`pwd`/kubeconfig.yaml
+  - zksync apply-kubeconfig
+  - zksync restart
diff 
--git a/Makefile b/Makefile index 9aa934c3b4..aeaa53fad7 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ export CI_PIPELINE_ID ?= $(shell date +"%Y-%m-%d-%s") export SERVER_DOCKER_IMAGE ?=matterlabs/server:$(IMAGE_TAG) export PROVER_DOCKER_IMAGE ?=matterlabs/prover:$(IMAGE_TAG) -export NGINX_DOCKER_IMAGE ?= matterlabs/nginx:$(ZKSYNC_ENV)-$(IMAGE_TAG) +export NGINX_DOCKER_IMAGE ?= matterlabs/nginx:$(IMAGE_TAG) export GETH_DOCKER_IMAGE ?= matterlabs/geth:latest export CI_DOCKER_IMAGE ?= matterlabs/ci diff --git a/etc/kube/nginx.yaml b/etc/kube/nginx.yaml index 34e8dd5a25..5124978867 100644 --- a/etc/kube/nginx.yaml +++ b/etc/kube/nginx.yaml @@ -33,7 +33,7 @@ spec: spec: containers: - name: ${ZKSYNC_ENV}-nginx - image: matterlabs/nginx:${ZKSYNC_ENV}-${IMAGE_TAG} + image: matterlabs/nginx:${IMAGE_TAG} imagePullPolicy: Always ports: - containerPort: 80 From 5daee8b0c2b368095caa492b6e5caa0cf61e10f0 Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 15:05:46 +0200 Subject: [PATCH 108/186] fix ref --- Makefile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Makefile b/Makefile index aeaa53fad7..60e7a9a8ae 100644 --- a/Makefile +++ b/Makefile @@ -233,10 +233,7 @@ apply-kubeconfig-nginx: @bin/k8s-gen-resource-definitions @bin/k8s-apply-nginx -apply-kubeconfig: - apply-kubeconfig-server - apply-kubeconfig-provers - apply-kubeconfig-nginx +apply-kubeconfig: apply-kubeconfig-server apply-kubeconfig-provers apply-kubeconfig-nginx update-provers: push-image-prover apply-kubeconfig-server @kubectl patch deployment $(ZKSYNC_ENV)-server --namespace $(ZKSYNC_ENV) -p "{\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"date\":\"$(shell date +%s)\"}}}}}" From d40a98ef82e9e2b238ce69a5b4e42aaa43119cf0 Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 16:30:30 +0200 Subject: [PATCH 109/186] use testnet kubeconf --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 1a84ea866a..b35b117ec1 100644 --- a/.drone.yml +++ b/.drone.yml @@ -382,7 +382,7 @@ steps: image: matterlabs/ci environment: KUBECONFIG_BASE64: - from_secret: stage_kubeconfig_base64 + from_secret: testnet_kubeconfig_base64 commands: - export ZKSYNC_HOME=`pwd` - export PATH=$ZKSYNC_HOME/bin:$PATH From 12d92d0d0e0f7e4fca701b84d264a065081aa38a Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 16:36:30 +0200 Subject: [PATCH 110/186] promote-to-testnet cmd --- .drone.yml | 6 +++--- Makefile | 5 ++++- bin/{promote-to-stage.sh => promote-to.sh} | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) rename bin/{promote-to-stage.sh => promote-to.sh} (65%) diff --git a/.drone.yml b/.drone.yml index b35b117ec1..489f571791 100644 --- a/.drone.yml +++ b/.drone.yml @@ -375,9 +375,9 @@ steps: - export PATH=$ZKSYNC_HOME/bin:$PATH - echo -n $ENV_BASE64 | base64 --decode > $ZKSYNC_HOME/etc/env/testnet.env - zksync env testnet - - docker pull matterlabs/server:$(f echo -n $IMAGE_TAG) - - docker pull matterlabs/prover:$(f echo -n $IMAGE_TAG) - - docker pull matterlabs/nginx:$(f echo -n $IMAGE_TAG) + - f docker pull matterlabs/server:$IMAGE_TAG + - f docker pull matterlabs/prover:$IMAGE_TAG + - f docker pull matterlabs/nginx:$IMAGE_TAG - name: testnet-deploy image: matterlabs/ci environment: diff --git a/Makefile b/Makefile index 60e7a9a8ae..8b9ab46e62 100644 --- a/Makefile +++ b/Makefile @@ -211,7 +211,10 @@ deposit: confirm_action # Promote build promote-to-stage: - @bin/promote-to-stage.sh $(ci-build) + @bin/promote-to.sh stage $(ci-build) + 
+promote-to-testnet: + @bin/promote-to.sh testnet $(ci-build) # (Re)deploy contracts and database redeploy: confirm_action stop deploy-contracts db-insert-contract diff --git a/bin/promote-to-stage.sh b/bin/promote-to.sh similarity index 65% rename from bin/promote-to-stage.sh rename to bin/promote-to.sh index 831080e322..af85c815c5 100755 --- a/bin/promote-to-stage.sh +++ b/bin/promote-to.sh @@ -2,4 +2,4 @@ set -e -drone build promote $(git config --get remote.origin.url|sed -e 's/git@github.com:\(.*\).git/\1/') $1 stage +drone build promote $(git config --get remote.origin.url|sed -e 's/git@github.com:\(.*\).git/\1/') $2 $1 From 0b964715bce114ae96f8d5e0a8d0286bb806c362 Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 16:41:20 +0200 Subject: [PATCH 111/186] explicit image tag --- .drone.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.drone.yml b/.drone.yml index 489f571791..0a68915330 100644 --- a/.drone.yml +++ b/.drone.yml @@ -366,26 +366,26 @@ trigger: steps: - name: check-images-exist - image: matterlabs/ci + image: docker environment: ENV_BASE64: from_secret: testnet_env_base64 commands: - - export ZKSYNC_HOME=`pwd` - - export PATH=$ZKSYNC_HOME/bin:$PATH - - echo -n $ENV_BASE64 | base64 --decode > $ZKSYNC_HOME/etc/env/testnet.env - - zksync env testnet - - f docker pull matterlabs/server:$IMAGE_TAG - - f docker pull matterlabs/prover:$IMAGE_TAG - - f docker pull matterlabs/nginx:$IMAGE_TAG + - docker pull matterlabs/server:${DRONE_COMMIT_SHA:0:8} + - docker pull matterlabs/prover:${DRONE_COMMIT_SHA:0:8} + - docker pull matterlabs/nginx:${DRONE_COMMIT_SHA:0:8} - name: testnet-deploy image: matterlabs/ci environment: + ENV_BASE64: + from_secret: testnet_env_base64 KUBECONFIG_BASE64: from_secret: testnet_kubeconfig_base64 commands: - export ZKSYNC_HOME=`pwd` - export PATH=$ZKSYNC_HOME/bin:$PATH + - echo -n $ENV_BASE64 | base64 --decode > $ZKSYNC_HOME/etc/env/testnet.env + - zksync env testnet - echo -n $KUBECONFIG_BASE64 | base64 --decode > `pwd`/kubeconfig.yaml - python3 $ZKSYNC_HOME/bin/replace-env-variable.py $ZKSYNC_HOME/etc/env/testnet.env KUBECONFIG=`pwd`/kubeconfig.yaml - zksync apply-kubeconfig From c9cc66c09c71cfb84450c1e9df6afae806d4debe Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 16:49:12 +0200 Subject: [PATCH 112/186] connect docker sock --- .drone.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.drone.yml b/.drone.yml index 0a68915330..a2e7c8d8f8 100644 --- a/.drone.yml +++ b/.drone.yml @@ -374,6 +374,9 @@ steps: - docker pull matterlabs/server:${DRONE_COMMIT_SHA:0:8} - docker pull matterlabs/prover:${DRONE_COMMIT_SHA:0:8} - docker pull matterlabs/nginx:${DRONE_COMMIT_SHA:0:8} + volumes: + - name: docker-sock + path: /var/run/docker.sock - name: testnet-deploy image: matterlabs/ci environment: From 26c62ed7f96212935deef1f0710ede210c307daf Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 17:19:59 +0200 Subject: [PATCH 113/186] nginx image tag --- .drone.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.drone.yml b/.drone.yml index a2e7c8d8f8..98e3771c4f 100644 --- a/.drone.yml +++ b/.drone.yml @@ -258,8 +258,8 @@ steps: image: docker commands: - docker login -u $USERNAME -p $PASSWORD - - docker build -t $REPO:stage-${DRONE_COMMIT_SHA:0:8} -f $DOCKERFILE . - - docker push $REPO:stage-${DRONE_COMMIT_SHA:0:8} + - docker build -t $REPO:${DRONE_COMMIT_SHA:0:8} -f $DOCKERFILE . 
+ - docker push $REPO:${DRONE_COMMIT_SHA:0:8} environment: USERNAME: from_secret: docker_username @@ -368,9 +368,14 @@ steps: - name: check-images-exist image: docker environment: + USERNAME: + from_secret: docker_username + PASSWORD: + from_secret: docker_password ENV_BASE64: from_secret: testnet_env_base64 commands: + - docker login -u $USERNAME -p $PASSWORD - docker pull matterlabs/server:${DRONE_COMMIT_SHA:0:8} - docker pull matterlabs/prover:${DRONE_COMMIT_SHA:0:8} - docker pull matterlabs/nginx:${DRONE_COMMIT_SHA:0:8} From bcf011fd2f9adcd37549e0c9659dfba2fcfdffaf Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 18:01:29 +0200 Subject: [PATCH 114/186] fail on kubectl apply error --- bin/k8s-apply-nginx | 2 ++ bin/k8s-apply-provers | 2 ++ bin/k8s-apply-server | 4 ++++ bin/k8s-secret | 5 +++-- 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/bin/k8s-apply-nginx b/bin/k8s-apply-nginx index 17f761725c..78025eeece 100755 --- a/bin/k8s-apply-nginx +++ b/bin/k8s-apply-nginx @@ -1,2 +1,4 @@ #!/bin/bash +set -e + kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/nginx.yaml -n $ZKSYNC_ENV diff --git a/bin/k8s-apply-provers b/bin/k8s-apply-provers index 7c8ec23690..a56d3bb815 100755 --- a/bin/k8s-apply-provers +++ b/bin/k8s-apply-provers @@ -1,4 +1,6 @@ #!/bin/bash +set -e + for BLOCK_SIZE_CHUNKS in $(echo $BLOCK_CHUNK_SIZES | sed "s/,/ /g"); do kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/prover-$BLOCK_SIZE_CHUNKS.yaml -n $ZKSYNC_ENV --record=true done diff --git a/bin/k8s-apply-server b/bin/k8s-apply-server index 538ff8ed08..1a35c3b16e 100755 --- a/bin/k8s-apply-server +++ b/bin/k8s-apply-server @@ -1,3 +1,7 @@ #!/bin/bash +set -e + +cat etc/kube/gen/$ZKSYNC_ENV/secret.yaml + kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/secret.yaml -n $ZKSYNC_ENV kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/server.yaml -n $ZKSYNC_ENV --record=true diff --git a/bin/k8s-secret b/bin/k8s-secret index a04ed39aa6..2814b7eeff 100755 --- a/bin/k8s-secret +++ b/bin/k8s-secret @@ -4,10 +4,11 @@ #. .setup_env prod -kubectl create secret generic $ZKSYNC_ENV-secret --dry-run -o yaml +kubectl create secret generic $ZKSYNC_ENV-secret --namespace $ZKSYNC_ENV --dry-run -o yaml echo data: grep -v '^#' $ENV_FILE | grep -v '^$' | while read -r line; do VAR=`sed 's/=.*//' <<< $line` echo -n " $VAR: " - echo -n ${!VAR} | base64 + echo -n ${!VAR} | base64 --wrap=0 + echo done From c520dba0f5446ea0337fc5a44709340588ac196d Mon Sep 17 00:00:00 2001 From: furkhat Date: Wed, 25 Mar 2020 19:41:31 +0200 Subject: [PATCH 115/186] update promote docs --- bin/k8s-apply-server | 2 -- docs/promote.md | 20 +++++++++++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/bin/k8s-apply-server b/bin/k8s-apply-server index 1a35c3b16e..d3ecfea49b 100755 --- a/bin/k8s-apply-server +++ b/bin/k8s-apply-server @@ -1,7 +1,5 @@ #!/bin/bash set -e -cat etc/kube/gen/$ZKSYNC_ENV/secret.yaml - kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/secret.yaml -n $ZKSYNC_ENV kubectl apply -f etc/kube/gen/$ZKSYNC_ENV/server.yaml -n $ZKSYNC_ENV --record=true diff --git a/docs/promote.md b/docs/promote.md index 33f00647a0..238ab99212 100644 --- a/docs/promote.md +++ b/docs/promote.md @@ -8,7 +8,7 @@ Creat promotion job on CI to deploy to target environment. ```bash cat $ZKSYNC_HOME/etc/env/stage.env | base64 ``` -3. Update CI secret `stage_env_base64` +3. Add/Replace CI secret `stage_env_base64` 4. 
Create promotion job on CI using following command where `` is a build number to promote to staging ```bash zksync promote-to-stage build= @@ -18,3 +18,21 @@ Example: ```bash zksync promote-to-stage ci-build=23 ``` + +## Promoting to testnet environment (target env = testnet) + +1. Prepare `testnet.env` file +2. Take base64 of env file: +```bash +cat $ZKSYNC_HOME/etc/env/stage.env | base64 +``` +3. Add/Replace CI secret `testnet_env_base64` +4. Create promotion job on CI using following command where `` is a build number to promote to staging +```bash +zksync promote-to-stage build= +``` + +Example: +```bash +zksync promote-to-testnet ci-build=23 +``` From e82a630f40ec4b05e221fb0af3240e87ffb92d5e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 26 Mar 2020 07:13:19 +0300 Subject: [PATCH 116/186] Add initial benchmark skeleton --- Cargo.lock | 206 +++++++++++++++++++ core/models/Cargo.toml | 8 + core/models/benches/criterion/lib.rs | 8 + core/models/benches/criterion/merkle_tree.rs | 49 +++++ core/models/src/circuit/account.rs | 1 + 5 files changed, 272 insertions(+) create mode 100644 core/models/benches/criterion/lib.rs create mode 100644 core/models/benches/criterion/merkle_tree.rs diff --git a/Cargo.lock b/Cargo.lock index 05d234e2c6..789932f303 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -565,9 +565,17 @@ name = "bstr" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-automata 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "bumpalo" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "byte-tools" version = "0.3.1" @@ -601,6 +609,14 @@ dependencies = [ "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "cast" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "cc" version = "1.0.41" @@ -740,6 +756,39 @@ dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "criterion" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", + "criterion-plot 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "csv 1.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "oorandom 11.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "plotters 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 
1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "tinytemplate 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "criterion-plot" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "crossbeam" version = "0.7.3" @@ -824,6 +873,26 @@ dependencies = [ "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "csv" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bstr 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "csv-core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "ctr" version = "0.3.2" @@ -1570,6 +1639,14 @@ name = "itoa" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "js-sys" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "wasm-bindgen 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "jsonrpc-client-transports" version = "14.0.5" @@ -1928,6 +2005,7 @@ name = "models" version = "0.0.1" dependencies = [ "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "criterion 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "crypto_exports 0.1.0", "ethabi 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "ethsign 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2027,6 +2105,11 @@ dependencies = [ "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "oorandom" +version = "11.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "opaque-debug" version = "0.2.3" @@ -2224,6 +2307,17 @@ dependencies = [ "web3 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "plotters" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "js-sys 0.3.36 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.36 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "ppv-lite86" version = "0.2.6" @@ -2577,6 +2671,14 @@ dependencies = [ "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "regex-automata" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byteorder 1.3.4 
(registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "regex-syntax" version = "0.6.14" @@ -2695,6 +2797,14 @@ name = "safemem" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "schannel" version = "0.1.17" @@ -3166,6 +3276,15 @@ dependencies = [ "crunchy 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tinytemplate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "tokio" version = "0.1.22" @@ -3636,6 +3755,16 @@ name = "void" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "walkdir" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "want" version = "0.2.0" @@ -3651,6 +3780,64 @@ name = "wasi" version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "wasm-bindgen" +version = "0.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bumpalo 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = 
"wasm-bindgen-shared" +version = "0.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "web-sys" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "js-sys 0.3.36 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "web3" version = "0.8.0" @@ -3860,11 +4047,13 @@ dependencies = [ "checksum brotli-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4445dea95f4c2b41cde57cc9fee236ae4dbae88d8fcbdb4750fc1bb5d86aaecd" "checksum brotli2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0cb036c3eade309815c15ddbacec5b22c4d1f3983a774ab2eac2e3e9ea85568e" "checksum bstr 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "502ae1441a0a5adb8fbd38a5955a6416b9493e92b465de5e4a9bde6a539c2c48" +"checksum bumpalo 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "12ae9db68ad7fac5fe51304d20f016c911539251075a214f8e663babefa35187" "checksum byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" "checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" "checksum bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "130aac562c0dd69c56b3b1cc8ffd2e17be31d0b6c25b61c96b76231aa23e39e1" "checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" +"checksum cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4b9434b9a5aa1450faa3f9cb14ea0e8c53bb5d2b3c1bfd1ab4fc03e9f33fbfb0" "checksum cc 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "8dae9c4b8fedcae85592ba623c4fd08cfdab3e3b72d6df780c6ead964a69bfff" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01" @@ -3879,6 +4068,8 @@ dependencies = [ "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" +"checksum criterion 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1fc755679c12bda8e5523a71e4d654b6bf2e14bd838dfc48cde6559a05caf7d1" +"checksum criterion-plot 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a01e15e0ea58e8234f96146b1f91fa9d0e4dd7a38da93ff7a75d42c0b9d3a545" "checksum crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" "checksum crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c" "checksum crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca" @@ -3887,6 +4078,8 @@ dependencies = [ "checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" "checksum crunchy 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" "checksum crypto-mac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4434400df11d95d556bac068ddfedd482915eb18fe8bea89bc80b6e4b1c179e5" +"checksum csv 1.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "00affe7f6ab566df61b4be3ce8cf16bc2576bca0963ceb0955e45d514bf9a279" +"checksum csv-core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" "checksum ctr 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "022cd691704491df67d25d006fe8eca083098253c4d43516c2206479c58c6736" "checksum ctrlc 3.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7dfd2d8b4c82121dfdff120f818e09fc4380b0b7e17a742081a89b94853e87f" "checksum derive_more 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6d944ac6003ed268757ef1ee686753b57efc5fcf0ebe7b64c9fc81e7e32ff839" @@ -3961,6 +4154,7 @@ dependencies = [ "checksum ipconfig 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa79fa216fbe60834a9c0737d7fcd30425b32d1c58854663e24d4c4b328ed83f" "checksum itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" "checksum itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e" +"checksum js-sys 0.3.36 (registry+https://github.com/rust-lang/crates.io-index)" = "1cb931d43e71f560c81badb0191596562bafad2be06a3f9025b845c847c60df5" "checksum jsonrpc-client-transports 14.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0a9ae166c4d1f702d297cd76d4b55758ace80272ffc6dbb139fdc1bf810de40b" "checksum jsonrpc-core 11.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97b83fdc5e0218128d0d270f2f2e7a5ea716f3240c8518a58bc89e6716ba8581" "checksum jsonrpc-core 14.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "fe3b688648f1ef5d5072229e2d672ecb92cbff7d1c79bcf3fd5898f3f3df0970" @@ -4004,6 +4198,7 @@ dependencies = [ "checksum num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "3f6ea62e9d81a77cd3ee9a2a5b9b609447857f3d358704331e4ef39eb247fcba" "checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096" "checksum num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6" +"checksum oorandom 11.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ebcec7c9c2a95cacc7cd0ecb89d8a8454eca13906f6deb55258ffff0adeb9405" "checksum opaque-debug 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" "checksum openssl 0.10.28 (registry+https://github.com/rust-lang/crates.io-index)" = "973293749822d7dd6370d6da1e523b0d1db19f06c459134c658b2a4261378b52" 
"checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" @@ -4023,6 +4218,7 @@ dependencies = [ "checksum pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "237844750cfbb86f67afe27eee600dfbbcb6188d734139b534cbfbf4f96792ae" "checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" "checksum pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677" +"checksum plotters 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "4e3bb8da247d27ae212529352020f3e5ee16e83c0c258061d27b08ab92675eeb" "checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" "checksum pq-sys 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac25eee5a0582f45a67e837e350d784e7003bd29a5f460796772061ca49ffda" "checksum primitive-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2288eb2a39386c4bc817974cc413afe173010dc80e470fcb1e9a35580869f024" @@ -4058,6 +4254,7 @@ dependencies = [ "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "322cf97724bea3ee221b78fe25ac9c46114ebb51747ad5babd51a2fc6a8235a8" +"checksum regex-automata 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4" "checksum regex-syntax 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b28dfe3fe9badec5dbf0a79a9cccad2cfc2ab5484bdb3e44cbd1ae8b3ba2be06" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum reqwest 0.9.24 (registry+https://github.com/rust-lang/crates.io-index)" = "f88643aea3c1343c804950d7bf983bd2067f5ab59db6d613a08e05572f2714ab" @@ -4071,6 +4268,7 @@ dependencies = [ "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" "checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" "checksum safemem 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" +"checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" "checksum schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "507a9e6e8ffe0a4e0ebb9a10293e62fdf7657c06f1b8bb07a8fcf697d2abf295" "checksum scheduled-thread-pool 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f5de7bc31f28f8e6c28df5e1bf3d10610f5fdc14cc95f272853512c70a2bd779" "checksum scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "332ffa32bf586782a3efaeb58f127980944bbc8c4d6913a86107ac2a5ab24b28" @@ -4114,6 +4312,7 @@ dependencies = [ 
"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum tiny-keccak 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +"checksum tinytemplate 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "57a3c6667d3e65eb1bc3aed6fd14011c6cbc3a0665218ab7f5daf040b9ec371a" "checksum tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" "checksum tokio 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8fdd17989496f49cdc57978c96f0c9fe5e4a58a8bddc6813c449a4624f6a030b" "checksum tokio-buf 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" @@ -4159,8 +4358,15 @@ dependencies = [ "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +"checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" "checksum want 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" "checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +"checksum wasm-bindgen 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)" = "3557c397ab5a8e347d434782bcd31fc1483d927a6826804cec05cc792ee2519d" +"checksum wasm-bindgen-backend 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)" = "e0da9c9a19850d3af6df1cb9574970b566d617ecfaf36eb0b706b6f3ef9bd2f8" +"checksum wasm-bindgen-macro 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)" = "0f6fde1d36e75a714b5fe0cffbb78978f222ea6baebb726af13c78869fdb4205" +"checksum wasm-bindgen-macro-support 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)" = "25bda4168030a6412ea8a047e27238cadf56f0e53516e1e83fec0a8b7c786f6d" +"checksum wasm-bindgen-shared 0.2.59 (registry+https://github.com/rust-lang/crates.io-index)" = "fc9f36ad51f25b0219a3d4d13b90eb44cd075dff8b6280cca015775d7acaddd8" +"checksum web-sys 0.3.36 (registry+https://github.com/rust-lang/crates.io-index)" = "721c6263e2c66fd44501cc5efbfa2b7dfa775d13e4ea38c46299646ed1f9c70a" "checksum web3 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "076f34ed252d74a8521e3b013254b1a39f94a98f23aae7cfc85cda6e7b395664" "checksum websocket 0.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c9faed2bff8af2ea6b9f8b917d3d00b467583f6781fe3def174a9e33c879703" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" diff --git a/core/models/Cargo.toml b/core/models/Cargo.toml index 3f6e6ef59c..8a9c46604f 100644 --- 
a/core/models/Cargo.toml +++ b/core/models/Cargo.toml @@ -24,6 +24,14 @@ futures = { version = "0.3", features = ["compat"] } ethsign = "0.7.3" tiny-keccak = "1.4.2" +[dev-dependencies] +criterion = "0.3.0" + +[[bench]] +name = "criterion" +harness = false +path = "benches/criterion/lib.rs" + [features] default=[] run_benches=[] diff --git a/core/models/benches/criterion/lib.rs b/core/models/benches/criterion/lib.rs new file mode 100644 index 0000000000..e9de2edec9 --- /dev/null +++ b/core/models/benches/criterion/lib.rs @@ -0,0 +1,8 @@ +use criterion::{criterion_group, criterion_main}; + +use crate::merkle_tree::bench_merkle_tree; + +mod merkle_tree; + +criterion_group!(benches, bench_merkle_tree); +criterion_main!(benches); diff --git a/core/models/benches/criterion/merkle_tree.rs b/core/models/benches/criterion/merkle_tree.rs new file mode 100644 index 0000000000..c6d42520e7 --- /dev/null +++ b/core/models/benches/criterion/merkle_tree.rs @@ -0,0 +1,49 @@ +use criterion::{black_box, BatchSize, Bencher, Criterion}; + +use models::circuit::account::CircuitAccount; +use models::franklin_crypto::bellman::pairing::bn256::{Bn256, Fr}; +use models::merkle_tree::{PedersenHasher, SparseMerkleTree}; + +const N_ACCOUNTS: u64 = 10; + +type RealSMT = SparseMerkleTree, Fr, PedersenHasher>; + +fn gen_account(id: u64) -> CircuitAccount { + let mut account = CircuitAccount::::default(); + + let id_hex = format!("{:064x}", id); + account.address = Fr::from_hex(id_hex.as_ref()).unwrap(); + + account +} + +fn bench_tree_create(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + + b.iter(|| { + RealSMT::new(black_box(depth)); + }); +} + +fn bench_tree_insert(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + + let setup = || (0..N_ACCOUNTS).map(gen_account).collect::>(); + + b.iter_batched( + setup, + |accounts| { + let mut tree = RealSMT::new(depth); + + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id as u32, account); + } + }, + BatchSize::SmallInput, + ); +} + +pub fn bench_merkle_tree(c: &mut Criterion) { + c.bench_function("Merkle tree create", bench_tree_create); + c.bench_function("Merkle tree insert", bench_tree_insert); +} diff --git a/core/models/src/circuit/account.rs b/core/models/src/circuit/account.rs index f1ae4f8efb..69a997a039 100644 --- a/core/models/src/circuit/account.rs +++ b/core/models/src/circuit/account.rs @@ -10,6 +10,7 @@ use crate::primitives::{GetBits, GetBitsFixed}; pub type CircuitAccountTree = SparseMerkleTree, Fr, PedersenHasher>; pub type CircuitBalanceTree = SparseMerkleTree, Fr, PedersenHasher>; + pub struct CircuitAccount { pub subtree: SparseMerkleTree, E::Fr, PedersenHasher>, pub nonce: E::Fr, From 56b16566ff1045b6d2818d28a28ab894bb7aa4ce Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Thu, 26 Mar 2020 08:00:14 +0200 Subject: [PATCH 117/186] Clean up zksync-crypto code --- js/zksync-crypto/Cargo.toml | 1 - js/zksync-crypto/src/lib.rs | 50 ++++++++++++++++++++++------------- js/zksync-crypto/tests/web.rs | 13 --------- 3 files changed, 31 insertions(+), 33 deletions(-) delete mode 100644 js/zksync-crypto/tests/web.rs diff --git a/js/zksync-crypto/Cargo.toml b/js/zksync-crypto/Cargo.toml index e48b8441ec..e306c53cb9 100644 --- a/js/zksync-crypto/Cargo.toml +++ b/js/zksync-crypto/Cargo.toml @@ -15,7 +15,6 @@ default = ["console_error_panic_hook"] [dependencies] wasm-bindgen = "0.2.59" crypto_exports = { path = "../../core/crypto_exports", version = "0.1.0" } -ff = { package = 
"ff_ce", version = "0.6.0"} hex = "0.3" sha2 = "0.8" diff --git a/js/zksync-crypto/src/lib.rs b/js/zksync-crypto/src/lib.rs index 38d2588a8d..b9276d6eca 100644 --- a/js/zksync-crypto/src/lib.rs +++ b/js/zksync-crypto/src/lib.rs @@ -1,5 +1,11 @@ +//! Utils for signing zksync transactions. +//! This crate is compiled into wasm to be used in `zksync.js`. + mod utils; +const PACKED_POINT_SIZE: usize = 32; +const PACKED_SIGNATURE_SIZE: usize = 64; + pub use crypto_exports::franklin_crypto::bellman::pairing::bn256::{Bn256 as Engine, Fr}; pub type Fs = ::Fs; thread_local! { @@ -45,7 +51,9 @@ pub fn private_key_from_seed(seed: &[u8]) -> Vec { hasher.result().to_vec() }; let mut fs_repr = FsRepr::default(); - fs_repr.read_be(&raw_priv_key[..]).unwrap(); + fs_repr + .read_be(&raw_priv_key[..]) + .expect("failed to read raw_priv_key"); if Fs::from_repr(fs_repr).is_ok() { return raw_priv_key; } else { @@ -54,15 +62,19 @@ pub fn private_key_from_seed(seed: &[u8]) -> Vec { } } +fn read_signing_key(private_key: &[u8]) -> PrivateKey { + let mut fs_repr = FsRepr::default(); + fs_repr + .read_be(private_key) + .expect("couldn't read private key repr"); + PrivateKey::(Fs::from_repr(fs_repr).expect("couldn't read private key from repr")) +} + #[wasm_bindgen] pub fn private_key_to_pubkey_hash(private_key: &[u8]) -> Vec { let p_g = FixedGenerators::SpendingKeyGenerator; - let sk = { - let mut fs_repr = FsRepr::default(); - fs_repr.read_be(private_key).unwrap(); - PrivateKey::(Fs::from_repr(fs_repr).unwrap()) - }; + let sk = read_signing_key(private_key); let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params)); pub_key_hash(&pubkey) @@ -71,16 +83,13 @@ pub fn private_key_to_pubkey_hash(private_key: &[u8]) -> Vec { #[wasm_bindgen] pub fn sign_musig_sha256(private_key: &[u8], msg: &[u8]) -> Vec { let p_g = FixedGenerators::SpendingKeyGenerator; - - let sk: PrivateKey = { - let mut fs_repr = FsRepr::default(); - fs_repr.read_be(private_key).unwrap(); - PrivateKey::(Fs::from_repr(fs_repr).unwrap()) - }; + let sk = read_signing_key(private_key); let pubkey = JUBJUB_PARAMS.with(|params| PublicKey::from_private(&sk, p_g, params)); - let mut packed_point = [0u8; 32]; - pubkey.write(packed_point.as_mut()).unwrap(); + let mut packed_point = [0u8; PACKED_POINT_SIZE]; + pubkey + .write(packed_point.as_mut()) + .expect("failed to write pubkey to packed_point"); let signable_msg = pedersen_hash_tx_msg(msg); @@ -88,13 +97,16 @@ pub fn sign_musig_sha256(private_key: &[u8], msg: &[u8]) -> Vec { let sign = JUBJUB_PARAMS.with(|params| sk.musig_sha256_sign(&signable_msg, &seed1, p_g, params)); - let mut packed_signature = [0u8; 64]; - let (r_bar, s_bar) = packed_signature.as_mut().split_at_mut(32); + let mut packed_signature = [0u8; PACKED_SIGNATURE_SIZE]; + let (r_bar, s_bar) = packed_signature.as_mut().split_at_mut(PACKED_POINT_SIZE); - sign.r.write(r_bar).unwrap(); - sign.s.into_repr().write_le(s_bar).unwrap(); + sign.r.write(r_bar).expect("failed to write signature"); + sign.s + .into_repr() + .write_le(s_bar) + .expect("failed to write signature repr"); - let mut result = Vec::with_capacity(32 + 64); + let mut result = Vec::with_capacity(PACKED_POINT_SIZE + PACKED_SIGNATURE_SIZE); result.extend_from_slice(&packed_point); result.extend_from_slice(&packed_signature[..]); result diff --git a/js/zksync-crypto/tests/web.rs b/js/zksync-crypto/tests/web.rs deleted file mode 100644 index de5c1dafef..0000000000 --- a/js/zksync-crypto/tests/web.rs +++ /dev/null @@ -1,13 +0,0 @@ -//! 
Test suite for the Web and headless browsers. - -#![cfg(target_arch = "wasm32")] - -extern crate wasm_bindgen_test; -use wasm_bindgen_test::*; - -wasm_bindgen_test_configure!(run_in_browser); - -#[wasm_bindgen_test] -fn pass() { - assert_eq!(1 + 1, 2); -} From 81fb08e6c4c52dd93206f4a8cdfda1929a23509c Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Wed, 25 Mar 2020 10:12:22 +0200 Subject: [PATCH 118/186] Increase default fee for deposit and emergency withdraw --- js/zksync.js/src/wallet.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index d90b95aedd..929dc8d028 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -19,6 +19,11 @@ import { SYNC_MAIN_CONTRACT_INTERFACE } from "./utils"; +// Our MetaMask users sometimes use custom gas price values, +// which we can't know. We use this constant to assure that +// gasprice from our calculations isn't smaller than actually used one. +const metamaskIncreaseGasPriceFactor = 10; + class ZKSyncTxError extends Error { constructor( message: string, @@ -396,6 +401,7 @@ export class Wallet { deposit.token, gasPrice ); + maxFeeInETHToken = maxFeeInETHToken.mul(metamaskIncreaseGasPriceFactor); } const mainZkSyncContract = new Contract( this.provider.contractAddress.mainContract, @@ -485,6 +491,7 @@ export class Wallet { maxFeeInETHToken = await ethProxy.estimateEmergencyWithdrawFeeInETHToken( gasPrice ); + maxFeeInETHToken = maxFeeInETHToken.mul(metamaskIncreaseGasPriceFactor); } let accountId; From 2ad0a0bef1f6e156b13be86ab389622f2a228831 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 26 Mar 2020 09:18:20 +0300 Subject: [PATCH 119/186] Assign the nonce in the same db transaction as saving the tx --- core/models/src/ethereum.rs | 17 +++++++++++ core/storage/src/ethereum/mod.rs | 43 ++++++++++++++++----------- core/storage/src/tests/chain/block.rs | 4 --- core/storage/src/tests/ethereum.rs | 32 ++++++++++---------- 4 files changed, 60 insertions(+), 36 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index 7d1e298857..38261b5c6e 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -90,6 +90,12 @@ impl ETHOperation { false } } + + /// Completes the object state with the data obtained from the database. + pub fn complete(&mut self, inserted_data: InsertedOperationResponse) { + self.id = inserted_data.id; + self.nonce = inserted_data.nonce; + } } impl PartialEq for ETHOperation { @@ -106,3 +112,14 @@ impl PartialEq for ETHOperation { && (self.final_hash == other.final_hash) } } + +/// Structure representing the result of the insertion of the Ethereum +/// operation into the database. +/// Contains the assigned nonce and ID for the operation. +pub struct InsertedOperationResponse { + /// Unique numeric identifier of the Ethereum operation. + pub id: i64, + /// Nonce assigned for the Ethereum operation. Meant to be used for all the + /// transactions sent within one particular Ethereum operation. 
+ pub nonce: U256, +} diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index e56f133797..e935963ce7 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -8,7 +8,7 @@ use diesel::prelude::*; use web3::types::{H256, U256}; // Workspace imports use models::{ - ethereum::{ETHOperation, OperationType}, + ethereum::{ETHOperation, InsertedOperationResponse, OperationType}, Operation, }; // Local imports @@ -137,27 +137,30 @@ impl<'a> EthereumSchema<'a> { } /// Stores the sent (but not confirmed yet) Ethereum transaction in the database. - #[allow(clippy::too_many_arguments)] // OK for this particular method. + /// Returns the `ETHOperation` object containing the assigned nonce and operation ID. pub fn save_new_eth_tx( &self, op_type: OperationType, op_id: Option, hash: H256, - deadline_block: u64, - nonce: u32, - gas_price: BigDecimal, + last_deadline_block: i64, + last_used_gas_price: BigDecimal, raw_tx: Vec, - ) -> QueryResult { - let operation = NewETHOperation { - op_type: op_type.to_string(), - nonce: i64::from(nonce), - last_deadline_block: deadline_block as i64, - last_used_gas_price: gas_price, - raw_tx, - }; - + ) -> QueryResult { self.0.conn().transaction(|| { - // Insert the operation itself. + // It's important to assign nonce within the same db transaction + // as saving the operation to avoid the state divergence. + let nonce = self.get_next_nonce()?; + + // Create and insert the operation. + let operation = NewETHOperation { + op_type: op_type.to_string(), + nonce, + last_deadline_block, + last_used_gas_price, + raw_tx, + }; + let inserted_tx = insert_into(eth_operations::table) .values(&operation) .returning(eth_operations::id) @@ -196,7 +199,13 @@ impl<'a> EthereumSchema<'a> { // Update the stored stats. self.report_created_operation(op_type)?; - Ok(eth_op_id) + // Return the assigned ID and nonce. + let response = InsertedOperationResponse { + id: eth_op_id, + nonce: nonce.into(), + }; + + Ok(response) }) } @@ -339,7 +348,7 @@ impl<'a> EthereumSchema<'a> { /// This method expects the database to be initially prepared with inserting the actual /// nonce value. Currently the script `db-insert-eth-data.sh` is responsible for that /// and it's invoked within `db-reset` subcommand. 
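
The transaction block above is the heart of this patch: the nonce is read, incremented and bound to the freshly inserted row before the database transaction commits, so the counter and the stored operations can never diverge. A minimal self-contained sketch of that invariant, with a mutex-guarded in-memory store standing in for the diesel schema (all names here are illustrative stand-ins, not the zkSync storage API):

use std::sync::{Arc, Mutex};
use std::thread;

// Stand-in for the `eth_operations` table plus the nonce counter. Keeping both
// behind one lock plays the role of the single DB transaction in the hunk above.
#[derive(Default)]
struct DbState {
    next_nonce: u64,
    ops: Vec<(i64, u64)>, // (operation id, assigned nonce)
}

struct InsertedOperationResponse {
    id: i64,
    nonce: u64,
}

fn save_new_eth_tx(db: &Mutex<DbState>) -> InsertedOperationResponse {
    // One critical section = one DB transaction: the nonce is read, bumped and
    // bound to the inserted row before any other caller can observe the state.
    let mut state = db.lock().unwrap();
    let nonce = state.next_nonce;
    state.next_nonce += 1;
    let id = state.ops.len() as i64;
    state.ops.push((id, nonce));
    InsertedOperationResponse { id, nonce }
}

fn main() {
    let db = Arc::new(Mutex::new(DbState::default()));
    let handles: Vec<_> = (0..8)
        .map(|_| {
            let db = Arc::clone(&db);
            thread::spawn(move || save_new_eth_tx(&db).nonce)
        })
        .collect();
    let mut nonces: Vec<u64> = handles.into_iter().map(|h| h.join().unwrap()).collect();
    nonces.sort();
    // Every operation got a unique, gapless nonce even under concurrent callers.
    assert_eq!(nonces, (0..8).collect::<Vec<u64>>());
}
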
- pub fn get_next_nonce(&self) -> QueryResult { + pub(crate) fn get_next_nonce(&self) -> QueryResult { let old_nonce: ETHNonce = eth_nonce::table.first(self.0.conn())?; let new_nonce_value = old_nonce.nonce + 1; diff --git a/core/storage/src/tests/chain/block.rs b/core/storage/src/tests/chain/block.rs index 02052091bc..f6e5cd98d4 100644 --- a/core/storage/src/tests/chain/block.rs +++ b/core/storage/src/tests/chain/block.rs @@ -241,7 +241,6 @@ fn find_block_by_height_or_hash() { Some(ethereum_op_id), eth_tx_hash, 100, - 100, 100.into(), Default::default(), )?; @@ -276,7 +275,6 @@ fn find_block_by_height_or_hash() { Some(ethereum_op_id), eth_tx_hash, 100, - 100, 100.into(), Default::default(), )?; @@ -373,7 +371,6 @@ fn block_range() { Some(ethereum_op_id), eth_tx_hash, 100, - 100, 100.into(), Default::default(), )?; @@ -395,7 +392,6 @@ fn block_range() { Some(ethereum_op_id), eth_tx_hash, 100, - 100, 100.into(), Default::default(), )?; diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 7a492f5adf..5a2ffcf1db 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -38,26 +38,24 @@ pub struct EthereumTxParams { op: Operation, hash: H256, deadline_block: u64, - nonce: u32, gas_price: BigDecimal, raw_tx: Vec, } impl EthereumTxParams { - pub fn new(op_type: String, op: Operation, nonce: u32) -> Self { + pub fn new(op_type: String, op: Operation) -> Self { let op_id = op.id.unwrap() as u64; Self { op_type, op, hash: H256::from_low_u64_ne(op_id), deadline_block: 100, - nonce, gas_price: 1000.into(), raw_tx: Default::default(), } } - pub fn to_eth_op(&self, db_id: i64) -> ETHOperation { + pub fn to_eth_op(&self, db_id: i64, nonce: u64) -> ETHOperation { let op_type = OperationType::from_str(self.op_type.as_ref()) .expect("Stored operation type must have a valid value"); let last_used_gas_price = U256::from_str(&self.gas_price.to_string()).unwrap(); @@ -67,7 +65,7 @@ impl EthereumTxParams { id: db_id, op_type, op: Some(self.op.clone()), - nonce: self.nonce.into(), + nonce: nonce.into(), last_deadline_block: self.deadline_block, last_used_gas_price, used_tx_hashes, @@ -114,13 +112,12 @@ fn ethereum_storage() { let operation = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Store the Ethereum transaction. - let params = EthereumTxParams::new("commit".into(), operation.clone(), 1); - EthereumSchema(&conn).save_new_eth_tx( + let params = EthereumTxParams::new("commit".into(), operation.clone()); + let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(params.op.id.unwrap()), params.hash, - params.deadline_block, - params.nonce, + params.deadline_block as i64, params.gas_price.clone(), params.raw_tx.clone(), )?; @@ -131,20 +128,22 @@ fn ethereum_storage() { let op = eth_op.op.clone().expect("No Operation entry"); assert_eq!(op.id, operation.id); // Load the database ID, since we can't predict it for sure. - assert_eq!(eth_op, params.to_eth_op(eth_op.id)); + assert_eq!( + eth_op, + params.to_eth_op(eth_op.id, response.nonce.low_u64()) + ); // Store operation with ID 2. let block_number = 2; let operation_2 = BlockSchema(&conn).execute_operation(get_operation(block_number))?; // Create one more Ethereum transaction. 
- let params_2 = EthereumTxParams::new("commit".into(), operation_2.clone(), 2); - EthereumSchema(&conn).save_new_eth_tx( + let params_2 = EthereumTxParams::new("commit".into(), operation_2.clone()); + let response_2 = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(params_2.op.id.unwrap()), params_2.hash, - params_2.deadline_block, - params_2.nonce, + params_2.deadline_block as i64, params_2.gas_price.clone(), params_2.raw_tx.clone(), )?; @@ -155,7 +154,10 @@ fn ethereum_storage() { let eth_op = unconfirmed_operations[1].clone(); let op = eth_op.op.clone().expect("No Operation entry"); assert_eq!(op.id, operation_2.id); - assert_eq!(eth_op, params_2.to_eth_op(eth_op.id)); + assert_eq!( + eth_op, + params_2.to_eth_op(eth_op.id, response_2.nonce.low_u64()) + ); // Make the transaction as completed. EthereumSchema(&conn).confirm_eth_tx(¶ms_2.hash)?; From 90e263055904e9b82c2a6c4e7988dd5e62bf6581 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Thu, 26 Mar 2020 10:18:50 +0300 Subject: [PATCH 120/186] Adapt eth_sender to assign the nonce together with saving the operation --- core/models/src/ethereum.rs | 2 +- core/server/src/eth_sender/database.rs | 58 ++++++++-------- core/server/src/eth_sender/mod.rs | 72 +++++++++++--------- core/server/src/eth_sender/tests/mock.rs | 85 ++++++++++++++++-------- core/storage/src/ethereum/mod.rs | 49 +++++++------- core/storage/src/tests/chain/block.rs | 16 ++--- core/storage/src/tests/ethereum.rs | 4 +- 7 files changed, 168 insertions(+), 118 deletions(-) diff --git a/core/models/src/ethereum.rs b/core/models/src/ethereum.rs index 38261b5c6e..5f6947bc26 100644 --- a/core/models/src/ethereum.rs +++ b/core/models/src/ethereum.rs @@ -11,7 +11,7 @@ use web3::types::{H256, U256}; pub type EthOpId = i64; /// Type of the transactions sent to the Ethereum network. -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, Copy, PartialEq)] pub enum OperationType { /// Commit action (`commitBlock` method of the smart contract). Commit, diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index aafe4a6739..7c43470345 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -11,7 +11,7 @@ use bigdecimal::BigDecimal; use web3::types::{H256, U256}; // Workspace uses use models::{ - ethereum::{ETHOperation, EthOpId}, + ethereum::{ETHOperation, EthOpId, InsertedOperationResponse, OperationType}, Operation, }; use storage::ConnectionPool; @@ -26,13 +26,22 @@ pub(super) trait DatabaseAccess { fn restore_state(&self) -> Result<(VecDeque, Vec), failure::Error>; /// Saves a new unconfirmed operation to the database. - fn save_new_eth_tx(&self, op: ÐOperation) -> Result; + fn save_new_eth_tx( + &self, + op_type: OperationType, + op_id: Option, + deadline_block: i64, + used_gas_price: U256, + raw_tx: Vec, + ) -> Result; + + /// Adds a tx hash entry associated with some Ethereum operation to the database. + fn add_hash_entry(&self, eth_op_id: i64, hash: &H256) -> Result<(), failure::Error>; /// Adds a new tx info to the previously started Ethereum operation. fn update_eth_tx( &self, eth_op_id: EthOpId, - hash: &H256, new_deadline_block: i64, new_gas_value: U256, ) -> Result<(), failure::Error>; @@ -40,9 +49,6 @@ pub(super) trait DatabaseAccess { /// Marks an operation as completed in the database. fn confirm_operation(&self, hash: &H256) -> Result<(), failure::Error>; - /// Gets the next nonce to use from the database. 
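
The reshaped `DatabaseAccess` trait above splits resending into two calls: `update_eth_tx` now touches only the deadline and gas price, while each new hash goes through `add_hash_entry`, so a stuck operation keeps the nonce it was assigned at insertion time while its `used_tx_hashes` list grows. A small sketch of that bookkeeping with simplified types (plain integers and byte arrays instead of U256/H256, and no database), purely for illustration:

// Illustrative model (not the real types): what a resend does to an operation
// under the reworked interface. `update_eth_tx` moves only deadline/gas,
// `add_hash_entry` appends one more hash, and the nonce never changes.

type Hash = [u8; 32];

#[derive(Debug)]
struct EthOperation {
    nonce: u64,
    last_deadline_block: u64,
    last_used_gas_price: u64,
    used_tx_hashes: Vec<Hash>,
}

fn update_eth_tx(op: &mut EthOperation, new_deadline_block: u64, new_gas_price: u64) {
    op.last_deadline_block = new_deadline_block;
    op.last_used_gas_price = new_gas_price;
}

fn add_hash_entry(op: &mut EthOperation, hash: Hash) {
    op.used_tx_hashes.push(hash);
}

fn main() {
    // Operation created with the nonce assigned by the database and its first tx hash.
    let mut op = EthOperation {
        nonce: 42,
        last_deadline_block: 100,
        last_used_gas_price: 1_000,
        used_tx_hashes: vec![[1u8; 32]],
    };

    // Two resends of the stuck tx: same nonce, growing hash list, higher gas.
    for (i, gas) in [1_500u64, 2_250].iter().enumerate() {
        update_eth_tx(&mut op, 100 + 10 * (i as u64 + 1), *gas);
        add_hash_entry(&mut op, [i as u8 + 2; 32]);
    }

    assert_eq!(op.nonce, 42);
    assert_eq!(op.used_tx_hashes.len(), 3);
    assert_eq!(op.last_used_gas_price, 2_250);
}
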
- fn next_nonce(&self) -> Result; - /// Loads the stored Ethereum operations stats. fn load_stats(&self) -> Result; } @@ -72,37 +78,40 @@ impl DatabaseAccess for Database { Ok((unconfirmed_ops, unprocessed_ops)) } - fn save_new_eth_tx(&self, op: ÐOperation) -> Result { + fn save_new_eth_tx( + &self, + op_type: OperationType, + op: Option, + deadline_block: i64, + used_gas_price: U256, + raw_tx: Vec, + ) -> Result { let storage = self.db_pool.access_storage()?; - assert_eq!( - op.used_tx_hashes.len(), - 1, - "For the new operation there should be exactly one tx hash" - ); - let tx_hash = op.used_tx_hashes[0]; Ok(storage.ethereum_schema().save_new_eth_tx( - op.op_type.clone(), - op.op.clone().map(|op| op.id.unwrap()), - tx_hash, - op.last_deadline_block, - op.nonce.as_u32(), - BigDecimal::from_str(&op.last_used_gas_price.to_string()).unwrap(), - op.encoded_tx_data.clone(), + op_type, + op.map(|op| op.id.unwrap()), + deadline_block, + BigDecimal::from_str(&used_gas_price.to_string()).unwrap(), + raw_tx, )?) } + fn add_hash_entry(&self, eth_op_id: i64, hash: &H256) -> Result<(), failure::Error> { + let storage = self.db_pool.access_storage()?; + + Ok(storage.ethereum_schema().add_hash_entry(eth_op_id, hash)?) + } + fn update_eth_tx( &self, eth_op_id: EthOpId, - hash: &H256, new_deadline_block: i64, new_gas_value: U256, ) -> Result<(), failure::Error> { let storage = self.db_pool.access_storage()?; Ok(storage.ethereum_schema().update_eth_tx( eth_op_id, - hash, new_deadline_block, BigDecimal::from_str(&new_gas_value.to_string()).unwrap(), )?) @@ -113,11 +122,6 @@ impl DatabaseAccess for Database { Ok(storage.ethereum_schema().confirm_eth_tx(hash)?) } - fn next_nonce(&self) -> Result { - let storage = self.db_pool.access_storage()?; - Ok(storage.ethereum_schema().get_next_nonce()?) - } - fn load_stats(&self) -> Result { let storage = self.db_pool.access_storage()?; let stats = storage.ethereum_schema().load_stats()?; diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 043d79c306..57553b6bff 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -257,20 +257,46 @@ impl ETHSender { fn initialize_operation(&mut self, tx: TxData) -> Result<(), failure::Error> { let current_block = self.ethereum.block_number()?; let deadline_block = self.get_deadline_block(current_block); + let gas_price = self.ethereum.gas_price()?; + + // First, we should store the operation in the database and obtain the assigned + // operation ID and nonce. Without them we won't be able to sign the transaction. + let assigned_data = self.db.save_new_eth_tx( + tx.op_type, + tx.operation.clone(), + deadline_block as i64, + gas_price, + tx.raw.clone(), + )?; + + let mut new_op = ETHOperation { + id: assigned_data.id, + op_type: tx.op_type, + op: tx.operation, + nonce: assigned_data.nonce, + last_deadline_block: deadline_block, + last_used_gas_price: gas_price, + used_tx_hashes: vec![], // No hash yet, will be added below. + encoded_tx_data: tx.raw, + confirmed: false, + final_hash: None, + }; - let (mut new_tx, signed_tx) = - self.sign_new_tx(tx.op_type, tx.operation, tx.raw, deadline_block)?; + // Sign the transaction. + let signed_tx = self.sign_new_tx(&new_op)?; - let op_id = self.db.save_new_eth_tx(&new_tx)?; - new_tx.id = op_id; + // With signed tx, update the hash in the operation entry and in the db. 
+ new_op.used_tx_hashes.push(signed_tx.hash); + self.db.add_hash_entry(new_op.id, &signed_tx.hash)?; + // After storing all the tx data in the database, we can finally send the tx. info!( "Sending new tx: [ETH Operation . Tx hash: <{:#x}>. ZKSync operation: {}]", - new_tx.id, new_tx.op_type, new_tx.used_tx_hashes[0], self.zksync_operation_description(&new_tx), + new_op.id, new_op.op_type, signed_tx.hash, self.zksync_operation_description(&new_op), ); self.ethereum.send_tx(&signed_tx)?; - self.ongoing_ops.push_back(new_tx); + self.ongoing_ops.push_back(new_op); Ok(()) } @@ -356,7 +382,8 @@ impl ETHSender { let new_tx = self.create_supplement_tx(deadline_block, op)?; // New transaction should be persisted in the DB *before* sending it. self.db - .update_eth_tx(op.id, &new_tx.hash, deadline_block as i64, new_tx.gas_price)?; + .update_eth_tx(op.id, deadline_block as i64, new_tx.gas_price)?; + self.db.add_hash_entry(op.id, &new_tx.hash)?; info!( "Stuck tx processing: sending tx for op, eth_op_id: {} tx_hash: {:#x}, nonce: {}", @@ -428,34 +455,19 @@ impl ETHSender { } /// Creates a new Ethereum operation. - fn sign_new_tx( - &self, - op_type: OperationType, - op: Option, - raw_tx: Vec, - deadline_block: u64, - ) -> Result<(ETHOperation, SignedCallResult), failure::Error> { + fn sign_new_tx(&self, op: ÐOperation) -> Result { let tx_options = { let mut options = Options::default(); - let nonce = self.db.next_nonce()?; - options.nonce = Some(nonce.into()); + options.nonce = Some(op.nonce); + options.gas_price = Some(op.last_used_gas_price); options }; - let signed_tx = self.ethereum.sign_prepared_tx(raw_tx.clone(), tx_options)?; - let state = ETHOperation { - id: 0, // Will be initialized later. - op_type, - op, - nonce: signed_tx.nonce, - last_deadline_block: deadline_block, - last_used_gas_price: signed_tx.gas_price, - used_tx_hashes: vec![signed_tx.hash], - encoded_tx_data: raw_tx, - confirmed: false, - final_hash: None, - }; - Ok((state, signed_tx)) + let signed_tx = self + .ethereum + .sign_prepared_tx(op.encoded_tx_data.clone(), tx_options)?; + + Ok(signed_tx) } /// Creates a new transaction for the existing Ethereum operation. diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 7946b213fa..584ab6e361 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -10,7 +10,7 @@ use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; use models::{ - ethereum::{ETHOperation, EthOpId, OperationType}, + ethereum::{ETHOperation, EthOpId, InsertedOperationResponse, OperationType}, Action, Operation, }; // Local uses @@ -70,6 +70,14 @@ impl MockDatabase { assert!(self.unconfirmed_operations.borrow().get(&tx.id).is_none()); } + + fn next_nonce(&self) -> Result { + let old_value = self.nonce.get(); + let new_value = old_value + 1; + self.nonce.set(new_value); + + Ok(old_value) + } } impl DatabaseAccess for MockDatabase { @@ -77,24 +85,64 @@ impl DatabaseAccess for MockDatabase { Ok((self.restore_state.clone(), Vec::new())) } - fn save_new_eth_tx(&self, op: ÐOperation) -> Result { + fn save_new_eth_tx( + &self, + op_type: OperationType, + op: Option, + deadline_block: i64, + used_gas_price: U256, + encoded_tx_data: Vec, + ) -> Result { let id = self.pending_op_id.get(); let new_id = id + 1; self.pending_op_id.set(new_id); + let nonce = self.next_nonce()?; + // Store with the assigned ID. 
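
Taken together, the reworked `initialize_operation` above enforces a strict order: persist first, sign with the nonce the database handed back, record the hash, and only then broadcast. A compressed sketch of that ordering, with stand-in closures in place of the database, signer and Ethereum client (nothing here is the real API, it only restates the sequencing):

// Condensed sketch of the ordering above (stand-in types, not the real ones):
// 1) persist the operation and obtain (id, nonce) from the database,
// 2) sign the raw tx with exactly that nonce,
// 3) record the resulting hash before anything goes on the wire,
// 4) finally broadcast the signed transaction.

struct SignedTx {
    hash: [u8; 32],
    nonce: u64,
}

fn initialize_operation(
    save_new_eth_tx: impl Fn(&[u8]) -> (i64, u64), // -> (db id, assigned nonce)
    sign: impl Fn(&[u8], u64) -> SignedTx,         // raw tx + nonce -> signed tx
    add_hash_entry: impl Fn(i64, [u8; 32]),        // persist the hash
    send_tx: impl Fn(&SignedTx),                   // broadcast
    raw_tx: &[u8],
) {
    let (id, nonce) = save_new_eth_tx(raw_tx); // DB first: id and nonce now exist
    let signed = sign(raw_tx, nonce);          // nonce comes from the DB row
    add_hash_entry(id, signed.hash);           // hash stored before sending
    send_tx(&signed);                          // safe to broadcast: state is durable
}

fn main() {
    initialize_operation(
        |_raw| (1, 7),
        |_raw, nonce| SignedTx { hash: [0xab; 32], nonce },
        |id, hash| println!("stored hash {:x?} for op {}", &hash[..4], id),
        |tx| println!("sent tx with nonce {}", tx.nonce),
        b"encoded zkSync operation",
    );
}
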
- let mut op = op.clone(); - op.id = id; + let state = ETHOperation { + id, + op_type, + op, + nonce: nonce.into(), + last_deadline_block: deadline_block as u64, + last_used_gas_price: used_gas_price, + used_tx_hashes: vec![], + encoded_tx_data, + confirmed: false, + final_hash: None, + }; + + self.unconfirmed_operations.borrow_mut().insert(id, state); - self.unconfirmed_operations.borrow_mut().insert(id, op); + let response = InsertedOperationResponse { + id, + nonce: nonce.into(), + }; - Ok(id) + Ok(response) + } + + /// Adds a tx hash entry associated with some Ethereum operation to the database. + fn add_hash_entry(&self, eth_op_id: i64, hash: &H256) -> Result<(), failure::Error> { + assert!( + self.unconfirmed_operations + .borrow() + .contains_key(ð_op_id), + "Attempt to update tx that is not unconfirmed" + ); + + let mut ops = self.unconfirmed_operations.borrow_mut(); + let mut op = ops[ð_op_id].clone(); + op.used_tx_hashes.push(*hash); + ops.insert(eth_op_id, op); + + Ok(()) } fn update_eth_tx( &self, eth_op_id: EthOpId, - hash: &H256, new_deadline_block: i64, new_gas_value: U256, ) -> Result<(), failure::Error> { @@ -105,20 +153,11 @@ impl DatabaseAccess for MockDatabase { "Attempt to update tx that is not unconfirmed" ); - let mut op = self - .unconfirmed_operations - .borrow() - .get(ð_op_id) - .unwrap() - .clone(); - + let mut ops = self.unconfirmed_operations.borrow_mut(); + let mut op = ops[ð_op_id].clone(); op.last_deadline_block = new_deadline_block as u64; op.last_used_gas_price = new_gas_value; - op.used_tx_hashes.push(*hash); - - self.unconfirmed_operations - .borrow_mut() - .insert(eth_op_id, op); + ops.insert(eth_op_id, op); Ok(()) } @@ -149,14 +188,6 @@ impl DatabaseAccess for MockDatabase { Ok(()) } - fn next_nonce(&self) -> Result { - let old_value = self.nonce.get(); - let new_value = old_value + 1; - self.nonce.set(new_value); - - Ok(old_value) - } - fn load_stats(&self) -> Result { Ok(self.stats.borrow().clone()) } diff --git a/core/storage/src/ethereum/mod.rs b/core/storage/src/ethereum/mod.rs index e935963ce7..f235475af2 100644 --- a/core/storage/src/ethereum/mod.rs +++ b/core/storage/src/ethereum/mod.rs @@ -142,7 +142,6 @@ impl<'a> EthereumSchema<'a> { &self, op_type: OperationType, op_id: Option, - hash: H256, last_deadline_block: i64, last_used_gas_price: BigDecimal, raw_tx: Vec, @@ -174,18 +173,18 @@ impl<'a> EthereumSchema<'a> { // Obtain the operation ID for the follow-up queried. let eth_op_id = inserted_tx[0]; - // Add a hash entry. - let hash_entry = NewETHTxHash { - eth_op_id, - tx_hash: hash.as_bytes().to_vec(), - }; - let inserted_hashes_rows = insert_into(eth_tx_hashes::table) - .values(&hash_entry) - .execute(self.0.conn())?; - assert_eq!( - inserted_hashes_rows, 1, - "Wrong amount of updated rows (eth_tx_hashes)" - ); + // // Add a hash entry. + // let hash_entry = NewETHTxHash { + // eth_op_id, + // tx_hash: hash.as_bytes().to_vec(), + // }; + // let inserted_hashes_rows = insert_into(eth_tx_hashes::table) + // .values(&hash_entry) + // .execute(self.0.conn())?; + // assert_eq!( + // inserted_hashes_rows, 1, + // "Wrong amount of updated rows (eth_tx_hashes)" + // ); // If the operation ID was provided, we should also insert a binding entry. if let Some(op_id) = op_id { @@ -218,16 +217,8 @@ impl<'a> EthereumSchema<'a> { Ok(hash_entry.eth_op_id) } - /// Updates the Ethereum operation by adding a new tx data. 
- /// The new deadline block / gas value are placed instead of old values to the main entry, - /// and for hash a new `eth_tx_hashes` entry is added. - pub fn update_eth_tx( - &self, - eth_op_id: i64, - hash: &H256, - new_deadline_block: i64, - new_gas_value: BigDecimal, - ) -> QueryResult<()> { + /// Adds a tx hash entry associated with some Ethereum operation to the database. + pub fn add_hash_entry(&self, eth_op_id: i64, hash: &H256) -> QueryResult<()> { self.0.conn().transaction(|| { // Insert the new hash entry. let hash_entry = NewETHTxHash { @@ -241,7 +232,19 @@ impl<'a> EthereumSchema<'a> { inserted_hashes_rows, 1, "Wrong amount of updated rows (eth_tx_hashes)" ); + Ok(()) + }) + } + /// Updates the Ethereum operation by adding a new tx data. + /// The new deadline block / gas value are placed instead of old values to the main entry. + pub fn update_eth_tx( + &self, + eth_op_id: i64, + new_deadline_block: i64, + new_gas_value: BigDecimal, + ) -> QueryResult<()> { + self.0.conn().transaction(|| { // Update the stored tx. update(eth_operations::table.filter(eth_operations::id.eq(eth_op_id))) .set(( diff --git a/core/storage/src/tests/chain/block.rs b/core/storage/src/tests/chain/block.rs index f6e5cd98d4..e0cd85eb63 100644 --- a/core/storage/src/tests/chain/block.rs +++ b/core/storage/src/tests/chain/block.rs @@ -236,14 +236,14 @@ fn find_block_by_height_or_hash() { // commit/verify hashes. let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_new_eth_tx( + let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(ethereum_op_id), - eth_tx_hash, 100, 100.into(), Default::default(), )?; + EthereumSchema(&conn).add_hash_entry(response.id, ð_tx_hash)?; EthereumSchema(&conn).confirm_eth_tx(ð_tx_hash)?; // Initialize reference sample fields. @@ -270,14 +270,14 @@ fn find_block_by_height_or_hash() { // Do not add an ethereum confirmation for the last operation. if block_number != n_verified { - EthereumSchema(&conn).save_new_eth_tx( + let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Verify, Some(ethereum_op_id), - eth_tx_hash, 100, 100.into(), Default::default(), )?; + EthereumSchema(&conn).add_hash_entry(response.id, ð_tx_hash)?; EthereumSchema(&conn).confirm_eth_tx(ð_tx_hash)?; current_block_detail.verify_tx_hash = Some(format!("0x{}", hex::encode(eth_tx_hash))); @@ -366,14 +366,14 @@ fn block_range() { // commit/verify hashes. let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_new_eth_tx( + let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(ethereum_op_id), - eth_tx_hash, 100, 100.into(), Default::default(), )?; + EthereumSchema(&conn).add_hash_entry(response.id, ð_tx_hash)?; // Add verification for the block if required. 
if block_number <= n_verified { @@ -387,14 +387,14 @@ fn block_range() { ))?; let ethereum_op_id = operation.id.unwrap() as i64; let eth_tx_hash = ethereum_tx_hash(ethereum_op_id); - EthereumSchema(&conn).save_new_eth_tx( + let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Verify, Some(ethereum_op_id), - eth_tx_hash, 100, 100.into(), Default::default(), )?; + EthereumSchema(&conn).add_hash_entry(response.id, ð_tx_hash)?; EthereumSchema(&conn).confirm_eth_tx(ð_tx_hash)?; } } diff --git a/core/storage/src/tests/ethereum.rs b/core/storage/src/tests/ethereum.rs index 5a2ffcf1db..4142adce0d 100644 --- a/core/storage/src/tests/ethereum.rs +++ b/core/storage/src/tests/ethereum.rs @@ -116,11 +116,11 @@ fn ethereum_storage() { let response = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(params.op.id.unwrap()), - params.hash, params.deadline_block as i64, params.gas_price.clone(), params.raw_tx.clone(), )?; + EthereumSchema(&conn).add_hash_entry(response.id, ¶ms.hash)?; // Check that it can be loaded. let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; @@ -142,11 +142,11 @@ fn ethereum_storage() { let response_2 = EthereumSchema(&conn).save_new_eth_tx( OperationType::Commit, Some(params_2.op.id.unwrap()), - params_2.hash, params_2.deadline_block as i64, params_2.gas_price.clone(), params_2.raw_tx.clone(), )?; + EthereumSchema(&conn).add_hash_entry(response_2.id, ¶ms_2.hash)?; // Check that we now can load two operations. let unconfirmed_operations = EthereumSchema(&conn).load_unconfirmed_operations()?; From fe080bb971922bf4554ecdde4689daec2f3a351e Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 12:55:25 +0200 Subject: [PATCH 121/186] Moved functional of finishing upgrade to franklin contract --- bin/prepare-test-contracts.sh | 11 +- contracts/contracts/Bytes.sol | 83 ++++---- contracts/contracts/Config.sol | 6 + contracts/contracts/Events.sol | 12 +- contracts/contracts/Franklin.sol | 40 +++- contracts/contracts/Proxy.sol | 24 +++ contracts/contracts/Storage.sol | 8 + contracts/contracts/UpgradeGatekeeper.sol | 201 ++++++++---------- contracts/contracts/test/DummyTarget.sol | 34 ++- contracts/scripts/test-upgrade-franklin.ts | 14 +- contracts/src.ts/deploy.ts | 7 +- .../test/unit_tests/upgradeGatekeeper_test.ts | 42 ++-- 12 files changed, 255 insertions(+), 227 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 22398570b9..3c4c484aba 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -19,7 +19,6 @@ cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol cp $IN_DIR/Events.sol $OUT_DIR/Events.sol cp $IN_DIR/Operations.sol $OUT_DIR/Operations.sol cp $IN_DIR/VerificationKey.sol $OUT_DIR/VerificationKey.sol -cp $IN_DIR/Governance.sol $OUT_DIR/GovernanceTestNoInit.sol cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTestNoInit.sol # Change dependencies @@ -32,8 +31,7 @@ ssed 's/Storage/StorageTest/' -i $OUT_DIR/*.sol ssed 's/Config/ConfigTest/' -i $OUT_DIR/*.sol ssed 's/UpgradeGatekeeper/UpgradeGatekeeperTest/' -i $OUT_DIR/*.sol -# Renaming no init contracts -ssed 's/contract GovernanceTest/contract GovernanceTestNoInit/' -i $OUT_DIR/GovernanceTestNoInit.sol +# Renaming of FranklinTestNoInit contract ssed 's/contract FranklinTest/contract FranklinTestNoInit/' -i $OUT_DIR/FranklinTestNoInit.sol @@ -52,14 +50,13 @@ set_constant MAX_AMOUNT_OF_REGISTERED_TOKENS 4 $OUT_DIR/ConfigTest.sol set_constant EXPECT_VERIFICATION_IN 8 $OUT_DIR/ConfigTest.sol set_constant 
MAX_UNVERIFIED_BLOCKS 4 $OUT_DIR/ConfigTest.sol set_constant PRIORITY_EXPIRATION 16 $OUT_DIR/ConfigTest.sol -set_constant NOTICE_PERIOD 4 $OUT_DIR/UpgradeGatekeeperTest.sol +set_constant UPGRADE_NOTICE_PERIOD 4 $OUT_DIR/ConfigTest.sol create_constant_getter MAX_AMOUNT_OF_REGISTERED_TOKENS $OUT_DIR/ConfigTest.sol -create_constant_getter NOTICE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol +create_constant_getter UPGRADE_NOTICE_PERIOD $OUT_DIR/UpgradeGatekeeperTest.sol # Verify always true set_constant DUMMY_VERIFIER true $OUT_DIR/VerifierTest.sol -# Make initialize function in no init contracts to do nothing -ssed -E "s/ function initialize(.*)/ function initialize\1\n return;/" -i $OUT_DIR/GovernanceTestNoInit.sol +# Make initialize function in FranklinTestNoInit contract to do nothing ssed -E "s/ function initialize(.*)/ function initialize\1\n return;/" -i $OUT_DIR/FranklinTestNoInit.sol diff --git a/contracts/contracts/Bytes.sol b/contracts/contracts/Bytes.sol index 717b09a921..0ada58e650 100644 --- a/contracts/contracts/Bytes.sol +++ b/contracts/contracts/Bytes.sol @@ -94,55 +94,50 @@ library Bytes { { require(_bytes.length >= (_start + _length), "bse11"); // bytes length is less then start byte + length bytes + if (_length == 0) { + return new bytes(0); + } + bytes memory tempBytes; assembly { - switch iszero(_length) - case 0 { - // Get a location of some free memory and store it in tempBytes as - // Solidity does for memory variables. - tempBytes := mload(0x40) - - // The first word of the slice result is potentially a partial - // word read from the original array. To read it, we calculate - // the length of that partial word and start copying that many - // bytes into the array. The first word we copy will start with - // data we don't care about, but the last `lengthmod` bytes will - // land at the beginning of the contents of the new array. When - // we're done copying, we overwrite the full first word with - // the actual length of the slice. - let lengthmod := and(_length, 31) - - // The multiplication in the next line is necessary - // because when slicing multiples of 32 bytes (lengthmod == 0) - // the following copy loop was copying the origin's length - // and then ending prematurely not copying everything it should. - let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) - let end := add(mc, _length) - - for { - // The multiplication in the next line has the same exact purpose - // as the one above. - let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) - } lt(mc, end) { - mc := add(mc, 0x20) - cc := add(cc, 0x20) - } { - mstore(mc, mload(cc)) - } - - mstore(tempBytes, _length) - - //update free-memory pointer - //allocating the array padded to 32 bytes like the compiler does now - mstore(0x40, and(add(mc, 31), not(31))) - } - //if we want a zero-length slice let's just return a zero-length array - default { - tempBytes := mload(0x40) + // Get a location of some free memory and store it in tempBytes as + // Solidity does for memory variables. + tempBytes := mload(0x40) - mstore(0x40, add(tempBytes, 0x20)) + // The first word of the slice result is potentially a partial + // word read from the original array. To read it, we calculate + // the length of that partial word and start copying that many + // bytes into the array. The first word we copy will start with + // data we don't care about, but the last `lengthmod` bytes will + // land at the beginning of the contents of the new array. 
When + // we're done copying, we overwrite the full first word with + // the actual length of the slice. + let lengthmod := and(_length, 31) + + // The multiplication in the next line is necessary + // because when slicing multiples of 32 bytes (lengthmod == 0) + // the following copy loop was copying the origin's length + // and then ending prematurely not copying everything it should. + let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) + let end := add(mc, _length) + + for { + // The multiplication in the next line has the same exact purpose + // as the one above. + let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) } + + mstore(tempBytes, _length) + + //update free-memory pointer + //allocating the array padded to 32 bytes like the compiler does now + mstore(0x40, and(add(mc, 31), not(31))) } return tempBytes; diff --git a/contracts/contracts/Config.sol b/contracts/contracts/Config.sol index 48c1ff425c..f3ff3b4520 100644 --- a/contracts/contracts/Config.sol +++ b/contracts/contracts/Config.sol @@ -5,6 +5,12 @@ pragma solidity 0.5.16; /// @author Matter Labs contract Config { + /// @notice Notice period before activation preparation status of upgrade mode (in seconds) + uint constant UPGRADE_NOTICE_PERIOD = 2 weeks; + + /// @notice Period after the start of preparation when contract wouldn't register new priority operations (in seconds) + uint constant UPGRADE_PREPARATION_LOCK_PERIOD = 1 days; + /// @notice zkSync address length uint8 constant ADDRESS_BYTES = 20; diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index d813aef9ce..0303a69047 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -57,27 +57,25 @@ contract Events { /// @author Matter Labs contract UpgradeEvents { - /// @notice Event emitted when new proxy is added to upgrade gatekeeper + /// @notice Event emitted when new proxy is added to upgrade gatekeeper's list of managed contracts event ProxyAdded( address proxyAddress ); - /// @notice Event emitted when list of proxies managed by the upgrade gatekeeper is cleared - event ProxyListCleared(); - /// @notice Upgrade mode enter event - event UpgradeModeActivated(); + event NoticePeriodStarted( + address[] newTargets + ); /// @notice Upgrade mode cancel event event UpgradeCanceled(); /// @notice Upgrade mode preparation status event - event UpgradeModePreparationStatusActivated(); + event PreparationStarted(); /// @notice Upgrade mode complete event event UpgradeCompleted( address proxyAddress, - uint64 version, address newTargetAddress ); diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index ecd3b4875e..45a31674b8 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -14,6 +14,36 @@ import "./Operations.sol"; /// @author Matter Labs contract Franklin is Storage, Config, Events { + // Upgrade functional + + function upgradeNoticePeriod() external pure returns (uint) { + return UPGRADE_NOTICE_PERIOD; + } + + /// @notice Notification that upgrade preparation status is activated + function upgradePreparationStarted() external { + upgradePreparation = true; + upgradePreparationActivationTime = now; + } + + /// @notice Notification that upgrade canceled + function upgradeCanceled() external { + upgradePreparation = false; + upgradePreparationActivationTime = 0; + } + + /// @notice Notification that upgrade finishes + 
function upgradeFinishes() external { + upgradePreparation = false; + upgradePreparationActivationTime = 0; + } + + /// @notice Checks that contract is ready for upgrade + /// @return bool flag indicating that contract is ready for upgrade + function readyForUpgrade() external view returns (bool) { + return totalOpenPriorityRequests == 0; + } + // // Migration // // Address of the new version of the contract to migrate accounts to @@ -51,14 +81,6 @@ contract Franklin is Storage, Config, Events { blocks[0].stateRoot = _genesisRoot; } - function totalRegisteredPriorityOperations() public view returns (uint64) { - return firstPriorityRequestId + totalOpenPriorityRequests; - } - - function totalVerifiedPriorityOperations() public view returns (uint64) { - return firstPriorityRequestId; - } - /// @notice executes pending withdrawals /// @param _n The number of withdrawals to complete starting from oldest function completeWithdrawals(uint32 _n) external { @@ -696,6 +718,8 @@ contract Franklin is Storage, Config, Events { uint256 _fee, bytes memory _pubData ) internal { + require(!upgradePreparation || now >= upgradePreparationActivationTime + UPGRADE_PREPARATION_LOCK_PERIOD, "apr11"); // apr11 - priority request can't be added during lock period of preparation status of upgrade + // Expiration block is: current block number + priority expiration delta uint256 expirationBlock = block.number + PRIORITY_EXPIRATION; diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol index 198b358ee8..7660c6dbad 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -58,6 +58,30 @@ contract Proxy is Ownable { require(initializationSuccess, "ufu11"); // ufu11 - target initialization failed } + /// @notice Notifies proxy contract that notice period started + function upgradeNoticePeriodStarted() external { + requireMaster(msg.sender); + getTarget().delegatecall(abi.encodeWithSignature("upgradeNoticePeriodStarted()")); + } + + /// @notice Notifies proxy contract that upgrade preparation status is activated + function upgradePreparationStarted() external { + requireMaster(msg.sender); + getTarget().delegatecall(abi.encodeWithSignature("upgradePreparationStarted()")); + } + + /// @notice Notifies proxy contract that upgrade canceled + function upgradeCanceled() external { + requireMaster(msg.sender); + getTarget().delegatecall(abi.encodeWithSignature("upgradeCanceled()")); + } + + /// @notice Notifies proxy contract that upgrade finishes + function upgradeFinishes() external { + requireMaster(msg.sender); + getTarget().delegatecall(abi.encodeWithSignature("upgradeFinishes()")); + } + /// @notice Performs a delegatecall to the contract implementation /// @dev Fallback function allowing to perform a delegatecall to the given implementation /// This function will return whatever the implementation call returns diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index c17e18e6f8..7ff5b52532 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -11,6 +11,14 @@ import "./Operations.sol"; /// @author Matter Labs contract Storage { + /// @notice Flag indicates that upgrade preparation status is active + /// @dev Will store false in case of not active upgrade mode + bool public upgradePreparation; + + /// @notice upgrade preparation activation timestamp (as seconds since unix epoch) + /// @dev Will be equal to zero in case of not active upgrade mode + uint public upgradePreparationActivationTime; + /// @notice Verifier 
contract. Used to verify block proof and exit proof Verifier internal verifier; diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 9ca93ad0dc..263c844b25 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -5,24 +5,46 @@ import "./Ownable.sol"; import "./Bytes.sol"; -/// @title Upgrade Gatekeeper Contract -/// @author Matter Labs -contract UpgradeGatekeeper is UpgradeEvents, Ownable { +/// @title Interface of the main contract +interface MainContract { - /// @notice Notice period before activation preparation status of upgrade mode (in seconds) - uint constant NOTICE_PERIOD = 2 weeks; + /// @notice Notice period before activation preparation status of upgrade mode + function upgradeNoticePeriod() external pure returns (uint); - /// @notice Versions of proxy contracts - mapping(address => uint64) public version; + /// @notice Notifies proxy contract that notice period started + function upgradeNoticePeriodStarted() external; - /// @notice Contract which processes priority operations - address public mainContractAddress; + /// @notice Notifies proxy contract that upgrade preparation status is activated + function upgradePreparationStarted() external; - /// @notice Number of proxy contracts managed by the gatekeeper - uint64 public numberOfProxies; + /// @notice Notifies proxy contract that upgrade canceled + function upgradeCanceled() external; - /// @notice Addresses of proxy contracts managed by the gatekeeper - mapping(uint64 => address) public proxyAddress; + /// @notice Notifies proxy contract that upgrade finishes + function upgradeFinishes() external; + + /// @notice Checks that contract is ready for upgrade + /// @return bool flag indicating that contract is ready for upgrade + function readyForUpgrade() external view returns (bool); + +} + +/// @title Interface of the proxy contract +interface UpgradeableProxy { + + /// @notice Upgrades target of upgradeable contract + /// @param newTarget New target + /// @param newTargetInitializationParameters New target initialization parameters + function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external; + +} + +/// @title Upgrade Gatekeeper Contract +/// @author Matter Labs +contract UpgradeGatekeeper is UpgradeEvents, Ownable { + + /// @notice Array of addresses of proxy contracts managed by the gatekeeper + address[] public proxies; /// @notice Upgrade mode statuses enum UpgradeStatus { @@ -33,107 +55,73 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { UpgradeStatus upgradeStatus; - /// @notice Notice period activation timestamp (in seconds) - /// @dev Will be equal to zero in case of not active mode - uint activationTime; + /// @notice Notice period activation timestamp (as seconds since unix epoch) + /// @dev Will be equal to zero in case of not active upgrade mode + uint noticePeriodActivationTime; - /// @notice Address of the next version of the contract to be upgraded per each proxy - /// @dev Will store zero in case of not active upgrade mode - mapping(address => address) nextTarget; + /// @notice Proxy which allows finish upgrade during preparation status of upgrade + MainContract mainContract; - /// @notice Number of priority operations that must be verified by main contract at the time of finishing upgrade - /// @dev Will store zero in case of not active upgrade mode or not active preparation status of upgrade mode - uint64 priorityOperationsToProcessBeforeUpgrade; + /// 
@notice Addresses of the next versions of the contracts to be upgraded (if element of this array is equal to zero address it means that this proxy will not be upgraded) + /// @dev Will be empty in case of not active upgrade mode + address[] nextTargets; /// @notice Contract constructor /// @param _mainContractAddress Address of contract which processes priority operations - /// @dev Calls Ownable contract constructor + /// @dev Calls Ownable contract constructor and adds _mainContractAddress to the list of contracts managed by the gatekeeper constructor(address _mainContractAddress) Ownable(msg.sender) public { - mainContractAddress = _mainContractAddress; - } - - /// @notice Clears list of proxies managed by the gatekeeper (for case of mistake when adding new proxies to the gatekeeper) - function clearProxyList() external { - requireMaster(msg.sender); - - upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; - activationTime = 0; - for (uint64 i = 0; i < numberOfProxies; i++) { - address proxy = proxyAddress[i]; - nextTarget[proxy] = address(0); - } - priorityOperationsToProcessBeforeUpgrade = 0; - - numberOfProxies = 0; - emit ProxyListCleared(); + mainContract = MainContract(_mainContractAddress); } /// @notice Adds a new proxy to the list of contracts managed by the gatekeeper /// @param proxy Address of proxy to add function addProxyContract(address proxy) external { requireMaster(msg.sender); - require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "apc11"); /// apc11 - proxy can't be added during upgrade - - proxyAddress[numberOfProxies] = proxy; - numberOfProxies++; + require(upgradeStatus == UpgradeStatus.Idle, "apc11"); /// apc11 - proxy can't be added during upgrade + proxies.push(proxy); emit ProxyAdded(proxy); } /// @notice Starts upgrade (activates notice period) - /// @param newTargets New proxies targets - function startProxyUpgrade(address[] calldata newTargets) external { + /// @param newTargets New proxies targets (if element of this array is equal to zero address it means that this proxy will not be upgraded) + function startUpgrade(address[] calldata newTargets) external { requireMaster(msg.sender); - require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Idle, "spu11"); // spu11 - unable to activate active upgrade mode - require(newTargets.length == numberOfProxies, "spu12"); // spu12 - number of new targets must be equal to the number of proxies - - upgradeStatus = UpgradeGatekeeper.UpgradeStatus.NoticePeriod; - activationTime = now; - for (uint64 i = 0; i < numberOfProxies; i++) { - address proxy = proxyAddress[i]; - nextTarget[proxy] = newTargets[i]; - } - priorityOperationsToProcessBeforeUpgrade = 0; - - emit UpgradeModeActivated(); + require(upgradeStatus == UpgradeStatus.Idle, "spu11"); // spu11 - unable to activate active upgrade mode + require(newTargets.length == proxies.length, "spu12"); // spu12 - number of new targets must be equal to the number of proxies + + mainContract.upgradeNoticePeriodStarted(); + upgradeStatus = UpgradeStatus.NoticePeriod; + noticePeriodActivationTime = now; + nextTargets = newTargets; + emit NoticePeriodStarted(newTargets); } /// @notice Cancels upgrade - function cancelProxyUpgrade() external { + function cancelUpgrade() external { requireMaster(msg.sender); - require(upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "cpu11"); // cpu11 - unable to cancel not active upgrade mode - - upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; - activationTime = 0; - for (uint64 i = 0; i < numberOfProxies; i++) { - address 
proxy = proxyAddress[i]; - nextTarget[proxy] = address(0); - } - priorityOperationsToProcessBeforeUpgrade = 0; + require(upgradeStatus != UpgradeStatus.Idle, "cpu11"); // cpu11 - unable to cancel not active upgrade mode + mainContract.upgradeCanceled(); + upgradeStatus = UpgradeStatus.Idle; + noticePeriodActivationTime = 0; + delete nextTargets; emit UpgradeCanceled(); } /// @notice Checks that preparation status is active and activates it if needed /// @return Bool flag indicating that preparation status is active after this call function startPreparation() public returns (bool) { - require(upgradeStatus != UpgradeGatekeeper.UpgradeStatus.Idle, "ugp11"); // ugp11 - unable to activate preparation status in case of not active upgrade mode + require(upgradeStatus != UpgradeStatus.Idle, "ugp11"); // ugp11 - unable to activate preparation status in case of not active upgrade mode - if (upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation) { + if (upgradeStatus == UpgradeStatus.Preparation) { return true; } - if (now >= activationTime + NOTICE_PERIOD) { - upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Preparation; - - (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( - abi.encodeWithSignature("totalRegisteredPriorityOperations()") - ); - require(mainContractCallSuccess, "ugp12"); // ugp12 - main contract static call failed - uint64 totalRegisteredPriorityOperations = abi.decode(encodedResult, (uint64)); - priorityOperationsToProcessBeforeUpgrade = totalRegisteredPriorityOperations; - - emit UpgradeModePreparationStatusActivated(); + if (now >= noticePeriodActivationTime + mainContract.upgradeNoticePeriod()) { + upgradeStatus = UpgradeStatus.Preparation; + mainContract.upgradePreparationStarted(); + emit PreparationStarted(); return true; } else { return false; @@ -143,49 +131,32 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @notice Finishes upgrade /// @param initParametersConcatenated New targets initialization parameters per each proxy (concatenated into one array) /// @param sizeOfInitParameters Sizes of targets initialization parameters (in bytes) - function finishProxyUpgrade(bytes calldata initParametersConcatenated, uint[] calldata sizeOfInitParameters) external { + function finishUpgrade(bytes calldata initParametersConcatenated, uint[] calldata sizeOfInitParameters) external { requireMaster(msg.sender); - require(upgradeStatus == UpgradeGatekeeper.UpgradeStatus.Preparation, "fpu11"); // fpu11 - unable to finish upgrade without preparation status active - require(sizeOfInitParameters.length == numberOfProxies, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of proxies - - (bool mainContractCallSuccess, bytes memory encodedResult) = mainContractAddress.staticcall( - abi.encodeWithSignature("totalVerifiedPriorityOperations()") - ); - require(mainContractCallSuccess, "fpu13"); // fpu13 - main contract static call failed - uint64 totalVerifiedPriorityOperations = abi.decode(encodedResult, (uint64)); - - require(totalVerifiedPriorityOperations >= priorityOperationsToProcessBeforeUpgrade, "fpu14"); // fpu14 - can't finish upgrade before verifying all priority operations received before start of preparation status + require(upgradeStatus == UpgradeStatus.Preparation, "fpu11"); // fpu11 - unable to finish upgrade without preparation status active + require(sizeOfInitParameters.length == proxies.length, "fpu12"); // fpu12 - number of new targets initialization parameters must be 
equal to the number of proxies + require(mainContract.readyForUpgrade(), "fpu13"); // fpu13 - main contract is not ready for upgrade + mainContract.upgradeFinishes(); bytes memory initParametersConcatenated = initParametersConcatenated; uint processedBytes = 0; - for (uint64 i = 0; i < numberOfProxies; i++) { - address proxy = proxyAddress[i]; - bytes memory targetInitParameters; - - // TODO: remove this when Bytes.slice function will be fixed - if (sizeOfInitParameters[i] == 0){ - targetInitParameters = new bytes(0); + for (uint64 i = 0; i < proxies.length; i++) { + address proxy = proxies[i]; + address nextTarget = nextTargets[i]; + if (nextTargets[i] == address(0)) { + require(sizeOfInitParameters[i] == 0, "fpu14"); // fpu14 - there must be no init parameters bytes for proxy that wouldn't be upgraded } else { + bytes memory targetInitParameters; (processedBytes, targetInitParameters) = Bytes.read(initParametersConcatenated, processedBytes, sizeOfInitParameters[i]); + UpgradeableProxy(proxy).upgradeTarget(nextTarget, targetInitParameters); + emit UpgradeCompleted(proxy, nextTarget); } - - (bool proxyUpgradeCallSuccess, ) = proxy.call( - abi.encodeWithSignature("upgradeTarget(address,bytes)", nextTarget[proxy], targetInitParameters) - ); - require(proxyUpgradeCallSuccess, "fpu15"); // fpu15 - proxy contract call failed - - emit UpgradeCompleted(proxy, version[proxy], nextTarget[proxy]); - version[proxy]++; } - require(processedBytes == initParametersConcatenated.length, "fpu16"); // fpu16 - all targets initialization parameters bytes must be processed + require(processedBytes == initParametersConcatenated.length, "fpu15"); // fpu15 - all targets initialization parameters bytes must be processed - upgradeStatus = UpgradeGatekeeper.UpgradeStatus.Idle; - activationTime = 0; - for (uint64 i = 0; i < numberOfProxies; i++) { - address proxy = proxyAddress[i]; - nextTarget[proxy] = address(0); - } - priorityOperationsToProcessBeforeUpgrade = 0; + upgradeStatus = UpgradeStatus.Idle; + noticePeriodActivationTime = 0; + delete nextTargets; } } diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index c5d719ea95..dcd5d431e4 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -2,20 +2,25 @@ pragma solidity 0.5.16; interface DummyTarget { + function upgradeNoticePeriod() external pure returns (uint); + function get_DUMMY_INDEX() external pure returns (uint256); function initialize(bytes calldata initializationParameters) external; - function totalVerifiedPriorityOperations() external returns (uint64); - - function totalRegisteredPriorityOperations() external returns (uint64); - function verifyPriorityOperation() external; + function readyForUpgrade() external returns (bool); + } contract DummyFirst is DummyTarget { + uint constant UPGRADE_NOTICE_PERIOD = 4; + function upgradeNoticePeriod() external pure returns (uint) { + return UPGRADE_NOTICE_PERIOD; + } + uint256 private constant DUMMY_INDEX = 1; function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; @@ -32,11 +37,11 @@ contract DummyFirst is DummyTarget { } } - function totalVerifiedPriorityOperations() external returns (uint64){ + function totalVerifiedPriorityOperations() internal returns (uint64) { return _verifiedPriorityOperations; } - function totalRegisteredPriorityOperations() external returns (uint64){ + function totalRegisteredPriorityOperations() internal returns (uint64) { return 1; } @@ -44,10 +49,19 @@ contract 
DummyFirst is DummyTarget { _verifiedPriorityOperations++; } + function readyForUpgrade() external returns (bool) { + return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); + } + } contract DummySecond is DummyTarget { + uint constant UPGRADE_NOTICE_PERIOD = 4; + function upgradeNoticePeriod() external pure returns (uint) { + return UPGRADE_NOTICE_PERIOD; + } + uint256 private constant DUMMY_INDEX = 2; function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; @@ -64,11 +78,11 @@ contract DummySecond is DummyTarget { } } - function totalVerifiedPriorityOperations() external returns (uint64){ + function totalVerifiedPriorityOperations() internal returns (uint64) { return _verifiedPriorityOperations; } - function totalRegisteredPriorityOperations() external returns (uint64){ + function totalRegisteredPriorityOperations() internal returns (uint64) { return 0; } @@ -76,4 +90,8 @@ contract DummySecond is DummyTarget { _verifiedPriorityOperations++; } + function readyForUpgrade() external returns (bool) { + return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); + } + } diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index 9f7d819cc8..0a2c8b2b71 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -2,11 +2,11 @@ import {ethers} from "ethers"; import {ArgumentParser} from "argparse"; import {proxyContractCode, upgradeGatekeeperTestContractCode} from "../src.ts/deploy"; import {deployContract} from "ethereum-waffle"; +import {AddressZero} from "ethers/constants"; const {performance} = require('perf_hooks'); const {expect} = require("chai") -export const GovernanceTestNoInitContractCode = require(`../build/GovernanceTestNoInit`); export const FranklinTestNoInitContractCode = require(`../build/FranklinTestNoInit`); async function main() { @@ -43,12 +43,6 @@ async function main() { wallet, ); - const newTargetGovernance = await deployContract( - wallet, - GovernanceTestNoInitContractCode, - [], - {gasLimit: 6500000}, - ); const newTargetFranklin = await deployContract( wallet, FranklinTestNoInitContractCode, @@ -56,16 +50,16 @@ async function main() { {gasLimit: 6500000}, ); - let notice_period = parseInt(await upgradeGatekeeper.get_NOTICE_PERIOD()); + let notice_period = parseInt(await newTargetFranklin.upgradeNoticePeriod()); // in tests notice period of FranklinTestNoInit will be equal to FranklinTest - await (await upgradeGatekeeper.startProxyUpgrade([newTargetGovernance.address, newTargetFranklin.address])).wait(); + await (await upgradeGatekeeper.startUpgrade([AddressZero, AddressZero, newTargetFranklin.address])).wait(); // wait notice period await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); // finish upgrade await (await upgradeGatekeeper.startPreparation()).wait(); - await (await upgradeGatekeeper.finishProxyUpgrade([], [0, 0])).wait(); + await (await upgradeGatekeeper.finishUpgrade([], [0, 0, 0])).wait(); await expect(await proxyContract.getTarget()) .to.equal(newTargetFranklin.address); diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index abace0b77c..117b60ae9b 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -223,13 +223,14 @@ export class Deployer { ); this.addresses.UpgradeGatekeeper = contract.address; - await (await contract.addProxyContract(this.getDeployedContract('Governance').address)).wait(); - await (await 
contract.addProxyContract(this.getDeployedContract('Franklin').address)).wait(); - await (await this.getDeployedContract('Governance').transferMastership(contract.address)).wait(); await (await this.getDeployedContract('Verifier').transferMastership(contract.address)).wait(); await (await this.getDeployedContract('Franklin').transferMastership(contract.address)).wait(); + await (await contract.addProxyContract(this.getDeployedContract('Governance').address)).wait(); + await (await contract.addProxyContract(this.getDeployedContract('Verifier').address)).wait(); + await (await contract.addProxyContract(this.getDeployedContract('Franklin').address)).wait(); + return contract; } diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index a889102c27..656773f377 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -45,21 +45,21 @@ describe("UpgradeGatekeeper unit tests", function () { it("checking that requireMaster calls present", async () => { let UpgradeGatekeeperContract_with_wallet2_signer = await UpgradeGatekeeperContract.connect(wallet2); - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.clearProxyList() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startProxyUpgrade([]) )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelProxyUpgrade() )).revertReason).equal("oro11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishProxyUpgrade([], []) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.addProxyContract(AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startUpgrade([]) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelUpgrade() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishUpgrade([], []) )).revertReason).equal("oro11") }); it("checking UpgradeGatekeeper reverts; activation and cancelation upgrade", async () => { - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelProxyUpgrade() )).revertReason).equal("cpu11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.cancelUpgrade() )).revertReason).equal("cpu11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startPreparation() )).revertReason).equal("ugp11") - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade([], []) )).revertReason).equal("fpu11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([], []) )).revertReason).equal("fpu11") - await expect(UpgradeGatekeeperContract.startProxyUpgrade([DummySecond.address])) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startProxyUpgrade([]) )).revertReason).equal("spu11") - await expect(UpgradeGatekeeperContract.cancelProxyUpgrade()) + await expect(UpgradeGatekeeperContract.startUpgrade([DummySecond.address])) + .to.emit(UpgradeGatekeeperContract, 'NoticePeriodStarted') + expect((await getCallRevertReason( () => 
UpgradeGatekeeperContract.startUpgrade([]) )).revertReason).equal("spu11") + await expect(UpgradeGatekeeperContract.cancelUpgrade()) .to.emit(UpgradeGatekeeperContract, 'UpgradeCanceled') }); @@ -67,13 +67,13 @@ describe("UpgradeGatekeeper unit tests", function () { let start_time = performance.now(); // activate - await expect(UpgradeGatekeeperContract.startProxyUpgrade([DummySecond.address])) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModeActivated') + await expect(UpgradeGatekeeperContract.startUpgrade([DummySecond.address])) + .to.emit(UpgradeGatekeeperContract, 'NoticePeriodStarted') let activated_time = performance.now(); // wait and activate preparation status - let all_time_in_sec = parseInt(await UpgradeGatekeeperContract.get_NOTICE_PERIOD()); + let all_time_in_sec = parseInt(await DummyFirst.upgradeNoticePeriod()); for (let step = 1; step <= 3; step++) { if (step != 3) { while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { @@ -89,17 +89,17 @@ describe("UpgradeGatekeeper unit tests", function () { await UpgradeGatekeeperContract.startPreparation(); } else { await expect(UpgradeGatekeeperContract.startPreparation()) - .to.emit(UpgradeGatekeeperContract, 'UpgradeModePreparationStatusActivated') + .to.emit(UpgradeGatekeeperContract, 'PreparationStarted') } } // finish upgrade without verifying priority operations - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishProxyUpgrade([bytes[2], bytes[3]], [2]) )).revertReason).equal("fpu14") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([bytes[2], bytes[3]], [2]) )).revertReason).equal("fpu13") // finish upgrade await proxyDummyInterface.verifyPriorityOperation(); - await expect(UpgradeGatekeeperContract.finishProxyUpgrade([bytes[2], bytes[3]], [2])) + await expect(UpgradeGatekeeperContract.finishUpgrade([bytes[2], bytes[3]], [2])) .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') - .withArgs(proxyTestContract.address, 0, DummySecond.address) + .withArgs(proxyTestContract.address, DummySecond.address) await expect(await proxyTestContract.getTarget()) .to.equal(DummySecond.address); @@ -116,12 +116,4 @@ describe("UpgradeGatekeeper unit tests", function () { .to.equal(bytes[3]); }); - it("checking the presence in the main contract functions that will be called from the gatekeeper", async () => { - let mainContract = await deployContract(wallet, require('../../build/Franklin'), [], { - gasLimit: 6000000, - }); - await mainContract.totalRegisteredPriorityOperations(); - await mainContract.totalVerifiedPriorityOperations(); - }); - }); From 8ba69d3409aa57a5e8fb2bf06b947b4af0e93dc0 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 13:00:40 +0200 Subject: [PATCH 122/186] Removed generaing of extra contract in prepare-test-contracts.sh --- bin/prepare-test-contracts.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/bin/prepare-test-contracts.sh b/bin/prepare-test-contracts.sh index 3c4c484aba..7877be4fc7 100755 --- a/bin/prepare-test-contracts.sh +++ b/bin/prepare-test-contracts.sh @@ -15,10 +15,6 @@ cp $IN_DIR/Franklin.sol $OUT_DIR/FranklinTest.sol cp $IN_DIR/Storage.sol $OUT_DIR/StorageTest.sol cp $IN_DIR/Config.sol $OUT_DIR/ConfigTest.sol cp $IN_DIR/UpgradeGatekeeper.sol $OUT_DIR/UpgradeGatekeeperTest.sol -cp $IN_DIR/Bytes.sol $OUT_DIR/Bytes.sol -cp $IN_DIR/Events.sol $OUT_DIR/Events.sol -cp $IN_DIR/Operations.sol $OUT_DIR/Operations.sol -cp $IN_DIR/VerificationKey.sol $OUT_DIR/VerificationKey.sol cp 
$IN_DIR/Franklin.sol $OUT_DIR/FranklinTestNoInit.sol # Change dependencies From ad3c53728007aa6ecf3abe538b1edaa41e6047f5 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 13:34:06 +0200 Subject: [PATCH 123/186] small code format fixes in upgradeable storage --- contracts/contracts/Config.sol | 2 +- contracts/contracts/Storage.sol | 2 +- contracts/contracts/UpgradeGatekeeper.sol | 6 +++--- contracts/test/unit_tests/upgradeGatekeeper_test.ts | 2 ++ 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/contracts/contracts/Config.sol b/contracts/contracts/Config.sol index f3ff3b4520..f23f03b444 100644 --- a/contracts/contracts/Config.sol +++ b/contracts/contracts/Config.sol @@ -8,7 +8,7 @@ contract Config { /// @notice Notice period before activation preparation status of upgrade mode (in seconds) uint constant UPGRADE_NOTICE_PERIOD = 2 weeks; - /// @notice Period after the start of preparation when contract wouldn't register new priority operations (in seconds) + /// @notice Period after the start of preparation upgrade when contract wouldn't register new priority operations (in seconds) uint constant UPGRADE_PREPARATION_LOCK_PERIOD = 1 days; /// @notice zkSync address length diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index 7ff5b52532..2d416b8182 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -15,7 +15,7 @@ contract Storage { /// @dev Will store false in case of not active upgrade mode bool public upgradePreparation; - /// @notice upgrade preparation activation timestamp (as seconds since unix epoch) + /// @notice Upgrade preparation activation timestamp (as seconds since unix epoch) /// @dev Will be equal to zero in case of not active upgrade mode uint public upgradePreparationActivationTime; diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 263c844b25..bc8d28618e 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -59,13 +59,13 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { /// @dev Will be equal to zero in case of not active upgrade mode uint noticePeriodActivationTime; - /// @notice Proxy which allows finish upgrade during preparation status of upgrade - MainContract mainContract; - /// @notice Addresses of the next versions of the contracts to be upgraded (if element of this array is equal to zero address it means that this proxy will not be upgraded) /// @dev Will be empty in case of not active upgrade mode address[] nextTargets; + /// @notice Contract which allows finish upgrade during preparation status of upgrade + MainContract mainContract; + /// @notice Contract constructor /// @param _mainContractAddress Address of contract which processes priority operations /// @dev Calls Ownable contract constructor and adds _mainContractAddress to the list of contracts managed by the gatekeeper diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index 656773f377..ca2c518f96 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -56,6 +56,7 @@ describe("UpgradeGatekeeper unit tests", function () { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startPreparation() )).revertReason).equal("ugp11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([], []) )).revertReason).equal("fpu11") + 
expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startUpgrade([]) )).revertReason).equal("spu12") await expect(UpgradeGatekeeperContract.startUpgrade([DummySecond.address])) .to.emit(UpgradeGatekeeperContract, 'NoticePeriodStarted') expect((await getCallRevertReason( () => UpgradeGatekeeperContract.startUpgrade([]) )).revertReason).equal("spu11") @@ -93,6 +94,7 @@ describe("UpgradeGatekeeper unit tests", function () { } } + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([], []) )).revertReason).equal("fpu12") // finish upgrade without verifying priority operations expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([bytes[2], bytes[3]], [2]) )).revertReason).equal("fpu13") // finish upgrade From ab35c0234646dff0f6b3408ef33e69708ee567e6 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 13:37:06 +0200 Subject: [PATCH 124/186] Small fix --- contracts/contracts/UpgradeGatekeeper.sol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index bc8d28618e..2fa5a246a6 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -143,7 +143,7 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { for (uint64 i = 0; i < proxies.length; i++) { address proxy = proxies[i]; address nextTarget = nextTargets[i]; - if (nextTargets[i] == address(0)) { + if (nextTarget == address(0)) { require(sizeOfInitParameters[i] == 0, "fpu14"); // fpu14 - there must be no init parameters bytes for proxy that wouldn't be upgraded } else { bytes memory targetInitParameters; From c8d3f6a6fb0a2ef4a2ffd277f9ebb2ba1f09ce70 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 13:44:32 +0200 Subject: [PATCH 125/186] updated checking that requireMaster calls present in proxy_test.ts --- contracts/test/unit_tests/proxy_test.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/contracts/test/unit_tests/proxy_test.ts b/contracts/test/unit_tests/proxy_test.ts index 03bb7e19fd..3824f907f6 100644 --- a/contracts/test/unit_tests/proxy_test.ts +++ b/contracts/test/unit_tests/proxy_test.ts @@ -17,11 +17,19 @@ describe("Proxy unit tests", function () { gasLimit: 6000000, }) proxyDummyInterface = new Contract(proxyTestContract.address, require('../../build/DummyTarget').interface, wallet); + + // check delegatecall + expect(await proxyDummyInterface.get_DUMMY_INDEX()) + .to.equal(1); }); it("checking that requireMaster calls present", async () => { let testContract_with_wallet2_signer = await proxyTestContract.connect(wallet2); expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeTarget(AddressZero, []) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeNoticePeriodStarted() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradePreparationStarted() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeCanceled() )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => testContract_with_wallet2_signer.upgradeFinishes() )).revertReason).equal("oro11") }); it("checking Proxy reverts", async () => { From 272cd5baee864201797178efcf0c8a7ab2d9ede3 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Thu, 26 Mar 2020 15:59:00 +0200 Subject: [PATCH 126/186] 
No upgrade in exodus mode --- contracts/contracts/Franklin.sol | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 45a31674b8..3c15862c58 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -16,6 +16,7 @@ contract Franklin is Storage, Config, Events { // Upgrade functional + /// @notice Notice period before activation preparation status of upgrade mode function upgradeNoticePeriod() external pure returns (uint) { return UPGRADE_NOTICE_PERIOD; } @@ -41,7 +42,7 @@ contract Franklin is Storage, Config, Events { /// @notice Checks that contract is ready for upgrade /// @return bool flag indicating that contract is ready for upgrade function readyForUpgrade() external view returns (bool) { - return totalOpenPriorityRequests == 0; + return !exodusMode && totalOpenPriorityRequests == 0; } // // Migration From 0a1eb043a14134bed31ca385d3b41635effcdad1 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Thu, 26 Mar 2020 18:33:37 +0200 Subject: [PATCH 127/186] EIP1271 signatures check --- contracts/contracts/test/EIP1271.sol | 28 ++++++ contracts/contracts/test/IEIP1271.sol | 19 ++++ core/models/src/abi.rs | 10 +++ core/models/src/misc/constants.rs | 3 + core/models/src/node/tx.rs | 37 ++++++++ core/server/src/api_server/rpc_server.rs | 109 +++-------------------- core/server/src/eth_watch.rs | 49 +++++++++- core/server/src/mempool.rs | 77 ++++++++++++++-- js/zksync.js/src/provider.ts | 5 +- js/zksync.js/src/types.ts | 4 + js/zksync.js/src/wallet.ts | 31 ++++--- 11 files changed, 256 insertions(+), 116 deletions(-) create mode 100644 contracts/contracts/test/EIP1271.sol create mode 100644 contracts/contracts/test/IEIP1271.sol diff --git a/contracts/contracts/test/EIP1271.sol b/contracts/contracts/test/EIP1271.sol new file mode 100644 index 0000000000..8a60045ab6 --- /dev/null +++ b/contracts/contracts/test/EIP1271.sol @@ -0,0 +1,28 @@ +pragma solidity 0.5.16; + +import "test/IEIP1271.sol"; + +contract EIP1271 is IEIP1271 { + + // bytes4(keccak256("isValidSignature(bytes,bytes)") + bytes4 constant internal MAGICVALUE = 0x20c13b0b; + + /** + * @dev Should return whether the signature provided is valid for the provided data + * @param _data Arbitrary length data signed on the behalf of address(this) + * @param _signature Signature byte array associated with _data + * + * MUST return the bytes4 magic value 0x20c13b0b when function passes. + * MUST NOT modify state (using STATICCALL for solc < 0.5, view modifier for solc > 0.5) + * MUST allow external calls + */ + function isValidSignature( + bytes memory _data, + bytes memory _signature) + public + view + returns (bytes4) + { + return MAGICVALUE; + } +} diff --git a/contracts/contracts/test/IEIP1271.sol b/contracts/contracts/test/IEIP1271.sol new file mode 100644 index 0000000000..0fc6743efd --- /dev/null +++ b/contracts/contracts/test/IEIP1271.sol @@ -0,0 +1,19 @@ +pragma solidity 0.5.16; + +interface IEIP1271 { + /** + * @dev Should return whether the signature provided is valid for the provided data + * @param _data Arbitrary length data signed on the behalf of address(this) + * @param _signature Signature byte array associated with _data + * + * MUST return the bytes4 magic value 0x20c13b0b when function passes. 
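+     * (The magic value equals bytes4(keccak256("isValidSignature(bytes,bytes)")).)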
+ * MUST NOT modify state (using STATICCALL for solc < 0.5, view modifier for solc > 0.5) + * MUST allow external calls + */ + function isValidSignature( + bytes calldata _data, + bytes calldata _signature) + external + view + returns (bytes4); +} diff --git a/core/models/src/abi.rs b/core/models/src/abi.rs index 73b9cbafba..edd21abae1 100644 --- a/core/models/src/abi.rs +++ b/core/models/src/abi.rs @@ -6,6 +6,7 @@ use std::str::FromStr; const ZKSYNC_CONTRACT_FILE: &str = "contracts/build/Franklin.json"; const GOVERNANCE_CONTRACT_FILE: &str = "contracts/build/Governance.json"; const IERC20_CONTRACT_FILE: &str = "contracts/build/IERC20.json"; +const IEIP1271_CONTRACT_FILE: &str = "contracts/build/IEIP1271.json"; fn read_file_to_json_value(path: &str) -> io::Result { let contents = fs::read_to_string(path)?; @@ -39,3 +40,12 @@ pub fn erc20_contract() -> Contract { .to_string(); Contract::load(abi_string.as_bytes()).expect("erc20 contract abi") } + +pub fn eip1271_contract() -> Contract { + let abi_string = read_file_to_json_value(IEIP1271_CONTRACT_FILE) + .expect("couldn't read IEIP1271_CONTRACT_FILE") + .get("abi") + .expect("couldn't get abi from IEIP1271_CONTRACT_FILE") + .to_string(); + Contract::load(abi_string.as_bytes()).expect("erc20 contract abi") +} diff --git a/core/models/src/misc/constants.rs b/core/models/src/misc/constants.rs index 2efcef2cc7..13c22a8316 100644 --- a/core/models/src/misc/constants.rs +++ b/core/models/src/misc/constants.rs @@ -3,3 +3,6 @@ pub const ETH_SIGNATURE_LENGTH: usize = 65; /// Size of hex representation in form of "0x{...}". /// Two bytes for "0x", and two for each byte of the signature. pub const ETH_SIGNATURE_HEX_LENGTH: usize = (ETH_SIGNATURE_LENGTH * 2) + 2; + +/// EIP1271 isValidSignature return value +pub const MAGICVALUE: [u8; 4] = [0x20, 0xc1, 0x3b, 0x0b]; diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index 5ae5e8071d..bb97099dd1 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -15,12 +15,14 @@ use crate::franklin_crypto::alt_babyjubjub::{edwards, AltJubjubBn256}; use crate::franklin_crypto::bellman::pairing::ff::{PrimeField, PrimeFieldRepr}; use crate::franklin_crypto::eddsa::{PrivateKey, PublicKey, Seed, Signature}; use crate::franklin_crypto::jubjub::FixedGenerators; +use crate::misc::utils::format_ether; use crate::node::operations::ChangePubKeyOp; use crate::params::JUBJUB_PARAMS; use crate::primitives::{big_decimal_to_u128, pedersen_hash_tx_msg, u128_to_bigdecimal}; use ethsign::{SecretKey, Signature as ETHSignature}; use failure::{ensure, format_err}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::collections::HashMap; use std::convert::TryInto; use std::str::FromStr; use web3::types::{Address, H256}; @@ -341,6 +343,34 @@ impl FranklinTx { _ => false, } } + + /// Returns a message that user has to sign to send the transaction. + /// If the transaction doesn't need a message signature, returns `None`. + /// If any error is encountered during the message generation, returns `jsonrpc_core::Error`. 
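+    /// (In this context the error is a plain `&'static str`; the mempool converts it into `TxAddError::IncorrectTx`.)
+    /// Illustrative example of a generated message: "Transfer 0.1 ETH\nTo: 0x...\nNonce: 5\nFee: 0.001 ETH".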
+ pub fn get_tx_info_message_to_sign( + &self, + ids_to_symbols: &HashMap, + ) -> Result, &'static str> { + match self { + FranklinTx::Transfer(tx) => Ok(Some(format!( + "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", + amount = format_ether(&tx.amount), + token = ids_to_symbols.get(&tx.token).ok_or("no such symbol")?, // TODO: jazzandrock better message | other error type + to = tx.to, + nonce = tx.nonce, + fee = format_ether(&tx.fee), + ))), + FranklinTx::Withdraw(tx) => Ok(Some(format!( + "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", + amount = format_ether(&tx.amount), + token = ids_to_symbols.get(&tx.token).ok_or("no such symbol")?, // TODO: jazzandrock better message | other error type + to = tx.to, + nonce = tx.nonce, + fee = format_ether(&tx.fee), + ))), + _ => Ok(None), + } + } } #[derive(Clone, Serialize, Deserialize)] @@ -544,6 +574,13 @@ impl<'de> Deserialize<'de> for PackedSignature { } } +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", content = "signature")] +pub enum TxEthSignature { + EthereumSignature(PackedEthSignature), + EIP1271Signature(Vec), +} + /// Struct used for working with ethereum signatures created using eth_sign (using geth, ethers.js, etc) /// message is serialized as 65 bytes long `0x` prefixed string. #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/core/server/src/api_server/rpc_server.rs b/core/server/src/api_server/rpc_server.rs index 95b0d260cb..acae68a657 100644 --- a/core/server/src/api_server/rpc_server.rs +++ b/core/server/src/api_server/rpc_server.rs @@ -9,8 +9,7 @@ use jsonrpc_core::{IoHandler, MetaIoHandler, Metadata, Middleware}; use jsonrpc_derive::rpc; use jsonrpc_http_server::ServerBuilder; use models::config_options::ThreadPanicNotify; -use models::misc::utils::format_ether; -use models::node::tx::PackedEthSignature; +use models::node::tx::TxEthSignature; use models::node::tx::TxHash; use models::node::{Account, AccountId, FranklinTx, Nonce, PubKeyHash, TokenId}; use std::collections::HashMap; @@ -89,10 +88,11 @@ pub struct ContractAddressResp { enum RpcErrorCodes { NonceMismatch = 101, IncorrectTx = 103, - Other = 104, - ChangePkNotAuthorized = 105, + MissingEthSignature = 104, + IncorrectEthSignature = 105, + Other = 106, + ChangePkNotAuthorized = 107, AccountCloseDisabled = 110, - IncorrectEthSignature = 121, } impl From for RpcErrorCodes { @@ -100,8 +100,10 @@ impl From for RpcErrorCodes { match error { TxAddError::NonceMismatch => RpcErrorCodes::NonceMismatch, TxAddError::IncorrectTx => RpcErrorCodes::IncorrectTx, - TxAddError::Other => RpcErrorCodes::Other, + TxAddError::MissingEthSignature => RpcErrorCodes::MissingEthSignature, + TxAddError::IncorrectEthSignature => RpcErrorCodes::IncorrectEthSignature, TxAddError::ChangePkNotAuthorized => RpcErrorCodes::ChangePkNotAuthorized, + TxAddError::Other => RpcErrorCodes::Other, } } } @@ -126,8 +128,8 @@ pub trait Rpc { #[rpc(name = "tx_submit", returns = "TxHash")] fn tx_submit( &self, - tx: FranklinTx, - signature: Option, + tx: Box, + signature: Box>, ) -> Box + Send>; #[rpc(name = "contract_address")] fn contract_address(&self) -> Result; @@ -154,87 +156,6 @@ impl RpcApp { .access_storage_fragile() .map_err(|_| Error::internal_error()) } - - /// Returns the token symbol for a `TokenId` as a string. - /// In case of failure, returns `jsonrpc_core::Error`, - /// which makes it convenient to use in the RPC methods. - fn token_symbol_from_id(&self, token: TokenId) -> Result { - self.access_storage()? 
- .tokens_schema() - .token_symbol_from_id(token) - .map_err(|_| Error::internal_error())? - .ok_or(Error { - code: RpcErrorCodes::IncorrectTx.into(), - message: "No such token registered".into(), - data: None, - }) - } - - /// Returns a message that user has to sign to send the transaction. - /// If the transaction doesn't need a message signature, returns `None`. - /// If any error is encountered during the message generation, returns `jsonrpc_core::Error`. - fn get_tx_info_message_to_sign(&self, tx: &FranklinTx) -> Result> { - match tx { - FranklinTx::Transfer(tx) => Ok(Some(format!( - "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", - amount = format_ether(&tx.amount), - token = self.token_symbol_from_id(tx.token)?, - to = tx.to, - nonce = tx.nonce, - fee = format_ether(&tx.fee), - ))), - FranklinTx::Withdraw(tx) => Ok(Some(format!( - "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", - amount = format_ether(&tx.amount), - token = self.token_symbol_from_id(tx.token)?, - to = tx.to, - nonce = tx.nonce, - fee = format_ether(&tx.fee), - ))), - _ => Ok(None), - } - } - - /// Checks that tx info message signature is valid. - /// - /// Needed for two-step verification, where user has to sign predefined human-readable - /// message with his ETH signature in order to send a transaction. - /// - /// If signature is correct, or tx doesn't need signature, returns `Ok(())`. - /// - /// If any error encountered during signature verification, - /// including incorrect signature, returns `jsonrpc_core::Error`. - fn verify_tx_info_message_signature( - &self, - tx: &FranklinTx, - signature: Option, - ) -> Result<()> { - fn rpc_message(message: impl ToString) -> Error { - Error { - code: RpcErrorCodes::IncorrectEthSignature.into(), - message: message.to_string(), - data: None, - } - } - - match self.get_tx_info_message_to_sign(&tx)? 
{ - Some(message_to_sign) => { - let packed_signature = - signature.ok_or_else(|| rpc_message("Signature required"))?; - - let signer_account = packed_signature - .signature_recover_signer(message_to_sign.as_bytes()) - .map_err(rpc_message)?; - - if signer_account == tx.account() { - Ok(()) - } else { - Err(rpc_message("Signature is incorrect")) - } - } - None => Ok(()), - } - } } impl Rpc for RpcApp { @@ -359,8 +280,8 @@ impl Rpc for RpcApp { fn tx_submit( &self, - tx: FranklinTx, - signature: Option, + tx: Box, + signature: Box>, ) -> Box + Send> { if tx.is_close() { return Box::new(futures01::future::err(Error { @@ -370,16 +291,12 @@ impl Rpc for RpcApp { })); } - if let Err(error) = self.verify_tx_info_message_signature(&tx, signature) { - return Box::new(futures01::future::err(error)); - } - let mut mempool_sender = self.mempool_request_sender.clone(); let mempool_resp = async move { let hash = tx.hash(); let mempool_resp = oneshot::channel(); mempool_sender - .send(MempoolRequest::NewTx(Box::new(tx), mempool_resp.0)) + .send(MempoolRequest::NewTx(tx, signature, mempool_resp.0)) .await .expect("mempool receiver dropped"); let tx_add_result = mempool_resp.1.await.unwrap_or(Err(TxAddError::Other)); diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index 22bb8517a9..cefe941e77 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -13,8 +13,9 @@ use web3::contract::{Contract, Options}; use web3::types::{Address, BlockNumber, Filter, FilterBuilder, H160}; use web3::{Transport, Web3}; // Workspace deps -use models::abi::{governance_contract, zksync_contract}; +use models::abi::{eip1271_contract, governance_contract, zksync_contract}; use models::config_options::ConfigurationOptions; +use models::misc::constants::MAGICVALUE; use models::node::{Nonce, PriorityOp, PubKeyHash, TokenId}; use models::params::PRIORITY_EXPIRATION; use models::TokenAddedEvent; @@ -35,6 +36,12 @@ pub enum EthWatchRequest { max_chunks: usize, resp: oneshot::Sender>, }, + CheckEIP1271Signature { + address: Address, + data: Vec, + signature: Vec, + resp: oneshot::Sender, + }, } pub struct EthWatch { @@ -99,6 +106,10 @@ impl EthWatch { } } + fn get_eip1271_contract(&self, address: Address) -> Contract { + Contract::new(self.web3.eth(), address, eip1271_contract()) + } + fn get_new_token_event_filter(&self, from: BlockNumber, to: BlockNumber) -> Filter { let new_token_event_topic = self .gov_contract @@ -266,6 +277,28 @@ impl EthWatch { res } + async fn is_eip1271_signature_correct( + &self, + address: Address, + data: Vec, + signature: Vec, + ) -> Result { + let received: [u8; 4] = self + .get_eip1271_contract(address) + .query( + "isValidSignature", + (data, signature), + None, + Options::default(), + None, + ) + .compat() + .await + .map_err(|e| format_err!("Failed to query contract isValidSignature: {}", e))?; + + Ok(received == MAGICVALUE) + } + async fn is_new_pubkey_hash_authorized( &self, address: Address, @@ -335,9 +368,21 @@ impl EthWatch { let authorized = self .is_new_pubkey_hash_authorized(address, nonce, &pubkey_hash) .await - .unwrap_or_default(); + .unwrap_or(false); resp.send(authorized).unwrap_or_default(); } + EthWatchRequest::CheckEIP1271Signature { + address, + data, + signature, + resp, + } => { + let signature_correct = self + .is_eip1271_signature_correct(address, data, signature) + .await + .unwrap_or(false); + resp.send(signature_correct).unwrap_or_default(); + } } } } diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs index 
7390b10670..4c0144006c 100644 --- a/core/server/src/mempool.rs +++ b/core/server/src/mempool.rs @@ -18,8 +18,9 @@ use crate::eth_watch::EthWatchRequest; use failure::Fail; use futures::channel::{mpsc, oneshot}; use futures::{SinkExt, StreamExt}; +use models::node::tx::TxEthSignature; use models::node::{ - AccountId, AccountUpdate, AccountUpdates, FranklinTx, Nonce, PriorityOp, TransferOp, + AccountId, AccountUpdate, AccountUpdates, FranklinTx, Nonce, PriorityOp, TokenId, TransferOp, TransferToNewOp, }; use models::params::max_block_chunk_size; @@ -34,6 +35,10 @@ pub enum TxAddError { NonceMismatch, #[fail(display = "Tx is incorrect")] IncorrectTx, + #[fail(display = "MissingEthSignature")] + MissingEthSignature, + #[fail(display = "Eth signature is incorrect")] + IncorrectEthSignature, #[fail(display = "Change pubkey tx is not authorized onchain")] ChangePkNotAuthorized, #[fail(display = "Internal error")] @@ -60,7 +65,11 @@ pub struct GetBlockRequest { pub enum MempoolRequest { /// Add new transaction to mempool, check signature and correctness /// oneshot is used to receive tx add result. - NewTx(Box, oneshot::Sender>), + NewTx( + Box, + Box>, + oneshot::Sender>, + ), /// When block is committed, nonces of the account tree should be updated too. UpdateNonces(AccountUpdates), /// Get transactions from the mempool. @@ -133,10 +142,19 @@ struct Mempool { mempool_state: MempoolState, requests: mpsc::Receiver, eth_watch_req: mpsc::Sender, + + // TODO: jazzandrock find a better place to store such cached structs. + // Maybe, something like storage scheme but for hashmaps? + // if we plan to cache stuff like that more often + ids_to_symbols: HashMap, } impl Mempool { - async fn add_tx(&mut self, tx: FranklinTx) -> Result<(), TxAddError> { + async fn add_tx( + &mut self, + tx: FranklinTx, + signature: Option, + ) -> Result<(), TxAddError> { if let FranklinTx::ChangePubKey(change_pk) = &tx { if change_pk.eth_signature.is_none() { let eth_watch_resp = oneshot::channel(); @@ -158,14 +176,50 @@ impl Mempool { } } + if let Some(message_to_sign) = tx + .get_tx_info_message_to_sign(&self.ids_to_symbols) + .or(Err(TxAddError::IncorrectTx))? 
+ { + let tx_eth_signature = signature.ok_or(TxAddError::MissingEthSignature)?; + + match tx_eth_signature { + TxEthSignature::EthereumSignature(packed_signature) => { + let signer_account = packed_signature + .signature_recover_signer(message_to_sign.as_bytes()) + .or(Err(TxAddError::IncorrectEthSignature))?; + + if signer_account != tx.account() { + return Err(TxAddError::IncorrectEthSignature); + } + } + TxEthSignature::EIP1271Signature(signature) => { + let eth_watch_resp = oneshot::channel(); + self.eth_watch_req + .clone() + .send(EthWatchRequest::CheckEIP1271Signature { + address: tx.account(), + data: message_to_sign.as_bytes().to_vec(), + signature, + resp: eth_watch_resp.0, + }) + .await + .expect("ETH watch req receiver dropped"); + + if !eth_watch_resp.1.await.expect("Err response from eth watch") { + return Err(TxAddError::IncorrectEthSignature); + } + } + }; + }; + self.mempool_state.add_tx(tx) } async fn run(mut self) { while let Some(request) = self.requests.next().await { match request { - MempoolRequest::NewTx(tx, resp) => { - let tx_add_result = self.add_tx(*tx).await; + MempoolRequest::NewTx(tx, signature, resp) => { + let tx_add_result = self.add_tx(*tx, *signature).await; resp.send(tx_add_result).unwrap_or_default(); } MempoolRequest::GetBlock(block) => { @@ -278,10 +332,23 @@ pub fn run_mempool_task( runtime: &Runtime, ) { let mempool_state = MempoolState::restore_from_db(&db_pool); + + // TODO: jazzandrock + let ids_to_symbols = db_pool + .access_storage_fragile() + .expect("fragile enough") + .tokens_schema() + .load_tokens() + .expect("tokens load failed") + .into_iter() + .map(|(key, val)| (key, val.symbol)) + .collect::>(); + let mempool = Mempool { mempool_state, requests, eth_watch_req, + ids_to_symbols, }; runtime.spawn(mempool.run()); } diff --git a/js/zksync.js/src/provider.ts b/js/zksync.js/src/provider.ts index 2da0d09d98..e532ea0842 100644 --- a/js/zksync.js/src/provider.ts +++ b/js/zksync.js/src/provider.ts @@ -12,7 +12,8 @@ import { PriorityOperationReceipt, ContractAddress, Tokens, - TokenAddress + TokenAddress, + TxEthSignature } from "./types"; import { isTokenETH, @@ -69,7 +70,7 @@ export class Provider { } // return transaction hash (e.g. 
sync-tx:dead..beef) - async submitTx(tx: any, signature?: string): Promise { + async submitTx(tx: any, signature?: TxEthSignature): Promise { return await this.transport.request("tx_submit", [tx, signature]); } diff --git a/js/zksync.js/src/types.ts b/js/zksync.js/src/types.ts index bf88d78c43..2e6d65504b 100644 --- a/js/zksync.js/src/types.ts +++ b/js/zksync.js/src/types.ts @@ -34,6 +34,10 @@ export interface AccountState { pubKeyHash: PubKeyHash; }; } +export interface TxEthSignature { + type: "EthereumSignature" | "EIP1271Signature"; + signature: string; +} export interface Signature { pubKey: string; diff --git a/js/zksync.js/src/wallet.ts b/js/zksync.js/src/wallet.ts index 929dc8d028..21ccfd2ddf 100644 --- a/js/zksync.js/src/wallet.ts +++ b/js/zksync.js/src/wallet.ts @@ -1,6 +1,6 @@ import { Contract, ContractTransaction, ethers, utils } from "ethers"; import { ETHProxy, Provider } from "./provider"; -import { serializeAddress, serializeNonce, Signer } from "./signer"; +import { Signer } from "./signer"; import { AccountState, Address, @@ -8,7 +8,8 @@ import { Nonce, PriorityOperationReceipt, TransactionReceipt, - PubKeyHash + PubKeyHash, + TxEthSignature } from "./types"; import { ERC20_APPROVE_TRESHOLD, @@ -20,7 +21,7 @@ import { } from "./utils"; // Our MetaMask users sometimes use custom gas price values, -// which we can't know. We use this constant to assure that +// which we can't know. We use this constant to assure that // gasprice from our calculations isn't smaller than actually used one. const metamaskIncreaseGasPriceFactor = 10; @@ -79,6 +80,7 @@ export class Wallet { amount: utils.BigNumberish; fee: utils.BigNumberish; nonce?: Nonce; + ethSignatureType?: "EthereumSignature" | "EIP1271Signature"; }): Promise { if (!this.signer) { throw new Error( @@ -113,9 +115,10 @@ export class Wallet { `Nonce: ${nonce}\n` + `Fee: ${stringFee} ${stringToken}`; - const txMessageEthSignature = await this.ethSigner.signMessage( - humanReadableTxInfo - ); + const txMessageEthSignature: TxEthSignature = { + type: transfer.ethSignatureType || "EthereumSignature", + signature: await this.ethSigner.signMessage(humanReadableTxInfo) + }; const signedTransferTransaction = this.signer.signSyncTransfer( transactionData @@ -138,6 +141,7 @@ export class Wallet { amount: utils.BigNumberish; fee: utils.BigNumberish; nonce?: Nonce; + ethSignatureType?: "EthereumSignature" | "EIP1271Signature"; }): Promise { if (!this.signer) { throw new Error( @@ -172,9 +176,10 @@ export class Wallet { `Nonce: ${nonce}\n` + `Fee: ${stringFee} ${stringToken}`; - const txMessageEthSignature = await this.ethSigner.signMessage( - humanReadableTxInfo - ); + const txMessageEthSignature: TxEthSignature = { + type: withdraw.ethSignatureType || "EthereumSignature", + signature: await this.ethSigner.signMessage(humanReadableTxInfo) + }; const signedWithdrawTransaction = this.signer.signSyncWithdraw( transactionData @@ -401,7 +406,9 @@ export class Wallet { deposit.token, gasPrice ); - maxFeeInETHToken = maxFeeInETHToken.mul(metamaskIncreaseGasPriceFactor); + maxFeeInETHToken = maxFeeInETHToken.mul( + metamaskIncreaseGasPriceFactor + ); } const mainZkSyncContract = new Contract( this.provider.contractAddress.mainContract, @@ -491,7 +498,9 @@ export class Wallet { maxFeeInETHToken = await ethProxy.estimateEmergencyWithdrawFeeInETHToken( gasPrice ); - maxFeeInETHToken = maxFeeInETHToken.mul(metamaskIncreaseGasPriceFactor); + maxFeeInETHToken = maxFeeInETHToken.mul( + metamaskIncreaseGasPriceFactor + ); } let accountId; From 
e622c2301e5b24ca58c35ded1c7338a6d7aa039c Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Thu, 26 Mar 2020 21:09:36 +0200 Subject: [PATCH 128/186] Fix serialization of EIP1271Signature --- contracts/contracts/test/EIP1271.sol | 4 ++-- core/models/src/misc/constants.rs | 2 +- core/models/src/node/tx.rs | 31 +++++++++++++++++++++++++++- core/server/src/eth_watch.rs | 11 +++++----- 4 files changed, 39 insertions(+), 9 deletions(-) diff --git a/contracts/contracts/test/EIP1271.sol b/contracts/contracts/test/EIP1271.sol index 8a60045ab6..7c4f7df456 100644 --- a/contracts/contracts/test/EIP1271.sol +++ b/contracts/contracts/test/EIP1271.sol @@ -5,7 +5,7 @@ import "test/IEIP1271.sol"; contract EIP1271 is IEIP1271 { // bytes4(keccak256("isValidSignature(bytes,bytes)") - bytes4 constant internal MAGICVALUE = 0x20c13b0b; + bytes4 constant internal EIP1271_SUCCESS_RETURN_VALUE = 0x20c13b0b; /** * @dev Should return whether the signature provided is valid for the provided data @@ -23,6 +23,6 @@ contract EIP1271 is IEIP1271 { view returns (bytes4) { - return MAGICVALUE; + return EIP1271_SUCCESS_RETURN_VALUE; } } diff --git a/core/models/src/misc/constants.rs b/core/models/src/misc/constants.rs index 13c22a8316..ea64b19df1 100644 --- a/core/models/src/misc/constants.rs +++ b/core/models/src/misc/constants.rs @@ -5,4 +5,4 @@ pub const ETH_SIGNATURE_LENGTH: usize = 65; pub const ETH_SIGNATURE_HEX_LENGTH: usize = (ETH_SIGNATURE_LENGTH * 2) + 2; /// EIP1271 isValidSignature return value -pub const MAGICVALUE: [u8; 4] = [0x20, 0xc1, 0x3b, 0x0b]; +pub const EIP1271_SUCCESS_RETURN_VALUE: [u8; 4] = [0x20, 0xc1, 0x3b, 0x0b]; diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index bb97099dd1..5843147aea 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -578,7 +578,36 @@ impl<'de> Deserialize<'de> for PackedSignature { #[serde(tag = "type", content = "signature")] pub enum TxEthSignature { EthereumSignature(PackedEthSignature), - EIP1271Signature(Vec), + EIP1271Signature(EIP1271Signature), +} + +#[derive(Debug, Clone)] +pub struct EIP1271Signature(pub Vec); + +impl<'de> Deserialize<'de> for EIP1271Signature { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + use hex::FromHex; + use serde::de::Error; + + let string = String::deserialize(deserializer)?; + + if !string.starts_with("0x") { + return Err(Error::custom("Packed eth signature should start with 0x")); + } + + Vec::from_hex(&string[2..]) + .map(Self) + .map_err(|err| Error::custom(err.to_string())) + } +} + +impl Serialize for EIP1271Signature { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&format!("0x{}", &hex::encode(self.0.as_slice()))) + } } /// Struct used for working with ethereum signatures created using eth_sign (using geth, ethers.js, etc) diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index cefe941e77..0a24b97eaf 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -15,7 +15,8 @@ use web3::{Transport, Web3}; // Workspace deps use models::abi::{eip1271_contract, governance_contract, zksync_contract}; use models::config_options::ConfigurationOptions; -use models::misc::constants::MAGICVALUE; +use models::misc::constants::EIP1271_SUCCESS_RETURN_VALUE; +use models::node::tx::EIP1271Signature; use models::node::{Nonce, PriorityOp, PubKeyHash, TokenId}; use models::params::PRIORITY_EXPIRATION; use models::TokenAddedEvent; @@ -39,7 +40,7 @@ pub enum EthWatchRequest { CheckEIP1271Signature { 
address: Address, data: Vec, - signature: Vec, + signature: EIP1271Signature, resp: oneshot::Sender, }, } @@ -281,13 +282,13 @@ impl EthWatch { &self, address: Address, data: Vec, - signature: Vec, + signature: EIP1271Signature, ) -> Result { let received: [u8; 4] = self .get_eip1271_contract(address) .query( "isValidSignature", - (data, signature), + (data, signature.0), None, Options::default(), None, @@ -296,7 +297,7 @@ impl EthWatch { .await .map_err(|e| format_err!("Failed to query contract isValidSignature: {}", e))?; - Ok(received == MAGICVALUE) + Ok(received == EIP1271_SUCCESS_RETURN_VALUE) } async fn is_new_pubkey_hash_authorized( From 17b5b66e829da6bb8a275613afa93feec9480cf7 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 27 Mar 2020 09:43:17 +0300 Subject: [PATCH 129/186] Implement Clone for CircuitAccount --- core/models/src/circuit/account.rs | 1 + core/models/src/merkle_tree/pedersen_hasher.rs | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/core/models/src/circuit/account.rs b/core/models/src/circuit/account.rs index 69a997a039..7093135156 100644 --- a/core/models/src/circuit/account.rs +++ b/core/models/src/circuit/account.rs @@ -11,6 +11,7 @@ use crate::primitives::{GetBits, GetBitsFixed}; pub type CircuitAccountTree = SparseMerkleTree, Fr, PedersenHasher>; pub type CircuitBalanceTree = SparseMerkleTree, Fr, PedersenHasher>; +#[derive(Clone)] pub struct CircuitAccount { pub subtree: SparseMerkleTree, E::Fr, PedersenHasher>, pub nonce: E::Fr, diff --git a/core/models/src/merkle_tree/pedersen_hasher.rs b/core/models/src/merkle_tree/pedersen_hasher.rs index 9fb3c07b11..b7ab673d99 100644 --- a/core/models/src/merkle_tree/pedersen_hasher.rs +++ b/core/models/src/merkle_tree/pedersen_hasher.rs @@ -9,11 +9,22 @@ use crate::franklin_crypto::bellman::pairing::bn256::Bn256; use super::hasher::Hasher; use crate::primitives::BitIteratorLe; -#[derive(Clone)] pub struct PedersenHasher { params: &'static E::Params, } +// We have to implement `Clone` manually, since deriving it will depend on +// the `Clone` implementation of `E::Params` (and will `.clone()` will not work +// if `E::Params` are not `Clone`), which is redundant: we only hold a reference +// and can just copy it. 
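+// (A derived impl would additionally require `E: Clone` on the type parameter, which is not needed here.)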
+impl Clone for PedersenHasher { + fn clone(&self) -> Self { + Self { + params: self.params, + } + } +} + impl Hasher for PedersenHasher { fn hash_bits>(&self, input: I) -> E::Fr { baby_pedersen_hash::(Personalization::NoteCommitment, input, &self.params) From a507ea63b5a46e75390d37536902511c5b05350c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 27 Mar 2020 09:45:29 +0300 Subject: [PATCH 130/186] Add benches for sequential and parallel SMT --- core/models/benches/criterion/lib.rs | 8 +- core/models/benches/criterion/merkle_tree.rs | 49 ------ core/models/benches/criterion/parallel_smt.rs | 139 ++++++++++++++++++ .../benches/criterion/sequential_smt.rs | 106 +++++++++++++ 4 files changed, 250 insertions(+), 52 deletions(-) delete mode 100644 core/models/benches/criterion/merkle_tree.rs create mode 100644 core/models/benches/criterion/parallel_smt.rs create mode 100644 core/models/benches/criterion/sequential_smt.rs diff --git a/core/models/benches/criterion/lib.rs b/core/models/benches/criterion/lib.rs index e9de2edec9..54411eebc2 100644 --- a/core/models/benches/criterion/lib.rs +++ b/core/models/benches/criterion/lib.rs @@ -1,8 +1,10 @@ use criterion::{criterion_group, criterion_main}; -use crate::merkle_tree::bench_merkle_tree; +use crate::parallel_smt::bench_merkle_tree as parallel_smt_bench; +use crate::sequential_smt::bench_merkle_tree as sequential_smt_bench; -mod merkle_tree; +mod parallel_smt; +mod sequential_smt; -criterion_group!(benches, bench_merkle_tree); +criterion_group!(benches, sequential_smt_bench, parallel_smt_bench); criterion_main!(benches); diff --git a/core/models/benches/criterion/merkle_tree.rs b/core/models/benches/criterion/merkle_tree.rs deleted file mode 100644 index c6d42520e7..0000000000 --- a/core/models/benches/criterion/merkle_tree.rs +++ /dev/null @@ -1,49 +0,0 @@ -use criterion::{black_box, BatchSize, Bencher, Criterion}; - -use models::circuit::account::CircuitAccount; -use models::franklin_crypto::bellman::pairing::bn256::{Bn256, Fr}; -use models::merkle_tree::{PedersenHasher, SparseMerkleTree}; - -const N_ACCOUNTS: u64 = 10; - -type RealSMT = SparseMerkleTree, Fr, PedersenHasher>; - -fn gen_account(id: u64) -> CircuitAccount { - let mut account = CircuitAccount::::default(); - - let id_hex = format!("{:064x}", id); - account.address = Fr::from_hex(id_hex.as_ref()).unwrap(); - - account -} - -fn bench_tree_create(b: &mut Bencher<'_>) { - let depth = models::params::account_tree_depth() as u32; - - b.iter(|| { - RealSMT::new(black_box(depth)); - }); -} - -fn bench_tree_insert(b: &mut Bencher<'_>) { - let depth = models::params::account_tree_depth() as u32; - - let setup = || (0..N_ACCOUNTS).map(gen_account).collect::>(); - - b.iter_batched( - setup, - |accounts| { - let mut tree = RealSMT::new(depth); - - for (id, account) in accounts.into_iter().enumerate() { - tree.insert(id as u32, account); - } - }, - BatchSize::SmallInput, - ); -} - -pub fn bench_merkle_tree(c: &mut Criterion) { - c.bench_function("Merkle tree create", bench_tree_create); - c.bench_function("Merkle tree insert", bench_tree_insert); -} diff --git a/core/models/benches/criterion/parallel_smt.rs b/core/models/benches/criterion/parallel_smt.rs new file mode 100644 index 0000000000..f39668d3f9 --- /dev/null +++ b/core/models/benches/criterion/parallel_smt.rs @@ -0,0 +1,139 @@ +//! Benchmarks for the Parallel Sparse Merkle Tree. 
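+//! Covers tree creation, insertion into empty and pre-filled trees, and root hash
+//! computation with both a cold and a partially warmed hash cache.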
+ +use criterion::{black_box, BatchSize, Bencher, Criterion}; + +use models::circuit::account::CircuitAccount; +use models::franklin_crypto::bellman::pairing::bn256::{Bn256, Fr}; +use models::merkle_tree::{parallel_smt::SparseMerkleTree, PedersenHasher}; + +// This value should be not to high, since the bench will be run for thousands +// of iterations. Despite the tree cloning time won't affect the bench results +// (cloning is performed within `setup` closure), the bench will take forever to +// be completed if the value is too big. +const N_ACCOUNTS: usize = 100; + +/// Type alias equivalent to the actually used SMT (but parallel tree is used instead of sequential). +type RealSMT = SparseMerkleTree, Fr, PedersenHasher>; + +fn gen_account(id: usize) -> CircuitAccount { + let mut account = CircuitAccount::::default(); + + let id_hex = format!("{:064x}", id); + account.address = Fr::from_hex(id_hex.as_ref()).unwrap(); + + account +} + +/// Measures the time of `RealSMT` creation time. +fn smt_create(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth(); + + b.iter(|| { + RealSMT::new(black_box(depth)); + }); +} + +/// Measures the time of insertion into an empty SMT. +fn smt_insert_empty(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth(); + + // Create an empty SMT and one account in setup. + let tree = RealSMT::new(depth); + let account = gen_account(0); + + let setup = || (tree.clone(), account.clone()); + + b.iter_batched( + setup, + |(mut tree, account)| { + let id = 0; + tree.insert(black_box(id), account); + }, + BatchSize::SmallInput, + ); +} + +/// Measures the time of insertion into a non-empty SMT. +fn smt_insert_filled(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth(); + let accounts: Vec<_> = (0..N_ACCOUNTS).map(gen_account).collect(); + + // Create a tree and fill it with some accounts. + let mut tree = RealSMT::new(depth); + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id, account.clone()) + } + let latest_account = gen_account(N_ACCOUNTS); + + let setup = || (tree.clone(), latest_account.clone()); + + b.iter_batched( + setup, + |(mut tree, account)| { + let id = N_ACCOUNTS; + tree.insert(black_box(id), account); + }, + BatchSize::SmallInput, + ); +} + +/// Measures the time of obtaining a SMT root hash. +fn smt_root_hash(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth(); + let accounts: Vec<_> = (0..N_ACCOUNTS).map(gen_account).collect(); + + // Create a tree and fill it with some accounts. + let mut tree = RealSMT::new(depth); + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id, account.clone()); + } + + let setup = || (tree.clone()); + + b.iter_batched( + setup, + |mut tree| { + let _hash = black_box(tree.root_hash()); + }, + BatchSize::SmallInput, + ); +} + +/// Measures the time of obtaining a SMT root hash with `root_hash` invoked +/// when 50% of accounts are inserted. +/// +/// This bench is expected to get better results than `smt_root_hash` due +/// to some hashes being cached. +fn smt_root_hash_cached(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth(); + let accounts: Vec<_> = (0..N_ACCOUNTS).map(gen_account).collect(); + + // Create a tree and fill it with some accounts. + let mut tree = RealSMT::new(depth); + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id, account.clone()); + + if id == N_ACCOUNTS / 2 { + // Calculate the root hash to create cache. 
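+            // Hashes computed at this point should be reusable, so the measured root_hash call
+            // below presumably only recomputes paths touched by the second half of the insertions.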
+ let _ = tree.root_hash(); + } + } + + let setup = || (tree.clone()); + + b.iter_batched( + setup, + |mut tree| { + let _hash = black_box(tree.root_hash()); + }, + BatchSize::SmallInput, + ); +} + +pub fn bench_merkle_tree(c: &mut Criterion) { + c.bench_function("Parallel SMT create", smt_create); + c.bench_function("Parallel SMT insert (empty)", smt_insert_empty); + c.bench_function("Parallel SMT insert (filled)", smt_insert_filled); + c.bench_function("Parallel SMT root hash", smt_root_hash); + c.bench_function("Parallel SMT root hash (cached)", smt_root_hash_cached); +} diff --git a/core/models/benches/criterion/sequential_smt.rs b/core/models/benches/criterion/sequential_smt.rs new file mode 100644 index 0000000000..acecb588c7 --- /dev/null +++ b/core/models/benches/criterion/sequential_smt.rs @@ -0,0 +1,106 @@ +//! Benchmarks for the Sequential Sparse Merkle Tree. + +use criterion::{black_box, BatchSize, Bencher, Criterion}; + +use models::circuit::account::{CircuitAccount, CircuitAccountTree}; +use models::franklin_crypto::bellman::pairing::bn256::{Bn256, Fr}; + +// This value should be not to high, since the bench will be run for thousands +// of iterations. Despite the tree cloning time won't affect the bench results +// (cloning is performed within `setup` closure), the bench will take forever to +// be completed if the value is too big. +const N_ACCOUNTS: u32 = 100; + +/// Type alias equivalent to the actually used SMT. +type RealSMT = CircuitAccountTree; + +fn gen_account(id: u32) -> CircuitAccount { + let mut account = CircuitAccount::::default(); + + let id_hex = format!("{:064x}", id); + account.address = Fr::from_hex(id_hex.as_ref()).unwrap(); + + account +} + +/// Measures the time of `RealSMT` creation time. +fn smt_create(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + + b.iter(|| { + RealSMT::new(black_box(depth)); + }); +} + +/// Measures the time of insertion into an empty SMT. +fn smt_insert_empty(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + + // Create an empty SMT and one account in setup. + let tree = RealSMT::new(depth); + let account = gen_account(0); + + let setup = || (tree.clone(), account.clone()); + + b.iter_batched( + setup, + |(mut tree, account)| { + let id = 0; + tree.insert(black_box(id), account); + }, + BatchSize::SmallInput, + ); +} + +/// Measures the time of insertion into a non-empty SMT. +fn smt_insert_filled(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + let accounts: Vec<_> = (0..N_ACCOUNTS).map(gen_account).collect(); + + // Create a tree and fill it with some accounts. + let mut tree = RealSMT::new(depth); + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id as u32, account.clone()) + } + let latest_account = gen_account(N_ACCOUNTS); + + let setup = || (tree.clone(), latest_account.clone()); + + b.iter_batched( + setup, + |(mut tree, account)| { + let id = N_ACCOUNTS; + tree.insert(black_box(id), account); + }, + BatchSize::SmallInput, + ); +} + +/// Measures the time of obtaining a SMT root hash. +fn smt_root_hash(b: &mut Bencher<'_>) { + let depth = models::params::account_tree_depth() as u32; + let accounts: Vec<_> = (0..N_ACCOUNTS).map(gen_account).collect(); + + // Create a tree and fill it with some accounts. 
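+    // The setup mirrors the parallel SMT benches (same account count and generation),
+    // so the sequential and parallel root hash timings can be compared directly.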
+ let mut tree = RealSMT::new(depth); + for (id, account) in accounts.into_iter().enumerate() { + tree.insert(id as u32, account.clone()) + } + + let setup = || (tree.clone()); + + b.iter_batched( + setup, + |tree| { + let _hash = black_box(tree.root_hash()); + }, + BatchSize::SmallInput, + ); +} + +pub fn bench_merkle_tree(c: &mut Criterion) { + c.bench_function("Sequential SMT create", smt_create); + c.bench_function("Sequential SMT insert (empty)", smt_insert_empty); + c.bench_function("Sequential SMT insert (filled)", smt_insert_filled); + c.bench_function("Sequential SMT root hash", smt_root_hash); +} From 366972f51bf5cbac38068c7bc5460484f256d3be Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Fri, 27 Mar 2020 09:52:56 +0300 Subject: [PATCH 131/186] Add hierarchy to the benches --- core/models/benches/criterion/lib.rs | 11 ++++------- core/models/benches/criterion/merkle_tree/mod.rs | 14 ++++++++++++++ .../criterion/{ => merkle_tree}/parallel_smt.rs | 0 .../criterion/merkle_tree/pedersen_hasher.rs | 0 .../criterion/{ => merkle_tree}/sequential_smt.rs | 0 5 files changed, 18 insertions(+), 7 deletions(-) create mode 100644 core/models/benches/criterion/merkle_tree/mod.rs rename core/models/benches/criterion/{ => merkle_tree}/parallel_smt.rs (100%) create mode 100644 core/models/benches/criterion/merkle_tree/pedersen_hasher.rs rename core/models/benches/criterion/{ => merkle_tree}/sequential_smt.rs (100%) diff --git a/core/models/benches/criterion/lib.rs b/core/models/benches/criterion/lib.rs index 54411eebc2..7d336d13b3 100644 --- a/core/models/benches/criterion/lib.rs +++ b/core/models/benches/criterion/lib.rs @@ -1,10 +1,7 @@ -use criterion::{criterion_group, criterion_main}; +use criterion::criterion_main; -use crate::parallel_smt::bench_merkle_tree as parallel_smt_bench; -use crate::sequential_smt::bench_merkle_tree as sequential_smt_bench; +use merkle_tree::merkle_tree_benches; -mod parallel_smt; -mod sequential_smt; +mod merkle_tree; -criterion_group!(benches, sequential_smt_bench, parallel_smt_bench); -criterion_main!(benches); +criterion_main!(merkle_tree_benches); diff --git a/core/models/benches/criterion/merkle_tree/mod.rs b/core/models/benches/criterion/merkle_tree/mod.rs new file mode 100644 index 0000000000..4dbfc9d75f --- /dev/null +++ b/core/models/benches/criterion/merkle_tree/mod.rs @@ -0,0 +1,14 @@ +use criterion::criterion_group; + +use self::parallel_smt::bench_merkle_tree as parallel_smt_bench; +use self::sequential_smt::bench_merkle_tree as sequential_smt_bench; + +mod parallel_smt; +mod pedersen_hasher; +mod sequential_smt; + +criterion_group!( + merkle_tree_benches, + sequential_smt_bench, + parallel_smt_bench +); diff --git a/core/models/benches/criterion/parallel_smt.rs b/core/models/benches/criterion/merkle_tree/parallel_smt.rs similarity index 100% rename from core/models/benches/criterion/parallel_smt.rs rename to core/models/benches/criterion/merkle_tree/parallel_smt.rs diff --git a/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs b/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/core/models/benches/criterion/sequential_smt.rs b/core/models/benches/criterion/merkle_tree/sequential_smt.rs similarity index 100% rename from core/models/benches/criterion/sequential_smt.rs rename to core/models/benches/criterion/merkle_tree/sequential_smt.rs From 4367fad82b1ed43b76022175e9c4c66469003bba Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 11:12:59 +0200 
Subject: [PATCH 132/186] Added ABIEncoderV2 --- contracts/contracts/UpgradeGatekeeper.sol | 20 ++++++------------- contracts/scripts/test-upgrade-franklin.ts | 2 +- .../test/unit_tests/upgradeGatekeeper_test.ts | 4 ++-- 3 files changed, 9 insertions(+), 17 deletions(-) diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 2fa5a246a6..47eeee2856 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -1,8 +1,8 @@ pragma solidity 0.5.16; +pragma experimental ABIEncoderV2; import "./Events.sol"; import "./Ownable.sol"; -import "./Bytes.sol"; /// @title Interface of the main contract @@ -129,30 +129,22 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { } /// @notice Finishes upgrade - /// @param initParametersConcatenated New targets initialization parameters per each proxy (concatenated into one array) - /// @param sizeOfInitParameters Sizes of targets initialization parameters (in bytes) - function finishUpgrade(bytes calldata initParametersConcatenated, uint[] calldata sizeOfInitParameters) external { + /// @param targetsInitializationParameters New targets initialization parameters per each proxy + function finishUpgrade(bytes[] calldata targetsInitializationParameters) external { requireMaster(msg.sender); require(upgradeStatus == UpgradeStatus.Preparation, "fpu11"); // fpu11 - unable to finish upgrade without preparation status active - require(sizeOfInitParameters.length == proxies.length, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of proxies + require(targetsInitializationParameters.length == proxies.length, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of proxies require(mainContract.readyForUpgrade(), "fpu13"); // fpu13 - main contract is not ready for upgrade mainContract.upgradeFinishes(); - bytes memory initParametersConcatenated = initParametersConcatenated; - uint processedBytes = 0; for (uint64 i = 0; i < proxies.length; i++) { address proxy = proxies[i]; address nextTarget = nextTargets[i]; - if (nextTarget == address(0)) { - require(sizeOfInitParameters[i] == 0, "fpu14"); // fpu14 - there must be no init parameters bytes for proxy that wouldn't be upgraded - } else { - bytes memory targetInitParameters; - (processedBytes, targetInitParameters) = Bytes.read(initParametersConcatenated, processedBytes, sizeOfInitParameters[i]); - UpgradeableProxy(proxy).upgradeTarget(nextTarget, targetInitParameters); + if (nextTarget != address(0)) { + UpgradeableProxy(proxy).upgradeTarget(nextTarget, targetsInitializationParameters[i]); emit UpgradeCompleted(proxy, nextTarget); } } - require(processedBytes == initParametersConcatenated.length, "fpu15"); // fpu15 - all targets initialization parameters bytes must be processed upgradeStatus = UpgradeStatus.Idle; noticePeriodActivationTime = 0; diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index 0a2c8b2b71..a0d78636a5 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -59,7 +59,7 @@ async function main() { // finish upgrade await (await upgradeGatekeeper.startPreparation()).wait(); - await (await upgradeGatekeeper.finishUpgrade([], [0, 0, 0])).wait(); + await (await upgradeGatekeeper.finishUpgrade([[], [], []])).wait(); await expect(await proxyContract.getTarget()) .to.equal(newTargetFranklin.address); diff --git 
a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index ca2c518f96..c3c404e912 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -96,10 +96,10 @@ describe("UpgradeGatekeeper unit tests", function () { expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([], []) )).revertReason).equal("fpu12") // finish upgrade without verifying priority operations - expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([bytes[2], bytes[3]], [2]) )).revertReason).equal("fpu13") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract.finishUpgrade([[bytes[2], bytes[3]]]) )).revertReason).equal("fpu13") // finish upgrade await proxyDummyInterface.verifyPriorityOperation(); - await expect(UpgradeGatekeeperContract.finishUpgrade([bytes[2], bytes[3]], [2])) + await expect(UpgradeGatekeeperContract.finishUpgrade([[bytes[2], bytes[3]]])) .to.emit(UpgradeGatekeeperContract, 'UpgradeCompleted') .withArgs(proxyTestContract.address, DummySecond.address) From c843143f4ad3a24239101f559de07f03953a757a Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 27 Mar 2020 13:49:45 +0200 Subject: [PATCH 133/186] v0.0.2 --- js/zksync-crypto/package.json | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/js/zksync-crypto/package.json b/js/zksync-crypto/package.json index a2699b6ab3..2ad3e955ea 100644 --- a/js/zksync-crypto/package.json +++ b/js/zksync-crypto/package.json @@ -1,8 +1,13 @@ { "name": "zksync-crypto", - "version": "0.0.0", + "version": "0.0.2", "browser": "dist/web.js", "main": "dist/node.js", + "files": [ + "dist/*.ts", + "dist/*.wasm", + "dist/*.js" + ], "scripts": { "build": "./build.sh", "test": "echo \"Error: no test specified\" && exit 1" From 01254d9041ce2e80909e8b026780564e0fc48a36 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Fri, 27 Mar 2020 17:58:13 +0200 Subject: [PATCH 134/186] Fix wasm in nginx, use published zksync-crypto --- docker/nginx/nginx.conf | 8 ++++++++ js/client/yarn.lock | 6 ++++-- js/zksync.js/package.json | 2 +- js/zksync.js/yarn.lock | 7 ++++--- 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf index 01d23bcc11..5cb64c90bf 100644 --- a/docker/nginx/nginx.conf +++ b/docker/nginx/nginx.conf @@ -20,12 +20,20 @@ http { location ~ ^/explorer { include /etc/nginx/mime.types; + types { + application/wasm wasm; + } + root /usr/share/nginx/html/; # the directory (/admin) will be appended to this, so don't include it in the root otherwise it'll look for /var/www/html/www_new/admin/admin try_files $uri /explorer/index.html; # try_files will need to be relative to root } location ~ ^/client { include /etc/nginx/mime.types; + types { + application/wasm wasm; + } + root /usr/share/nginx/html/; # the directory (/admin) will be appended to this, so don't include it in the root otherwise it'll look for /var/www/html/www_new/admin/admin try_files $uri /client/index.html; # try_files will need to be relative to root } diff --git a/js/client/yarn.lock b/js/client/yarn.lock index 60fb70ffca..4778f5609f 100644 --- a/js/client/yarn.lock +++ b/js/client/yarn.lock @@ -9672,8 +9672,10 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" -zksync-crypto@../zksync-crypto: - version "0.0.0" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved 
"https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/zksync.js/package.json b/js/zksync.js/package.json index 28d70ff925..0fccb2aef2 100644 --- a/js/zksync.js/package.json +++ b/js/zksync.js/package.json @@ -13,7 +13,7 @@ "js-sha256": "^0.9.0", "websocket": "^1.0.30", "websocket-as-promised": "^0.10.1", - "zksync-crypto": "link:../zksync-crypto" + "zksync-crypto": "^0.0.2" }, "peerDependencies": { "ethers": "^4.0.33" diff --git a/js/zksync.js/yarn.lock b/js/zksync.js/yarn.lock index 10c5b6bf1a..5c7b4489ad 100644 --- a/js/zksync.js/yarn.lock +++ b/js/zksync.js/yarn.lock @@ -1331,6 +1331,7 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.0.tgz#fcbe2db63610361afcc5eb9e0ac91e976d046114" integrity sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg== -"zksync-crypto@link:../zksync-crypto": - version "0.0.0" - uid "" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== From 4bfcbae82eeb1474ce40d0de3daf3eeb7cb5e9e4 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 19:21:03 +0200 Subject: [PATCH 135/186] Added intrfaces Upgradeable and UpgradeableMaster --- contracts/contracts/Events.sol | 10 ++- contracts/contracts/Franklin.sol | 12 ++- contracts/contracts/Proxy.sol | 73 +++++++++------ contracts/contracts/UpgradeGatekeeper.sol | 90 ++++++------------- contracts/contracts/Upgradeable.sol | 38 ++++++++ contracts/contracts/test/DummyTarget.sol | 55 ++++++++---- contracts/scripts/test-upgrade-franklin.ts | 8 +- contracts/src.ts/deploy.ts | 6 +- .../test/unit_tests/upgradeGatekeeper_test.ts | 10 +-- 9 files changed, 180 insertions(+), 122 deletions(-) create mode 100644 contracts/contracts/Upgradeable.sol diff --git a/contracts/contracts/Events.sol b/contracts/contracts/Events.sol index 0303a69047..d123a4debb 100644 --- a/contracts/contracts/Events.sol +++ b/contracts/contracts/Events.sol @@ -1,5 +1,7 @@ pragma solidity 0.5.16; +import "./Upgradeable.sol"; + /// @title zkSync events /// @author Matter Labs @@ -57,9 +59,9 @@ contract Events { /// @author Matter Labs contract UpgradeEvents { - /// @notice Event emitted when new proxy is added to upgrade gatekeeper's list of managed contracts - event ProxyAdded( - address proxyAddress + /// @notice Event emitted when new upgradeable contract is added to upgrade gatekeeper's list of managed contracts + event UpgradeableAdded( + Upgradeable upgradeable ); /// @notice Upgrade mode enter event @@ -75,7 +77,7 @@ contract UpgradeEvents { /// @notice Upgrade mode complete event event UpgradeCompleted( - address proxyAddress, + Upgradeable upgradeable, address newTargetAddress ); diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 3c15862c58..4a37d22b6d 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -2,6 +2,7 @@ pragma solidity 0.5.16; import "../node_modules/openzeppelin-solidity/contracts/token/ERC20/IERC20.sol"; +import "./Upgradeable.sol"; import "./Storage.sol"; import "./Config.sol"; import "./Events.sol"; @@ -12,15 +13,20 @@ import "./Operations.sol"; /// @title 
zkSync main contract /// @author Matter Labs -contract Franklin is Storage, Config, Events { +contract Franklin is UpgradeableMaster, Storage, Config, Events { // Upgrade functional /// @notice Notice period before activation preparation status of upgrade mode - function upgradeNoticePeriod() external pure returns (uint) { + function upgradeNoticePeriod() external returns (uint) { return UPGRADE_NOTICE_PERIOD; } + /// @notice Notification that upgrade notice period started + function upgradeNoticePeriodStarted() external { + + } + /// @notice Notification that upgrade preparation status is activated function upgradePreparationStarted() external { upgradePreparation = true; @@ -41,7 +47,7 @@ contract Franklin is Storage, Config, Events { /// @notice Checks that contract is ready for upgrade /// @return bool flag indicating that contract is ready for upgrade - function readyForUpgrade() external view returns (bool) { + function readyForUpgrade() external returns (bool) { return !exodusMode && totalOpenPriorityRequests == 0; } diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol index 7660c6dbad..15eb88f45a 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -1,11 +1,13 @@ pragma solidity 0.5.16; import "./Ownable.sol"; +import "./Upgradeable.sol"; /// @title Proxy Contract +/// @dev NOTICE: Proxy must implement UpgradeableMaster to prevent calling some function of it not by master of proxy /// @author Matter Labs -contract Proxy is Ownable { +contract Proxy is Upgradeable, UpgradeableMaster, Ownable { /// @notice Storage position of "target" (actual implementation address) bytes32 private constant targetPosition = keccak256("target"); @@ -58,30 +60,6 @@ contract Proxy is Ownable { require(initializationSuccess, "ufu11"); // ufu11 - target initialization failed } - /// @notice Notifies proxy contract that notice period started - function upgradeNoticePeriodStarted() external { - requireMaster(msg.sender); - getTarget().delegatecall(abi.encodeWithSignature("upgradeNoticePeriodStarted()")); - } - - /// @notice Notifies proxy contract that upgrade preparation status is activated - function upgradePreparationStarted() external { - requireMaster(msg.sender); - getTarget().delegatecall(abi.encodeWithSignature("upgradePreparationStarted()")); - } - - /// @notice Notifies proxy contract that upgrade canceled - function upgradeCanceled() external { - requireMaster(msg.sender); - getTarget().delegatecall(abi.encodeWithSignature("upgradeCanceled()")); - } - - /// @notice Notifies proxy contract that upgrade finishes - function upgradeFinishes() external { - requireMaster(msg.sender); - getTarget().delegatecall(abi.encodeWithSignature("upgradeFinishes()")); - } - /// @notice Performs a delegatecall to the contract implementation /// @dev Fallback function allowing to perform a delegatecall to the given implementation /// This function will return whatever the implementation call returns @@ -120,4 +98,49 @@ contract Proxy is Ownable { } } + /// UpgradeableMaster functions + + /// @notice Notice period before activation preparation status of upgrade mode + function upgradeNoticePeriod() external returns (uint) { + (bool success, bytes memory result) = getTarget().delegatecall(abi.encodeWithSignature("upgradeNoticePeriod()")); + require(success, "unp11"); // unp11 - upgradeNoticePeriod delegatecall failed + return abi.decode(result, (uint)); + } + + /// @notice Notifies proxy contract that notice period started + function upgradeNoticePeriodStarted() 
external { + requireMaster(msg.sender); + (bool success, ) = getTarget().delegatecall(abi.encodeWithSignature("upgradeNoticePeriodStarted()")); + require(success, "nps11"); // nps11 - upgradeNoticePeriodStarted delegatecall failed + } + + /// @notice Notifies proxy contract that upgrade preparation status is activated + function upgradePreparationStarted() external { + requireMaster(msg.sender); + (bool success, ) = getTarget().delegatecall(abi.encodeWithSignature("upgradePreparationStarted()")); + require(success, "ups11"); // ups11 - upgradePreparationStarted delegatecall failed + } + + /// @notice Notifies proxy contract that upgrade canceled + function upgradeCanceled() external { + requireMaster(msg.sender); + (bool success, ) = getTarget().delegatecall(abi.encodeWithSignature("upgradeCanceled()")); + require(success, "puc11"); // puc11 - upgradeCanceled delegatecall failed + } + + /// @notice Notifies proxy contract that upgrade finishes + function upgradeFinishes() external { + requireMaster(msg.sender); + (bool success, ) = getTarget().delegatecall(abi.encodeWithSignature("upgradeFinishes()")); + require(success, "puf11"); // puf11 - upgradeFinishes delegatecall failed + } + + /// @notice Checks that contract is ready for upgrade + /// @return bool flag indicating that contract is ready for upgrade + function readyForUpgrade() external returns (bool) { + (bool success, bytes memory result) = getTarget().delegatecall(abi.encodeWithSignature("readyForUpgrade()")); + require(success, "rfu11"); // rfu11 - readyForUpgrade delegatecall failed + return abi.decode(result, (bool)); + } + } diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index 47eeee2856..c3a507a1b3 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -3,48 +3,15 @@ pragma experimental ABIEncoderV2; import "./Events.sol"; import "./Ownable.sol"; +import "./Upgradeable.sol"; -/// @title Interface of the main contract -interface MainContract { - - /// @notice Notice period before activation preparation status of upgrade mode - function upgradeNoticePeriod() external pure returns (uint); - - /// @notice Notifies proxy contract that notice period started - function upgradeNoticePeriodStarted() external; - - /// @notice Notifies proxy contract that upgrade preparation status is activated - function upgradePreparationStarted() external; - - /// @notice Notifies proxy contract that upgrade canceled - function upgradeCanceled() external; - - /// @notice Notifies proxy contract that upgrade finishes - function upgradeFinishes() external; - - /// @notice Checks that contract is ready for upgrade - /// @return bool flag indicating that contract is ready for upgrade - function readyForUpgrade() external view returns (bool); - -} - -/// @title Interface of the proxy contract -interface UpgradeableProxy { - - /// @notice Upgrades target of upgradeable contract - /// @param newTarget New target - /// @param newTargetInitializationParameters New target initialization parameters - function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external; - -} - /// @title Upgrade Gatekeeper Contract /// @author Matter Labs contract UpgradeGatekeeper is UpgradeEvents, Ownable { - /// @notice Array of addresses of proxy contracts managed by the gatekeeper - address[] public proxies; + /// @notice Array of addresses of upgradeable contracts managed by the gatekeeper + Upgradeable[] public managedContracts; /// 
@notice Upgrade mode statuses enum UpgradeStatus { @@ -53,42 +20,42 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { Preparation } - UpgradeStatus upgradeStatus; + UpgradeStatus public upgradeStatus; /// @notice Notice period activation timestamp (as seconds since unix epoch) /// @dev Will be equal to zero in case of not active upgrade mode - uint noticePeriodActivationTime; + uint public noticePeriodActivationTime; - /// @notice Addresses of the next versions of the contracts to be upgraded (if element of this array is equal to zero address it means that this proxy will not be upgraded) + /// @notice Addresses of the next versions of the contracts to be upgraded (if element of this array is equal to zero address it means that appropriate upgradeable contract wouldn't be upgraded this time) /// @dev Will be empty in case of not active upgrade mode - address[] nextTargets; + address[] public nextTargets; - /// @notice Contract which allows finish upgrade during preparation status of upgrade - MainContract mainContract; + /// @notice Contract which defines notice period duration and allows finish upgrade during preparation of it + UpgradeableMaster public mainContract; /// @notice Contract constructor - /// @param _mainContractAddress Address of contract which processes priority operations - /// @dev Calls Ownable contract constructor and adds _mainContractAddress to the list of contracts managed by the gatekeeper - constructor(address _mainContractAddress) Ownable(msg.sender) public { - mainContract = MainContract(_mainContractAddress); + /// @param _upgradeableMasterAddress Address of contract which defines notice period duration and allows finish upgrade during preparation of it + /// @dev Calls Ownable contract constructor + constructor(address _upgradeableMasterAddress) Ownable(msg.sender) public { + mainContract = UpgradeableMaster(_upgradeableMasterAddress); } - /// @notice Adds a new proxy to the list of contracts managed by the gatekeeper - /// @param proxy Address of proxy to add - function addProxyContract(address proxy) external { + /// @notice Adds a new upgradeable contract to the list of contracts managed by the gatekeeper + /// @param addr Address of upgradeable contract to add + function addUpgradeable(address addr) external { requireMaster(msg.sender); - require(upgradeStatus == UpgradeStatus.Idle, "apc11"); /// apc11 - proxy can't be added during upgrade + require(upgradeStatus == UpgradeStatus.Idle, "apc11"); /// apc11 - upgradeable contract can't be added during upgrade - proxies.push(proxy); - emit ProxyAdded(proxy); + managedContracts.push(Upgradeable(addr)); + emit UpgradeableAdded(Upgradeable(addr)); } /// @notice Starts upgrade (activates notice period) - /// @param newTargets New proxies targets (if element of this array is equal to zero address it means that this proxy will not be upgraded) + /// @param newTargets New managed contracts targets (if element of this array is equal to zero address it means that appropriate upgradeable contract wouldn't be upgraded this time) function startUpgrade(address[] calldata newTargets) external { requireMaster(msg.sender); require(upgradeStatus == UpgradeStatus.Idle, "spu11"); // spu11 - unable to activate active upgrade mode - require(newTargets.length == proxies.length, "spu12"); // spu12 - number of new targets must be equal to the number of proxies + require(newTargets.length == managedContracts.length, "spu12"); // spu12 - number of new targets must be equal to the number of managed contracts 
mainContract.upgradeNoticePeriodStarted(); upgradeStatus = UpgradeStatus.NoticePeriod; @@ -129,20 +96,19 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { } /// @notice Finishes upgrade - /// @param targetsInitializationParameters New targets initialization parameters per each proxy + /// @param targetsInitializationParameters New targets initialization parameters per each upgradeable contract function finishUpgrade(bytes[] calldata targetsInitializationParameters) external { requireMaster(msg.sender); require(upgradeStatus == UpgradeStatus.Preparation, "fpu11"); // fpu11 - unable to finish upgrade without preparation status active - require(targetsInitializationParameters.length == proxies.length, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of proxies + require(targetsInitializationParameters.length == managedContracts.length, "fpu12"); // fpu12 - number of new targets initialization parameters must be equal to the number of managed contracts require(mainContract.readyForUpgrade(), "fpu13"); // fpu13 - main contract is not ready for upgrade mainContract.upgradeFinishes(); - for (uint64 i = 0; i < proxies.length; i++) { - address proxy = proxies[i]; - address nextTarget = nextTargets[i]; - if (nextTarget != address(0)) { - UpgradeableProxy(proxy).upgradeTarget(nextTarget, targetsInitializationParameters[i]); - emit UpgradeCompleted(proxy, nextTarget); + for (uint64 i = 0; i < managedContracts.length; i++) { + address newTarget = nextTargets[i]; + if (newTarget != address(0)) { + managedContracts[i].upgradeTarget(newTarget, targetsInitializationParameters[i]); + emit UpgradeCompleted(managedContracts[i], newTarget); } } diff --git a/contracts/contracts/Upgradeable.sol b/contracts/contracts/Upgradeable.sol new file mode 100644 index 0000000000..82f1f0e38d --- /dev/null +++ b/contracts/contracts/Upgradeable.sol @@ -0,0 +1,38 @@ +pragma solidity 0.5.16; + + +/// @title Interface of the upgradeable master contract (defines notice period duration and allows finish upgrade during preparation of it) +/// @author Matter Labs +interface UpgradeableMaster { + + /// @notice Notice period before activation preparation status of upgrade mode + function upgradeNoticePeriod() external returns (uint); + + /// @notice Notifies contract that notice period started + function upgradeNoticePeriodStarted() external; + + /// @notice Notifies contract that upgrade preparation status is activated + function upgradePreparationStarted() external; + + /// @notice Notifies contract that upgrade canceled + function upgradeCanceled() external; + + /// @notice Notifies contract that upgrade finishes + function upgradeFinishes() external; + + /// @notice Checks that contract is ready for upgrade + /// @return bool flag indicating that contract is ready for upgrade + function readyForUpgrade() external returns (bool); + +} + +/// @title Interface of the upgradeable contract +/// @author Matter Labs +interface Upgradeable { + + /// @notice Upgrades target of upgradeable contract + /// @param newTarget New target + /// @param newTargetInitializationParameters New target initialization parameters + function upgradeTarget(address newTarget, bytes calldata newTargetInitializationParameters) external; + +} \ No newline at end of file diff --git a/contracts/contracts/test/DummyTarget.sol b/contracts/contracts/test/DummyTarget.sol index dcd5d431e4..c2763de0c6 100644 --- a/contracts/contracts/test/DummyTarget.sol +++ b/contracts/contracts/test/DummyTarget.sol @@ -1,8 +1,9 
@@ pragma solidity 0.5.16; -interface DummyTarget { +import "../Upgradeable.sol"; + - function upgradeNoticePeriod() external pure returns (uint); +interface DummyTarget { function get_DUMMY_INDEX() external pure returns (uint256); @@ -10,17 +11,31 @@ interface DummyTarget { function verifyPriorityOperation() external; - function readyForUpgrade() external returns (bool); - } -contract DummyFirst is DummyTarget { +contract DummyFirst is UpgradeableMaster, DummyTarget { uint constant UPGRADE_NOTICE_PERIOD = 4; - function upgradeNoticePeriod() external pure returns (uint) { + function get_UPGRADE_NOTICE_PERIOD() external pure returns (uint) { return UPGRADE_NOTICE_PERIOD; } + function upgradeNoticePeriod() external returns (uint) { + return UPGRADE_NOTICE_PERIOD; + } + + function upgradeNoticePeriodStarted() external {} + + function upgradePreparationStarted() external {} + + function upgradeCanceled() external {} + + function upgradeFinishes() external {} + + function readyForUpgrade() external returns (bool) { + return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); + } + uint256 private constant DUMMY_INDEX = 1; function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; @@ -49,19 +64,31 @@ contract DummyFirst is DummyTarget { _verifiedPriorityOperations++; } - function readyForUpgrade() external returns (bool) { - return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); - } - } -contract DummySecond is DummyTarget { +contract DummySecond is UpgradeableMaster, DummyTarget { uint constant UPGRADE_NOTICE_PERIOD = 4; - function upgradeNoticePeriod() external pure returns (uint) { + function get_UPGRADE_NOTICE_PERIOD() external pure returns (uint) { + return UPGRADE_NOTICE_PERIOD; + } + + function upgradeNoticePeriod() external returns (uint) { return UPGRADE_NOTICE_PERIOD; } + function upgradeNoticePeriodStarted() external {} + + function upgradePreparationStarted() external {} + + function upgradeCanceled() external {} + + function upgradeFinishes() external {} + + function readyForUpgrade() external returns (bool) { + return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); + } + uint256 private constant DUMMY_INDEX = 2; function get_DUMMY_INDEX() external pure returns (uint256) { return DUMMY_INDEX; @@ -90,8 +117,4 @@ contract DummySecond is DummyTarget { _verifiedPriorityOperations++; } - function readyForUpgrade() external returns (bool) { - return totalVerifiedPriorityOperations() >= totalRegisteredPriorityOperations(); - } - } diff --git a/contracts/scripts/test-upgrade-franklin.ts b/contracts/scripts/test-upgrade-franklin.ts index a0d78636a5..ebdb829ced 100644 --- a/contracts/scripts/test-upgrade-franklin.ts +++ b/contracts/scripts/test-upgrade-franklin.ts @@ -50,15 +50,15 @@ async function main() { {gasLimit: 6500000}, ); - let notice_period = parseInt(await newTargetFranklin.upgradeNoticePeriod()); // in tests notice period of FranklinTestNoInit will be equal to FranklinTest - await (await upgradeGatekeeper.startUpgrade([AddressZero, AddressZero, newTargetFranklin.address])).wait(); // wait notice period - await new Promise(r => setTimeout(r, notice_period * 1000 + 10)); + while (parseInt(await upgradeGatekeeper.upgradeStatus()) !== 2/*Preparation*/) { + await new Promise(r => setTimeout(r, 1000)); + await (await upgradeGatekeeper.startPreparation()).wait(); + } // finish upgrade - await (await upgradeGatekeeper.startPreparation()).wait(); await (await upgradeGatekeeper.finishUpgrade([[], 
[], []])).wait(); await expect(await proxyContract.getTarget()) diff --git a/contracts/src.ts/deploy.ts b/contracts/src.ts/deploy.ts index 117b60ae9b..255789f430 100644 --- a/contracts/src.ts/deploy.ts +++ b/contracts/src.ts/deploy.ts @@ -227,9 +227,9 @@ export class Deployer { await (await this.getDeployedContract('Verifier').transferMastership(contract.address)).wait(); await (await this.getDeployedContract('Franklin').transferMastership(contract.address)).wait(); - await (await contract.addProxyContract(this.getDeployedContract('Governance').address)).wait(); - await (await contract.addProxyContract(this.getDeployedContract('Verifier').address)).wait(); - await (await contract.addProxyContract(this.getDeployedContract('Franklin').address)).wait(); + await (await contract.addUpgradeable(this.getDeployedContract('Governance').address)).wait(); + await (await contract.addUpgradeable(this.getDeployedContract('Verifier').address)).wait(); + await (await contract.addUpgradeable(this.getDeployedContract('Franklin').address)).wait(); return contract; } diff --git a/contracts/test/unit_tests/upgradeGatekeeper_test.ts b/contracts/test/unit_tests/upgradeGatekeeper_test.ts index c3c404e912..d1ee09bcbe 100644 --- a/contracts/test/unit_tests/upgradeGatekeeper_test.ts +++ b/contracts/test/unit_tests/upgradeGatekeeper_test.ts @@ -31,7 +31,7 @@ describe("UpgradeGatekeeper unit tests", function () { }) await proxyTestContract.transferMastership(UpgradeGatekeeperContract.address); - await UpgradeGatekeeperContract.addProxyContract(proxyTestContract.address); + await UpgradeGatekeeperContract.addUpgradeable(proxyTestContract.address); // check initial dummy index and storage expect(await proxyDummyInterface.get_DUMMY_INDEX()) @@ -45,7 +45,7 @@ describe("UpgradeGatekeeper unit tests", function () { it("checking that requireMaster calls present", async () => { let UpgradeGatekeeperContract_with_wallet2_signer = await UpgradeGatekeeperContract.connect(wallet2); - expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.addProxyContract(AddressZero) )).revertReason).equal("oro11") + expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.addUpgradeable(AddressZero) )).revertReason).equal("oro11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.startUpgrade([]) )).revertReason).equal("oro11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.cancelUpgrade() )).revertReason).equal("oro11") expect((await getCallRevertReason( () => UpgradeGatekeeperContract_with_wallet2_signer.finishUpgrade([], []) )).revertReason).equal("oro11") @@ -74,14 +74,14 @@ describe("UpgradeGatekeeper unit tests", function () { let activated_time = performance.now(); // wait and activate preparation status - let all_time_in_sec = parseInt(await DummyFirst.upgradeNoticePeriod()); + let notice_period = parseInt(await DummyFirst.get_UPGRADE_NOTICE_PERIOD()); for (let step = 1; step <= 3; step++) { if (step != 3) { - while ((performance.now() - start_time) < Math.round(all_time_in_sec * 1000.0 * step / 10.0 + 10)) { + while ((performance.now() - start_time) < Math.round(notice_period * 1000.0 * step / 10.0 + 10)) { // wait } } else { - while ((performance.now() - activated_time) < all_time_in_sec * 1000 + 10) { + while ((performance.now() - activated_time) < notice_period * 1000 + 10) { // wait } } From ec43d3374bc8480b892d7024ae873661487a729b Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 
2020 19:31:40 +0200 Subject: [PATCH 136/186] Receiving UpgradeableMaster in constructor of UpgradeGatekeeper.sol --- contracts/contracts/Proxy.sol | 2 +- contracts/contracts/UpgradeGatekeeper.sol | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/contracts/contracts/Proxy.sol b/contracts/contracts/Proxy.sol index 15eb88f45a..81f0d0e51b 100644 --- a/contracts/contracts/Proxy.sol +++ b/contracts/contracts/Proxy.sol @@ -5,7 +5,7 @@ import "./Upgradeable.sol"; /// @title Proxy Contract -/// @dev NOTICE: Proxy must implement UpgradeableMaster to prevent calling some function of it not by master of proxy +/// @dev NOTICE: Proxy must implement UpgradeableMaster interface to prevent calling some function of it not by master of proxy /// @author Matter Labs contract Proxy is Upgradeable, UpgradeableMaster, Ownable { diff --git a/contracts/contracts/UpgradeGatekeeper.sol b/contracts/contracts/UpgradeGatekeeper.sol index c3a507a1b3..21566566c4 100644 --- a/contracts/contracts/UpgradeGatekeeper.sol +++ b/contracts/contracts/UpgradeGatekeeper.sol @@ -34,10 +34,10 @@ contract UpgradeGatekeeper is UpgradeEvents, Ownable { UpgradeableMaster public mainContract; /// @notice Contract constructor - /// @param _upgradeableMasterAddress Address of contract which defines notice period duration and allows finish upgrade during preparation of it + /// @param _mainContract Contract which defines notice period duration and allows finish upgrade during preparation of it /// @dev Calls Ownable contract constructor - constructor(address _upgradeableMasterAddress) Ownable(msg.sender) public { - mainContract = UpgradeableMaster(_upgradeableMasterAddress); + constructor(UpgradeableMaster _mainContract) Ownable(msg.sender) public { + mainContract = _mainContract; } /// @notice Adds a new upgradeable contract to the list of contracts managed by the gatekeeper From 58740a3ba8455df4efbbe3f6b454b523c3d75b58 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 19:34:02 +0200 Subject: [PATCH 137/186] upgradePreparation => upgradePreparationActive --- contracts/contracts/Franklin.sol | 8 ++++---- contracts/contracts/Storage.sol | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index 4a37d22b6d..f0e0b57a5a 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -29,19 +29,19 @@ contract Franklin is UpgradeableMaster, Storage, Config, Events { /// @notice Notification that upgrade preparation status is activated function upgradePreparationStarted() external { - upgradePreparation = true; + upgradePreparationActive = true; upgradePreparationActivationTime = now; } /// @notice Notification that upgrade canceled function upgradeCanceled() external { - upgradePreparation = false; + upgradePreparationActive = false; upgradePreparationActivationTime = 0; } /// @notice Notification that upgrade finishes function upgradeFinishes() external { - upgradePreparation = false; + upgradePreparationActive = false; upgradePreparationActivationTime = 0; } @@ -725,7 +725,7 @@ contract Franklin is UpgradeableMaster, Storage, Config, Events { uint256 _fee, bytes memory _pubData ) internal { - require(!upgradePreparation || now >= upgradePreparationActivationTime + UPGRADE_PREPARATION_LOCK_PERIOD, "apr11"); // apr11 - priority request can't be added during lock period of preparation status of upgrade + require(!upgradePreparationActive || now >= upgradePreparationActivationTime + 
UPGRADE_PREPARATION_LOCK_PERIOD, "apr11"); // apr11 - priority request can't be added during lock period of preparation status of upgrade // Expiration block is: current block number + priority expiration delta uint256 expirationBlock = block.number + PRIORITY_EXPIRATION; diff --git a/contracts/contracts/Storage.sol b/contracts/contracts/Storage.sol index 2d416b8182..ba71f7a849 100644 --- a/contracts/contracts/Storage.sol +++ b/contracts/contracts/Storage.sol @@ -13,7 +13,7 @@ contract Storage { /// @notice Flag indicates that upgrade preparation status is active /// @dev Will store false in case of not active upgrade mode - bool public upgradePreparation; + bool public upgradePreparationActive; /// @notice Upgrade preparation activation timestamp (as seconds since unix epoch) /// @dev Will be equal to zero in case of not active upgrade mode From 96e3873fb60b9c8729a1c56de82beb335e527d92 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 19:43:20 +0200 Subject: [PATCH 138/186] Added upgradePreparationLockStatus() function --- contracts/contracts/Franklin.sol | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index f0e0b57a5a..cf4650cd57 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -670,6 +670,10 @@ contract Franklin is UpgradeableMaster, Storage, Config, Events { totalCommittedPriorityRequests -= _reverted.priorityOperations; } + function upgradePreparationLockStatus() public returns (bool) { + return upgradePreparationActive && now < upgradePreparationActivationTime + UPGRADE_PREPARATION_LOCK_PERIOD; + } + /// @notice Checks that current state not is exodus mode function requireActive() internal view { require(!exodusMode, "fre11"); // exodus mode activated @@ -725,7 +729,7 @@ contract Franklin is UpgradeableMaster, Storage, Config, Events { uint256 _fee, bytes memory _pubData ) internal { - require(!upgradePreparationActive || now >= upgradePreparationActivationTime + UPGRADE_PREPARATION_LOCK_PERIOD, "apr11"); // apr11 - priority request can't be added during lock period of preparation status of upgrade + require(!upgradePreparationLockStatus(), "apr11"); // apr11 - priority request can't be added during lock period of preparation of upgrade // Expiration block is: current block number + priority expiration delta uint256 expirationBlock = block.number + PRIORITY_EXPIRATION; From adfb8f37ec6c72ec4546bf4f4372a0bad80d64ec Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 19:51:00 +0200 Subject: [PATCH 139/186] Added upgradePreparationLockStatus() function notice --- contracts/contracts/Franklin.sol | 1 + 1 file changed, 1 insertion(+) diff --git a/contracts/contracts/Franklin.sol b/contracts/contracts/Franklin.sol index cf4650cd57..c1f0e651ed 100644 --- a/contracts/contracts/Franklin.sol +++ b/contracts/contracts/Franklin.sol @@ -670,6 +670,7 @@ contract Franklin is UpgradeableMaster, Storage, Config, Events { totalCommittedPriorityRequests -= _reverted.priorityOperations; } + /// @notice Checks that upgrade preparation is active and it is in lock period (period when contract will not add any new priority requests) function upgradePreparationLockStatus() public returns (bool) { return upgradePreparationActive && now < upgradePreparationActivationTime + UPGRADE_PREPARATION_LOCK_PERIOD; } From b004e0aa4c5a29c596c5827fc3db5029a97cf2ec Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Fri, 27 Mar 2020 22:55:56 +0200 Subject: 
[PATCH 140/186] Setted Bytes.sol to dev version --- contracts/contracts/Bytes.sol | 103 ++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 49 deletions(-) diff --git a/contracts/contracts/Bytes.sol b/contracts/contracts/Bytes.sol index 0ada58e650..d707b1801c 100644 --- a/contracts/contracts/Bytes.sol +++ b/contracts/contracts/Bytes.sol @@ -88,22 +88,20 @@ library Bytes { uint _start, uint _length ) - internal - pure - returns (bytes memory) + internal + pure + returns (bytes memory) { require(_bytes.length >= (_start + _length), "bse11"); // bytes length is less then start byte + length bytes - if (_length == 0) { - return new bytes(0); - } - bytes memory tempBytes; assembly { + switch iszero(_length) + case 0 { // Get a location of some free memory and store it in tempBytes as // Solidity does for memory variables. - tempBytes := mload(0x40) + tempBytes := mload(0x40) // The first word of the slice result is potentially a partial // word read from the original array. To read it, we calculate @@ -113,31 +111,38 @@ library Bytes { // land at the beginning of the contents of the new array. When // we're done copying, we overwrite the full first word with // the actual length of the slice. - let lengthmod := and(_length, 31) + let lengthmod := and(_length, 31) // The multiplication in the next line is necessary // because when slicing multiples of 32 bytes (lengthmod == 0) // the following copy loop was copying the origin's length // and then ending prematurely not copying everything it should. - let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) - let end := add(mc, _length) + let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) + let end := add(mc, _length) - for { + for { // The multiplication in the next line has the same exact purpose // as the one above. - let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) - } lt(mc, end) { - mc := add(mc, 0x20) - cc := add(cc, 0x20) - } { - mstore(mc, mload(cc)) - } + let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } - mstore(tempBytes, _length) + mstore(tempBytes, _length) //update free-memory pointer //allocating the array padded to 32 bytes like the compiler does now - mstore(0x40, and(add(mc, 31), not(31))) + mstore(0x40, and(add(mc, 31), not(31))) + } + //if we want a zero-length slice let's just return a zero-length array + default { + tempBytes := mload(0x40) + + mstore(0x40, add(tempBytes, 0x20)) + } } return tempBytes; @@ -190,48 +195,48 @@ library Bytes { ) internal pure returns (bytes memory) { bytes memory tempBytes; assembly { - // Get a location of some free memory and store it in tempBytes as - // Solidity does for memory variables. + // Get a location of some free memory and store it in tempBytes as + // Solidity does for memory variables. tempBytes := mload(0x40) - // Store the length of the first bytes array at the beginning of - // the memory for tempBytes. + // Store the length of the first bytes array at the beginning of + // the memory for tempBytes. let length := mload(_preBytes) mstore(tempBytes, length) - // Maintain a memory counter for the current write location in the - // temp bytes array by adding the 32 bytes for the array length to - // the starting location. 
+ // Maintain a memory counter for the current write location in the + // temp bytes array by adding the 32 bytes for the array length to + // the starting location. let mc := add(tempBytes, 0x20) - // Stop copying when the memory counter reaches the length of the - // first bytes array. + // Stop copying when the memory counter reaches the length of the + // first bytes array. let end := add(mc, length) for { - // Initialize a copy counter to the start of the _preBytes data, - // 32 bytes into its memory. + // Initialize a copy counter to the start of the _preBytes data, + // 32 bytes into its memory. let cc := add(_preBytes, 0x20) } lt(mc, end) { - // Increase both counters by 32 bytes each iteration. + // Increase both counters by 32 bytes each iteration. mc := add(mc, 0x20) cc := add(cc, 0x20) } { - // Write the _preBytes data into the tempBytes memory 32 bytes - // at a time. + // Write the _preBytes data into the tempBytes memory 32 bytes + // at a time. mstore(mc, mload(cc)) } - // Add the length of _postBytes to the current length of tempBytes - // and store it as the new length in the first 32 bytes of the - // tempBytes memory. + // Add the length of _postBytes to the current length of tempBytes + // and store it as the new length in the first 32 bytes of the + // tempBytes memory. length := mload(_postBytes) mstore(tempBytes, add(length, mload(tempBytes))) - // Move the memory counter back from a multiple of 0x20 to the - // actual end of the _preBytes data. + // Move the memory counter back from a multiple of 0x20 to the + // actual end of the _preBytes data. mc := end - // Stop copying when the memory counter reaches the new combined - // length of the arrays. + // Stop copying when the memory counter reaches the new combined + // length of the arrays. end := add(mc, length) for { @@ -243,14 +248,14 @@ library Bytes { mstore(mc, mload(cc)) } - // Update the free-memory pointer by padding our last write location - // to 32 bytes: add 31 bytes to the end of tempBytes to move to the - // next 32 byte block, then round down to the nearest multiple of - // 32. If the sum of the length of the two arrays is zero then add - // one before rounding down to leave a blank 32 bytes (the length block with 0). + // Update the free-memory pointer by padding our last write location + // to 32 bytes: add 31 bytes to the end of tempBytes to move to the + // next 32 byte block, then round down to the nearest multiple of + // 32. If the sum of the length of the two arrays is zero then add + // one before rounding down to leave a blank 32 bytes (the length block with 0). mstore(0x40, and( - add(add(end, iszero(add(length, mload(_preBytes)))), 31), - not(31) // Round down to the nearest 32 bytes. + add(add(end, iszero(add(length, mload(_preBytes)))), 31), + not(31) // Round down to the nearest 32 bytes. 
)) } return tempBytes; From fd3386712d4888a1257b4e6dce5688d43b919af7 Mon Sep 17 00:00:00 2001 From: Ihor Barenblat Date: Sat, 28 Mar 2020 13:18:40 +0200 Subject: [PATCH 141/186] Setted Bytes.sol to dev version --- contracts/contracts/Bytes.sol | 83 +++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 39 deletions(-) diff --git a/contracts/contracts/Bytes.sol b/contracts/contracts/Bytes.sol index 0ada58e650..717b09a921 100644 --- a/contracts/contracts/Bytes.sol +++ b/contracts/contracts/Bytes.sol @@ -94,50 +94,55 @@ library Bytes { { require(_bytes.length >= (_start + _length), "bse11"); // bytes length is less then start byte + length bytes - if (_length == 0) { - return new bytes(0); - } - bytes memory tempBytes; assembly { - // Get a location of some free memory and store it in tempBytes as - // Solidity does for memory variables. - tempBytes := mload(0x40) - - // The first word of the slice result is potentially a partial - // word read from the original array. To read it, we calculate - // the length of that partial word and start copying that many - // bytes into the array. The first word we copy will start with - // data we don't care about, but the last `lengthmod` bytes will - // land at the beginning of the contents of the new array. When - // we're done copying, we overwrite the full first word with - // the actual length of the slice. - let lengthmod := and(_length, 31) - - // The multiplication in the next line is necessary - // because when slicing multiples of 32 bytes (lengthmod == 0) - // the following copy loop was copying the origin's length - // and then ending prematurely not copying everything it should. - let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) - let end := add(mc, _length) - - for { - // The multiplication in the next line has the same exact purpose - // as the one above. - let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) - } lt(mc, end) { - mc := add(mc, 0x20) - cc := add(cc, 0x20) - } { - mstore(mc, mload(cc)) + switch iszero(_length) + case 0 { + // Get a location of some free memory and store it in tempBytes as + // Solidity does for memory variables. + tempBytes := mload(0x40) + + // The first word of the slice result is potentially a partial + // word read from the original array. To read it, we calculate + // the length of that partial word and start copying that many + // bytes into the array. The first word we copy will start with + // data we don't care about, but the last `lengthmod` bytes will + // land at the beginning of the contents of the new array. When + // we're done copying, we overwrite the full first word with + // the actual length of the slice. + let lengthmod := and(_length, 31) + + // The multiplication in the next line is necessary + // because when slicing multiples of 32 bytes (lengthmod == 0) + // the following copy loop was copying the origin's length + // and then ending prematurely not copying everything it should. + let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) + let end := add(mc, _length) + + for { + // The multiplication in the next line has the same exact purpose + // as the one above. 
+ let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } + + mstore(tempBytes, _length) + + //update free-memory pointer + //allocating the array padded to 32 bytes like the compiler does now + mstore(0x40, and(add(mc, 31), not(31))) } + //if we want a zero-length slice let's just return a zero-length array + default { + tempBytes := mload(0x40) - mstore(tempBytes, _length) - - //update free-memory pointer - //allocating the array padded to 32 bytes like the compiler does now - mstore(0x40, and(add(mc, 31), not(31))) + mstore(0x40, add(tempBytes, 0x20)) + } } return tempBytes; From f06b3f300562d09b3c8234ed2639b34ae7f1882d Mon Sep 17 00:00:00 2001 From: Vitaly Drogan Date: Sun, 29 Mar 2020 21:52:18 +0200 Subject: [PATCH 142/186] fast pow2 gate count --- Cargo.lock | 11 +++++---- core/key_generator/Cargo.toml | 1 + core/key_generator/src/franklin_key.rs | 33 ++++++++++++++++++++------ 3 files changed, 33 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 167a41ae72..623d483369 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -463,7 +463,7 @@ dependencies = [ [[package]] name = "bellman_ce" version = "0.3.2" -source = "git+https://github.com/matter-labs/bellman?branch=plonk_release#0e500f633476265fe34772cb97e1d073386c9ad2" +source = "git+https://github.com/matter-labs/bellman?branch=plonk_release#31c92e502863131a03c6aff1753b72d7d3b5e448" dependencies = [ "bit-vec", "blake2s_const", @@ -524,7 +524,7 @@ dependencies = [ [[package]] name = "blake2s_const" version = "0.6.0" -source = "git+https://github.com/matter-labs/bellman?branch=plonk_release#0e500f633476265fe34772cb97e1d073386c9ad2" +source = "git+https://github.com/matter-labs/bellman?branch=plonk_release#31c92e502863131a03c6aff1753b72d7d3b5e448" dependencies = [ "arrayref", "arrayvec 0.5.1", @@ -1235,7 +1235,7 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "franklin-crypto" version = "0.0.5" -source = "git+https://github.com/matter-labs/franklin-crypto.git?branch=check-franklin-circuit-transpile#6f33d778cfe2a5d889752902d1b077fdf2aa2de4" +source = "git+https://github.com/matter-labs/franklin-crypto.git?branch=check-franklin-circuit-transpile#5f5125bc96338644d03b3f1850662b0209418963" dependencies = [ "bellman_ce", "bit-vec", @@ -1866,6 +1866,7 @@ dependencies = [ "hex 0.3.2", "log 0.4.8", "models", + "rayon", "rust-crypto", "time", ] @@ -3131,9 +3132,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.49" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02044a6a92866fd61624b3db4d2c9dccc2feabbc6be490b87611bf285edbac55" +checksum = "78a7a12c167809363ec3bd7329fc0a3369056996de43c4b37ef3cd54a6ce4867" dependencies = [ "itoa", "ryu", diff --git a/core/key_generator/Cargo.toml b/core/key_generator/Cargo.toml index 9017101713..283b149286 100644 --- a/core/key_generator/Cargo.toml +++ b/core/key_generator/Cargo.toml @@ -18,3 +18,4 @@ rust-crypto = "0.2" log = "0.4" env_logger = "0.6" clap = "2.33.0" +rayon = "1.3" diff --git a/core/key_generator/src/franklin_key.rs b/core/key_generator/src/franklin_key.rs index aa82f83695..34c7e1d1d2 100644 --- a/core/key_generator/src/franklin_key.rs +++ b/core/key_generator/src/franklin_key.rs @@ -82,7 +82,7 @@ pub fn make_exodus_key() { ); } -fn estimate_power_of_two(block_size: usize) -> u32 { +fn estimate_power_of_two(block_size: usize) -> (usize, u32) { // 
let p_g = FixedGenerators::SpendingKeyGenerator; let params = ¶ms::JUBJUB_PARAMS; // let rng = &mut XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]); @@ -166,9 +166,10 @@ fn estimate_power_of_two(block_size: usize) -> u32 { use crypto_exports::franklin_crypto::bellman::plonk::better_cs::{adaptor::{TranspilationVariant, write_transpilation_hints}, cs::PlonkCsWidth4WithNextStepParams}; use crypto_exports::franklin_crypto::bellman::plonk::fft::cooley_tukey_ntt::*; - let hints = transpile::(instance_for_generation.clone()) - .expect("transpilation is successful"); - let setup = setup(instance_for_generation.clone(), &hints).expect("must make setup"); + let (size, _) = transpile_with_gates_count(instance_for_generation.clone()).expect("tranapoile with gates counts"); + //let hints = transpile::(instance_for_generation.clone()) + // .expect("transpilation is successful"); + //let setup = setup(instance_for_generation.clone(), &hints).expect("must make setup"); // let timer = Instant::now(); // let setup = SetupPolynomials::<_, PlonkCsWidth4WithNextStepParams>::read( @@ -177,9 +178,8 @@ fn estimate_power_of_two(block_size: usize) -> u32 { // .expect("setup read"); // println!("setup read: {}", timer.elapsed().as_secs()); - let size = setup.n.next_power_of_two(); - let power_of_two = size.trailing_zeros(); - return power_of_two; + let power_of_two = size.next_power_of_two().trailing_zeros(); + return (size, power_of_two); } pub fn make_circuit_parameters(block_size: usize) -> Parameters { @@ -626,6 +626,25 @@ pub fn make_circuit_parameters_plonk(block_size: usize) { // println!("omegas time: {}", timer.elapsed().as_secs()); } +#[test] +fn run_estimate_power_of_two() { + use rayon::prelude::*; + println!("chunks,power_of_two,gates"); + let testable_pows = (6..=180).step_by(2).collect::>(); + + let pows = testable_pows.into_par_iter().map(|c| { + let (gates, pow) = estimate_power_of_two(c); + (c, pow, gates) + }).collect::>(); + + for (c,pow,gates) in pows { + if pow > 26 { + break; + } + println!("{},{},{}",c, pow, gates); + } +} + #[test] fn run_make_circuit_parameters() { for n_chunks in [1].iter() { From b211a193ea26755b4d225e81a760d39c6054af23 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 07:18:27 +0300 Subject: [PATCH 143/186] Add pedersen hasher benches --- .../benches/criterion/merkle_tree/mod.rs | 10 ++-- .../criterion/merkle_tree/pedersen_hasher.rs | 51 +++++++++++++++++++ 2 files changed, 57 insertions(+), 4 deletions(-) diff --git a/core/models/benches/criterion/merkle_tree/mod.rs b/core/models/benches/criterion/merkle_tree/mod.rs index 4dbfc9d75f..09f7896c95 100644 --- a/core/models/benches/criterion/merkle_tree/mod.rs +++ b/core/models/benches/criterion/merkle_tree/mod.rs @@ -1,7 +1,8 @@ use criterion::criterion_group; -use self::parallel_smt::bench_merkle_tree as parallel_smt_bench; -use self::sequential_smt::bench_merkle_tree as sequential_smt_bench; +use self::parallel_smt::bench_merkle_tree as bench_parallel_smt; +use self::pedersen_hasher::bench_pedersen_hasher; +use self::sequential_smt::bench_merkle_tree as bench_sequential_smt; mod parallel_smt; mod pedersen_hasher; @@ -9,6 +10,7 @@ mod sequential_smt; criterion_group!( merkle_tree_benches, - sequential_smt_bench, - parallel_smt_bench + bench_parallel_smt, + bench_sequential_smt, + bench_pedersen_hasher, ); diff --git a/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs b/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs index e69de29bb2..7c66591813 100644 --- 
a/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs
+++ b/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs
@@ -0,0 +1,51 @@
+//! Benchmarks for the Pedersen hasher.
+
+use criterion::{black_box, BatchSize, Bencher, Criterion};
+use models::franklin_crypto::bellman::pairing::bn256::Bn256;
+use models::merkle_tree::{hasher::Hasher, PedersenHasher};
+
+/// Creates a boolean vector for `PedersenHasher` input.
+fn generate_input(size: usize) -> Vec<bool> {
+    (0..size).map(|i| i % 2 == 0).collect()
+}
+
+/// Measures the hashing time for a small input.
+fn pedersen_small(b: &mut Bencher<'_>) {
+    const INPUT_SIZE: usize = 8; // 1 byte.
+
+    let hasher = PedersenHasher::<Bn256>::default();
+    let input: Vec<bool> = generate_input(INPUT_SIZE);
+
+    let setup = || (hasher.clone(), input.clone());
+
+    b.iter_batched(
+        setup,
+        |(hasher, input)| {
+            let _ = hasher.hash_bits(black_box(input));
+        },
+        BatchSize::SmallInput,
+    );
+}
+
+/// Measures the hashing time for a (relatively) big input.
+fn pedersen_big(b: &mut Bencher<'_>) {
+    const INPUT_SIZE: usize = models::params::MAX_CIRCUIT_PEDERSEN_HASH_BITS; // Biggest supported size.
+
+    let hasher = PedersenHasher::<Bn256>::default();
+    let input: Vec<bool> = generate_input(INPUT_SIZE);
+
+    let setup = || (hasher.clone(), input.clone());
+
+    b.iter_batched(
+        setup,
+        |(hasher, input)| {
+            let _ = hasher.hash_bits(black_box(input));
+        },
+        BatchSize::SmallInput,
+    );
+}
+
+pub fn bench_pedersen_hasher(c: &mut Criterion) {
+    c.bench_function("Pedersen Hasher small input", pedersen_small);
+    c.bench_function("Pedersen Hasher big input", pedersen_big);
+}
From 1450127774a299e3365c77d438dd71d2a3c70ece Mon Sep 17 00:00:00 2001
From: Igor Aleksanov
Date: Mon, 30 Mar 2020 08:53:29 +0300
Subject: [PATCH 144/186] Add initial benches skeleton for plasma

---
 Cargo.lock                                    |  1 +
 .../models/src/merkle_tree/pedersen_hasher.rs |  8 ++++++++
 core/plasma/Cargo.toml                        |  7 +++++++
 core/plasma/benches/criterion/lib.rs          |  7 +++++++
 core/plasma/benches/criterion/ops.rs          | 19 +++++++++++++++++++
 core/plasma/src/state.rs                      |  1 +
 6 files changed, 43 insertions(+)
 create mode 100644 core/plasma/benches/criterion/lib.rs
 create mode 100644 core/plasma/benches/criterion/ops.rs

diff --git a/Cargo.lock b/Cargo.lock
index 789932f303..760ff8186c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2300,6 +2300,7 @@ name = "plasma"
 version = "0.1.1"
 dependencies = [
  "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "criterion 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "crypto_exports 0.1.0",
  "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/core/models/src/merkle_tree/pedersen_hasher.rs b/core/models/src/merkle_tree/pedersen_hasher.rs
index b7ab673d99..6157f78231 100644
--- a/core/models/src/merkle_tree/pedersen_hasher.rs
+++ b/core/models/src/merkle_tree/pedersen_hasher.rs
@@ -1,5 +1,7 @@
 // Pedersen hash implementation of the Hasher trait
 
+use std::fmt;
+
 use crate::franklin_crypto::bellman::pairing::ff::PrimeField;
 use crate::franklin_crypto::pedersen_hash::{baby_pedersen_hash, Personalization};
 
@@ -13,6 +15,12 @@ pub struct PedersenHasher<E: JubjubEngine> {
     params: &'static E::Params,
 }
 
+impl<E: JubjubEngine> fmt::Debug for PedersenHasher<E> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PedersenHasher").finish()
+    }
+}
+
 // We have to implement `Clone` manually, since deriving it will depend on
 // the `Clone` implementation of `E::Params` (and will
`.clone()` will not work // if `E::Params` are not `Clone`), which is redundant: we only hold a reference diff --git a/core/plasma/Cargo.toml b/core/plasma/Cargo.toml index 78264d9495..e12be92e89 100644 --- a/core/plasma/Cargo.toml +++ b/core/plasma/Cargo.toml @@ -16,3 +16,10 @@ failure = "0.1" crypto_exports = { path = "../crypto_exports", version = "0.1.0" } web3 = "0.8.0" +[dev-dependencies] +criterion = "0.3.0" + +[[bench]] +name = "criterion" +harness = false +path = "benches/criterion/lib.rs" diff --git a/core/plasma/benches/criterion/lib.rs b/core/plasma/benches/criterion/lib.rs new file mode 100644 index 0000000000..6b375299b0 --- /dev/null +++ b/core/plasma/benches/criterion/lib.rs @@ -0,0 +1,7 @@ +use criterion::criterion_main; + +use ops::ops_benches; + +mod ops; + +criterion_main!(ops_benches); diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs new file mode 100644 index 0000000000..0a63ed42e0 --- /dev/null +++ b/core/plasma/benches/criterion/ops.rs @@ -0,0 +1,19 @@ +// External uses +use criterion::{criterion_group, Criterion}; +// Workspace uses +use models::node::AccountTree; +// Local uses +use plasma::state::PlasmaState; + +/// Creates a `PlasmaState` object and fills it with accounts. +fn generate_state() -> PlasmaState { + let depth = models::params::account_tree_depth() as u32; + + let mut accounts = AccountTree::new(depth); + + PlasmaState::empty() +} + +pub fn bench_ops(_c: &mut Criterion) {} + +criterion_group!(ops_benches, bench_ops); diff --git a/core/plasma/src/state.rs b/core/plasma/src/state.rs index 4098322dd1..5d23e4f97a 100644 --- a/core/plasma/src/state.rs +++ b/core/plasma/src/state.rs @@ -22,6 +22,7 @@ pub struct OpSuccess { pub executed_op: FranklinOp, } +#[derive(Debug, Clone)] pub struct PlasmaState { /// Accounts stored in a sparse Merkle tree balance_tree: AccountTree, From e5e7d12ff26471081f743fb1bd64f08120f47bb9 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 09:03:26 +0300 Subject: [PATCH 145/186] Save ongoing op before sending it --- core/server/src/eth_sender/mod.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 57553b6bff..983cb33bf9 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -200,15 +200,18 @@ impl ETHSender { /// 2. Sifts all the ongoing operations, filtering the completed ones and /// managing the rest (e.g. by sending a supplement txs for stuck operations). fn proceed_next_operations(&mut self) { + // Queue for storing all the operations that were not finished at this iteration. + let mut new_ongoing_ops = VecDeque::new(); + while let Some(tx) = self.tx_queue.pop_front() { self.initialize_operation(tx).unwrap_or_else(|e| { warn!("Error while trying to complete uncommitted op: {}", e); + + // TODO: We should not forget about the tx here. }); } // Commit the next operations (if any). - let mut new_ongoing_ops = VecDeque::new(); - while let Some(mut current_op) = self.ongoing_ops.pop_front() { // We perform a commitment step here. In case of error, we suppose that this is some // network issue which won't appear the next time, so we report the situation to the @@ -289,6 +292,10 @@ impl ETHSender { new_op.used_tx_hashes.push(signed_tx.hash); self.db.add_hash_entry(new_op.id, &signed_tx.hash)?; + // We should store the operation as `ongoing` **before** sending it as well, + // so if sending will fail, we won't forget about it. 
+ self.ongoing_ops.push_back(new_op.clone()); + // After storing all the tx data in the database, we can finally send the tx. info!( "Sending new tx: [ETH Operation . Tx hash: <{:#x}>. ZKSync operation: {}]", @@ -296,8 +303,6 @@ impl ETHSender { ); self.ethereum.send_tx(&signed_tx)?; - self.ongoing_ops.push_back(new_op); - Ok(()) } From 62a007354a5ee62ce4844625ff990b5f35ee94e4 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Mon, 30 Mar 2020 09:05:15 +0300 Subject: [PATCH 146/186] for bench --- core/circuit/src/witness/deposit.rs | 145 +++++++++++++++------------- 1 file changed, 77 insertions(+), 68 deletions(-) diff --git a/core/circuit/src/witness/deposit.rs b/core/circuit/src/witness/deposit.rs index e3a8684069..1e7801fa06 100644 --- a/core/circuit/src/witness/deposit.rs +++ b/core/circuit/src/witness/deposit.rs @@ -304,6 +304,7 @@ mod test { use crate::witness::test_utils::{check_circuit, test_genesis_plasma_state}; use bigdecimal::BigDecimal; use models::node::{Account, Deposit}; + use std::time::Instant; #[test] #[ignore] @@ -546,43 +547,43 @@ mod test { // c.clone().synthesize(&mut transpiler).unwrap(); + let timer = Instant::now(); let mut hints = transpile::(c.clone()).expect("transpilation is successful"); - let mut tmp_buff = Vec::new(); - write_transpilation_hints(&hints, &mut tmp_buff).expect("hint write"); - hints = read_transpilation_hints(tmp_buff.as_slice()).expect("hint read"); - - let mut hints_hist = std::collections::HashMap::new(); - hints_hist.insert("into addition gate".to_owned(), 0); - hints_hist.insert("merge LC".to_owned(), 0); - hints_hist.insert("into quadratic gate".to_owned(), 0); - hints_hist.insert("into multiplication gate".to_owned(), 0); - - use crate::franklin_crypto::bellman::plonk::better_cs::adaptor::TranspilationVariant; - - for (_, h) in hints.iter() { - match h { - TranspilationVariant::IntoQuadraticGate => { - *hints_hist - .get_mut(&"into quadratic gate".to_owned()) - .unwrap() += 1; - } - TranspilationVariant::MergeLinearCombinations(..) => { - *hints_hist.get_mut(&"merge LC".to_owned()).unwrap() += 1; - } - TranspilationVariant::IntoAdditionGate(..) => { - *hints_hist - .get_mut(&"into addition gate".to_owned()) - .unwrap() += 1; - } - TranspilationVariant::IntoMultiplicationGate(..) => { - *hints_hist - .get_mut(&"into multiplication gate".to_owned()) - .unwrap() += 1; - } - } - } - - println!("Transpilation hist = {:?}", hints_hist); + println!("Transpilation time: {}s", timer.elapsed().as_secs()); + + // + // let mut hints_hist = std::collections::HashMap::new(); + // hints_hist.insert("into addition gate".to_owned(), 0); + // hints_hist.insert("merge LC".to_owned(), 0); + // hints_hist.insert("into quadratic gate".to_owned(), 0); + // hints_hist.insert("into multiplication gate".to_owned(), 0); + // + // use crate::franklin_crypto::bellman::plonk::better_cs::adaptor::TranspilationVariant; + // + // for (_, h) in hints.iter() { + // match h { + // TranspilationVariant::IntoQuadraticGate => { + // *hints_hist + // .get_mut(&"into quadratic gate".to_owned()) + // .unwrap() += 1; + // } + // TranspilationVariant::MergeLinearCombinations(..) => { + // *hints_hist.get_mut(&"merge LC".to_owned()).unwrap() += 1; + // } + // TranspilationVariant::IntoAdditionGate(..) => { + // *hints_hist + // .get_mut(&"into addition gate".to_owned()) + // .unwrap() += 1; + // } + // TranspilationVariant::IntoMultiplicationGate(..) 
=> { + // *hints_hist + // .get_mut(&"into multiplication gate".to_owned()) + // .unwrap() += 1; + // } + // } + // } + + // println!("Transpilation hist = {:?}", hints_hist); println!("Done transpiling"); @@ -594,29 +595,37 @@ mod test { println!("Done checking if satisfied"); + let timer = Instant::now(); let mut setup = setup(c.clone(), &hints).expect("must make setup"); - tmp_buff = Vec::new(); - setup.write(&mut tmp_buff).expect("setup write"); - setup = SetupPolynomials::read(tmp_buff.as_slice()).expect("setup read"); + println!("Setup generated time: {}s", timer.elapsed().as_secs()); - println!("Made into {} gates", setup.n); let size = setup.n.next_power_of_two(); + println!("Power of two {}", size); + let size_log2 = size.trailing_zeros(); + assert!(size_log2 <= 26, "power of two too big"); + let timer = Instant::now(); let mut monomial_form_reader = std::io::BufReader::with_capacity( - 1 << 24, - std::fs::File::open(format!("{}/setup_2^22.key", universal_setup_path)).unwrap(), + 1 << 29, + std::fs::File::open(format!( + "{}/setup_2^{}.key", + universal_setup_path, size_log2 + )) + .unwrap(), ); - let mut lagrange_form_reader = std::io::BufReader::with_capacity( - 1 << 24, - std::fs::File::open(format!("{}/setup_2^22_lagrange.key", universal_setup_path)) - .unwrap(), + 1 << 29, + std::fs::File::open(format!( + "{}/setup_2^{}_lagrange.key", + universal_setup_path, size_log2 + )) + .unwrap(), ); - let key_monomial_form = Crs::::read(&mut monomial_form_reader).unwrap(); let key_lagrange_form = Crs::::read(&mut lagrange_form_reader).unwrap(); + println!("Setup files read: {}s", timer.elapsed().as_secs()); // let worker = Worker::new(); @@ -626,23 +635,31 @@ mod test { // let key_monomial_form = Crs::::dummy_crs(size); // let key_lagrange_form = Crs::::dummy_crs(size); + let timer = Instant::now(); let mut verification_key = make_verification_key(&setup, &key_monomial_form) .expect("must make a verification key"); - tmp_buff = Vec::new(); + println!("Verification key generated: {}s", timer.elapsed().as_secs()); + + // tmp_buff = Vec::new(); verification_key - .write(&mut tmp_buff) - .expect("verification key write"); - verification_key = - VerificationKey::read(tmp_buff.as_slice()).expect("verification key read"); + .write( + std::fs::File::create(format!("{}/verification.key", param_save_path)) + .expect("ver key file create"), + ) + .expect("ver key serialize"); + // verification_key = + // VerificationKey::read(tmp_buff.as_slice()).expect("verification key read"); + let timer = Instant::now(); let mut precomputations = make_precomputations(&setup).expect("must make precomputations for proving"); - tmp_buff = Vec::new(); - precomputations - .write(&mut tmp_buff) - .expect("precomputation write"); - precomputations = SetupPolynomialsPrecomputations::read(tmp_buff.as_slice()) - .expect("precomputation read"); + println!("Precomputations generated: {}s", timer.elapsed().as_secs()); + // tmp_buff = Vec::new(); + // precomputations + // .write(&mut tmp_buff) + // .expect("precomputation write"); + // precomputations = SetupPolynomialsPrecomputations::read(tmp_buff.as_slice()) + // .expect("precomputation read"); use crate::franklin_crypto::bellman::plonk::fft::cooley_tukey_ntt::*; @@ -650,6 +667,7 @@ mod test { let omegas_inv_bitreversed = as CTPrecomputations>::new_for_domain_size(size); + let timer = Instant::now(); let mut proof = prove_from_recomputations::<_, _, RollingKeccakTranscript, _, _>( c.clone(), &hints, @@ -661,16 +679,7 @@ mod test { &key_lagrange_form, ) .expect("must make 
a proof"); - - let mut proof_writer = std::io::BufWriter::with_capacity( - 1<<24, - std::fs::File::create("./deposit_proof.proof").unwrap() - ); - proof.write(&mut proof_writer).unwrap(); - - tmp_buff = Vec::new(); - proof.write(&mut tmp_buff).expect("proof write"); - proof = Proof::read(tmp_buff.as_slice()).expect("proof read"); + println!("Proof generated: {}s", timer.elapsed().as_secs()); proof .write( From bfab93d49590870327b41ed1bcbc06e6ff7fd59c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 10:04:19 +0300 Subject: [PATCH 147/186] Perform consecutive db updates in a db transaction --- core/server/src/eth_sender/database.rs | 15 +++++ core/server/src/eth_sender/mod.rs | 73 +++++++++++++----------- core/server/src/eth_sender/tests/mock.rs | 7 +++ core/storage/src/lib.rs | 8 +++ 4 files changed, 69 insertions(+), 34 deletions(-) diff --git a/core/server/src/eth_sender/database.rs b/core/server/src/eth_sender/database.rs index 7c43470345..c064a44aa7 100644 --- a/core/server/src/eth_sender/database.rs +++ b/core/server/src/eth_sender/database.rs @@ -51,10 +51,16 @@ pub(super) trait DatabaseAccess { /// Loads the stored Ethereum operations stats. fn load_stats(&self) -> Result; + + /// Performs several database operations within one database transaction. + fn transaction(&self, f: F) -> Result + where + F: FnOnce() -> Result; } /// The actual database wrapper. /// This structure uses `ConnectionPool` to interact with an existing database. +#[derive(Debug, Clone)] pub struct Database { /// Connection to the database. db_pool: ConnectionPool, @@ -127,4 +133,13 @@ impl DatabaseAccess for Database { let stats = storage.ethereum_schema().load_stats()?; Ok(stats.into()) } + + fn transaction(&self, f: F) -> Result + where + F: FnOnce() -> Result, + { + let storage = self.db_pool.access_storage()?; + + storage.transaction(|| f()) + } } diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 983cb33bf9..90682de067 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -262,35 +262,39 @@ impl ETHSender { let deadline_block = self.get_deadline_block(current_block); let gas_price = self.ethereum.gas_price()?; - // First, we should store the operation in the database and obtain the assigned - // operation ID and nonce. Without them we won't be able to sign the transaction. - let assigned_data = self.db.save_new_eth_tx( - tx.op_type, - tx.operation.clone(), - deadline_block as i64, - gas_price, - tx.raw.clone(), - )?; - - let mut new_op = ETHOperation { - id: assigned_data.id, - op_type: tx.op_type, - op: tx.operation, - nonce: assigned_data.nonce, - last_deadline_block: deadline_block, - last_used_gas_price: gas_price, - used_tx_hashes: vec![], // No hash yet, will be added below. - encoded_tx_data: tx.raw, - confirmed: false, - final_hash: None, - }; + let (new_op, signed_tx) = self.db.transaction(|| { + // First, we should store the operation in the database and obtain the assigned + // operation ID and nonce. Without them we won't be able to sign the transaction. + let assigned_data = self.db.save_new_eth_tx( + tx.op_type, + tx.operation.clone(), + deadline_block as i64, + gas_price, + tx.raw.clone(), + )?; + + let mut new_op = ETHOperation { + id: assigned_data.id, + op_type: tx.op_type, + op: tx.operation, + nonce: assigned_data.nonce, + last_deadline_block: deadline_block, + last_used_gas_price: gas_price, + used_tx_hashes: vec![], // No hash yet, will be added below. 
+ encoded_tx_data: tx.raw, + confirmed: false, + final_hash: None, + }; + + // Sign the transaction. + let signed_tx = Self::sign_new_tx(&self.ethereum, &new_op)?; - // Sign the transaction. - let signed_tx = self.sign_new_tx(&new_op)?; + // With signed tx, update the hash in the operation entry and in the db. + new_op.used_tx_hashes.push(signed_tx.hash); + self.db.add_hash_entry(new_op.id, &signed_tx.hash)?; - // With signed tx, update the hash in the operation entry and in the db. - new_op.used_tx_hashes.push(signed_tx.hash); - self.db.add_hash_entry(new_op.id, &signed_tx.hash)?; + Ok((new_op, signed_tx)) + })?; // We should store the operation as `ongoing` **before** sending it as well, // so if sending will fail, we won't forget about it. @@ -386,9 +390,12 @@ impl ETHSender { // create a new one from the old one with updated parameters. let new_tx = self.create_supplement_tx(deadline_block, op)?; // New transaction should be persisted in the DB *before* sending it. - self.db - .update_eth_tx(op.id, deadline_block as i64, new_tx.gas_price)?; - self.db.add_hash_entry(op.id, &new_tx.hash)?; + self.db.transaction(|| { + self.db + .update_eth_tx(op.id, deadline_block as i64, new_tx.gas_price)?; + self.db.add_hash_entry(op.id, &new_tx.hash)?; + Ok(()) + })?; info!( "Stuck tx processing: sending tx for op, eth_op_id: {} tx_hash: {:#x}, nonce: {}", @@ -460,7 +467,7 @@ impl ETHSender { } /// Creates a new Ethereum operation. - fn sign_new_tx(&self, op: ÐOperation) -> Result { + fn sign_new_tx(ethereum: Ð, op: ÐOperation) -> Result { let tx_options = { let mut options = Options::default(); options.nonce = Some(op.nonce); @@ -468,9 +475,7 @@ impl ETHSender { options }; - let signed_tx = self - .ethereum - .sign_prepared_tx(op.encoded_tx_data.clone(), tx_options)?; + let signed_tx = ethereum.sign_prepared_tx(op.encoded_tx_data.clone(), tx_options)?; Ok(signed_tx) } diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 584ab6e361..81dfbd417d 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -191,6 +191,13 @@ impl DatabaseAccess for MockDatabase { fn load_stats(&self) -> Result { Ok(self.stats.borrow().clone()) } + + fn transaction(&self, f: F) -> Result + where + F: FnOnce() -> Result, + { + f() + } } /// Mock Ethereum client is capable of recording all the incoming requests for the further analysis. diff --git a/core/storage/src/lib.rs b/core/storage/src/lib.rs index bac6350eab..e7cd8dc67f 100644 --- a/core/storage/src/lib.rs +++ b/core/storage/src/lib.rs @@ -157,6 +157,14 @@ impl StorageProcessor { tokens::TokensSchema(self) } + /// Performs several database operations within one database transaction. 
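+    ///
+    /// Usage sketch (illustrative only; the closure body here is hypothetical):
+    ///
+    /// ```ignore
+    /// storage.transaction(|| {
+    ///     // several schema updates that must either all succeed or all roll back
+    ///     Ok(())
+    /// })?;
+    /// ```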
+ pub fn transaction(&self, f: F) -> Result + where + F: FnOnce() -> Result, + { + self.conn().transaction(|| f()) + } + fn conn(&self) -> &RecoverableConnection { match self.conn { ConnectionHolder::Pooled(ref conn) => conn, From b72b2d6f1d38cf81308bf582d82c40e45823f659 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 10:41:33 +0300 Subject: [PATCH 148/186] Return txs failed to initialize to the queue --- core/server/src/eth_sender/mod.rs | 14 ++++++++--- .../src/eth_sender/tx_queue/counter_queue.rs | 12 ++++++++++ core/server/src/eth_sender/tx_queue/mod.rs | 23 ++++++++++++++++++- .../src/eth_sender/tx_queue/sparse_queue.rs | 18 +++++++++++++++ 4 files changed, 63 insertions(+), 4 deletions(-) diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index 90682de067..c805291b6f 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -204,10 +204,12 @@ impl ETHSender { let mut new_ongoing_ops = VecDeque::new(); while let Some(tx) = self.tx_queue.pop_front() { - self.initialize_operation(tx).unwrap_or_else(|e| { + self.initialize_operation(tx.clone()).unwrap_or_else(|e| { warn!("Error while trying to complete uncommitted op: {}", e); - // TODO: We should not forget about the tx here. + // Return the unperformed operation to the queue, since failing the + // operation initialization means that it was not stored in the database. + self.tx_queue.return_popped(tx); }); } @@ -305,7 +307,13 @@ impl ETHSender { "Sending new tx: [ETH Operation . Tx hash: <{:#x}>. ZKSync operation: {}]", new_op.id, new_op.op_type, signed_tx.hash, self.zksync_operation_description(&new_op), ); - self.ethereum.send_tx(&signed_tx)?; + self.ethereum.send_tx(&signed_tx).unwrap_or_else(|e| { + // Sending tx error is not critical: this will result in transaction being considered stuck, + // and resent. We can't do anything about this failure either, since it's most probably is not + // related to the node logic, so we just log this error and pretend to have this operation + // processed. + warn!("Error while sending the operation: {}", e); + }); Ok(()) } diff --git a/core/server/src/eth_sender/tx_queue/counter_queue.rs b/core/server/src/eth_sender/tx_queue/counter_queue.rs index 734735b9b5..cdef69a836 100644 --- a/core/server/src/eth_sender/tx_queue/counter_queue.rs +++ b/core/server/src/eth_sender/tx_queue/counter_queue.rs @@ -27,6 +27,12 @@ impl CounterQueue { } } + /// Returns a previously popped element to the front of the queue. + pub fn return_popped(&mut self, element: T) { + self.elements.push_front(element); + self.counter -= 1; + } + /// Inserts an element to the end of the queue. pub fn push_back(&mut self, element: T) { self.elements.push_back(element); @@ -81,5 +87,11 @@ mod tests { // Now attempt take no value, and check that counter is not increased. assert_eq!(queue.pop_front(), None); assert_eq!(queue.get_count(), 2); + + // Return the popped element back. + queue.return_popped("two".into()); + assert_eq!(queue.get_count(), 1); + assert_eq!(queue.pop_front().unwrap(), "two"); + assert_eq!(queue.get_count(), 2); } } diff --git a/core/server/src/eth_sender/tx_queue/mod.rs b/core/server/src/eth_sender/tx_queue/mod.rs index f6a8258253..b7eab41c2a 100644 --- a/core/server/src/eth_sender/tx_queue/mod.rs +++ b/core/server/src/eth_sender/tx_queue/mod.rs @@ -11,7 +11,7 @@ pub type RawTxData = Vec; /// Representation of the transaction data stored in the queue. 
/// This structure contains only essential fields required for the `eth_sender` /// to create an actual operation. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct TxData { /// Type of the operation. pub op_type: OperationType, @@ -161,6 +161,21 @@ impl TxQueue { self.withdraw_operations.push_back(withdraw_operation); } + /// Returns a previously popped element to the front of the queue. + pub fn return_popped(&mut self, element: TxData) { + match &element.op_type { + OperationType::Commit => { + self.commit_operations.return_popped(element); + } + OperationType::Verify => { + self.verify_operations.return_popped(element); + } + OperationType::Withdraw => { + self.withdraw_operations.return_popped(element); + } + } + } + /// Gets the next transaction to send, according to the transaction sending policy. /// For details, see the structure doc-comment. pub fn pop_front(&mut self) -> Option { @@ -309,5 +324,11 @@ mod tests { // Though the limit is not met (2 txs in fly, and limit is 3), there should be no txs in the queue. assert_eq!(queue.pop_front(), None); + + // Return the operation to the queue. + queue.return_popped(op_6); + + let op_6 = queue.pop_front().unwrap(); + assert_eq!(op_6.raw, vec![WITHDRAW_MARK, 1]); } } diff --git a/core/server/src/eth_sender/tx_queue/sparse_queue.rs b/core/server/src/eth_sender/tx_queue/sparse_queue.rs index b30947d508..ce0cd7717d 100644 --- a/core/server/src/eth_sender/tx_queue/sparse_queue.rs +++ b/core/server/src/eth_sender/tx_queue/sparse_queue.rs @@ -31,6 +31,13 @@ impl SparseQueue { } } + /// Returns a previously popped element to the front of the queue. + pub fn return_popped(&mut self, element: T) { + let popped_index = self.next_expected_idx - 1; + self.elements.insert(popped_index, element); + self.next_expected_idx = popped_index; + } + /// Inserts an element to the queue given its index. pub fn insert(&mut self, idx: usize, element: T) { assert!( @@ -78,19 +85,30 @@ mod tests { // Insert the next element and obtain it. queue.insert(0, "zero".into()); assert!(queue.has_next()); + assert_eq!(queue.next_id(), 0); assert_eq!(queue.pop_front().unwrap(), "zero"); + assert_eq!(queue.next_id(), 1); // Now insert an element with a gap, and check that it won't be yielded. queue.insert(2, "two".into()); assert!(!queue.has_next()); + assert_eq!(queue.next_id(), 1); assert!(queue.pop_front().is_none()); // Now fill the gap and obtain both elements. queue.insert(1, "one".into()); assert!(queue.has_next()); assert_eq!(queue.pop_front().unwrap(), "one"); + assert_eq!(queue.next_id(), 2); assert!(queue.has_next()); assert_eq!(queue.pop_front().unwrap(), "two"); + assert_eq!(queue.next_id(), 3); + + // Return the popped element back. 
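+        // (`return_popped` rewinds `next_expected_idx`, so the same element is yielded again.)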
+ queue.return_popped("two".into()); + assert_eq!(queue.next_id(), 2); + assert_eq!(queue.pop_front().unwrap(), "two"); + assert_eq!(queue.next_id(), 3); } /// Checks that we can use the difference `next_expected_idx` as the custom From ca7b87177eddeecd518bca03eebc48d664c5f666 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Mon, 30 Mar 2020 10:48:10 +0300 Subject: [PATCH 149/186] Add more operations to transactions history tests --- core/models/src/node/tx.rs | 16 ++++---- .../storage/src/tests/chain/operations_ext.rs | 40 ++++++++++++++++++- core/testkit/src/zksync_account.rs | 17 +++++++- 3 files changed, 63 insertions(+), 10 deletions(-) diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index 5ae5e8071d..874d450492 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -351,13 +351,6 @@ pub struct TxSignature { } impl TxSignature { - pub fn default() -> Self { - Self { - pub_key: PackedPublicKey::deserialize_packed(&[0; 32]).unwrap(), - signature: PackedSignature::deserialize_packed(&[0; 64]).unwrap(), - } - } - pub fn verify_musig_pedersen(&self, msg: &[u8]) -> Option> { let hashed_msg = pedersen_hash_tx_msg(msg); let valid = self.pub_key.0.verify_musig_pedersen( @@ -429,6 +422,15 @@ impl TxSignature { } } +impl Default for TxSignature { + fn default() -> Self { + Self { + pub_key: PackedPublicKey::deserialize_packed(&[0; 32]).unwrap(), + signature: PackedSignature::deserialize_packed(&[0; 64]).unwrap(), + } + } +} + impl std::fmt::Debug for TxSignature { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { let hex_pk = hex::encode(&self.pub_key.serialize_packed().unwrap()); diff --git a/core/storage/src/tests/chain/operations_ext.rs b/core/storage/src/tests/chain/operations_ext.rs index 67c5a36355..5cbcbf053f 100644 --- a/core/storage/src/tests/chain/operations_ext.rs +++ b/core/storage/src/tests/chain/operations_ext.rs @@ -5,9 +5,9 @@ use bigdecimal::BigDecimal; // Workspace imports use crypto_exports::franklin_crypto::bellman::pairing::ff::Field; use models::node::block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; -use models::node::operations::FranklinOp; +use models::node::operations::{ChangePubKeyOp, FranklinOp}; use models::node::priority_ops::PriorityOp; -use models::node::{Deposit, DepositOp, Fr, TransferOp, WithdrawOp}; +use models::node::{CloseOp, Deposit, DepositOp, Fr, TransferOp, WithdrawOp}; use testkit::zksync_account::ZksyncAccount; // Local imports use crate::tests::db_test; @@ -106,6 +106,40 @@ fn get_account_transactions_history() { ExecutedOperations::Tx(Box::new(executed_withdraw_op)) }; + let executed_close_op = { + let close_op = FranklinOp::Close(Box::new(CloseOp { + tx: from_zksync_account.sign_close(None, false), + account_id: from_account_id, + })); + + let executed_close_op = ExecutedTx { + tx: close_op.try_get_tx().unwrap(), + success: true, + op: Some(close_op), + fail_reason: None, + block_index: None, + }; + + ExecutedOperations::Tx(Box::new(executed_close_op)) + }; + + let executed_change_pubkey_op = { + let change_pubkey_op = FranklinOp::ChangePubKeyOffchain(Box::new(ChangePubKeyOp { + tx: from_zksync_account.create_change_pubkey_tx(None, false, false), + account_id: from_account_id, + })); + + let executed_change_pubkey_op = ExecutedTx { + tx: change_pubkey_op.try_get_tx().unwrap(), + success: true, + op: Some(change_pubkey_op), + fail_reason: None, + block_index: None, + }; + + ExecutedOperations::Tx(Box::new(executed_change_pubkey_op)) + }; + let block = Block 
{ block_number: 1, new_root_hash: Fr::zero(), @@ -114,6 +148,8 @@ fn get_account_transactions_history() { executed_deposit_op, executed_transfer_op, executed_withdraw_op, + executed_close_op, + executed_change_pubkey_op, ], processed_priority_ops: (0, 0), // Not important }; diff --git a/core/testkit/src/zksync_account.rs b/core/testkit/src/zksync_account.rs index 62c8b9dc41..fa54bf69b9 100644 --- a/core/testkit/src/zksync_account.rs +++ b/core/testkit/src/zksync_account.rs @@ -2,7 +2,7 @@ use bigdecimal::BigDecimal; use crypto_exports::rand::{thread_rng, Rng}; use models::node::tx::{ChangePubKey, PackedEthSignature, TxSignature}; use models::node::{ - priv_key_from_fs, Address, Nonce, PrivateKey, PubKeyHash, TokenId, Transfer, Withdraw, + priv_key_from_fs, Address, Close, Nonce, PrivateKey, PubKeyHash, TokenId, Transfer, Withdraw, }; use std::sync::Mutex; use web3::types::H256; @@ -119,6 +119,21 @@ impl ZksyncAccount { withdraw } + pub fn sign_close(&self, nonce: Option, increment_nonce: bool) -> Close { + let mut stored_nonce = self.nonce.lock().unwrap(); + let mut close = Close { + account: self.address, + nonce: nonce.unwrap_or_else(|| *stored_nonce), + signature: TxSignature::default(), + }; + close.signature = TxSignature::sign_musig_sha256(&self.private_key, &close.get_bytes()); + + if increment_nonce { + *stored_nonce += 1; + } + close + } + pub fn create_change_pubkey_tx( &self, nonce: Option, From 3ade9f57c20112ee28dff04fb31101211ec5606d Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Mon, 30 Mar 2020 11:00:54 +0300 Subject: [PATCH 150/186] Revert removing '\' from queries --- core/storage/src/chain/block/mod.rs | 114 ++++++++++++++-------------- 1 file changed, 57 insertions(+), 57 deletions(-) diff --git a/core/storage/src/chain/block/mod.rs b/core/storage/src/chain/block/mod.rs index 8c2fac09c9..61d9a1dc64 100644 --- a/core/storage/src/chain/block/mod.rs +++ b/core/storage/src/chain/block/mod.rs @@ -231,33 +231,33 @@ impl<'a> BlockSchema<'a> { // and verified operations; // - collects the {limit} blocks in the descending order with the data gathered above. 
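        // Note: the trailing backslashes inside the string literal below are Rust
        // line continuations; they keep the statement as a single line, without
        // embedded newlines or indentation, when it is sent to the database.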
let query = format!( - " - with eth_ops as ( - select - operations.block_number, - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, - operations.action_type, - operations.created_at - from operations - left join eth_operations on eth_operations.op_id = operations.id - ) - select - blocks.number as block_number, - blocks.root_hash as new_state_root, - blocks.block_size as block_size, - committed.tx_hash as commit_tx_hash, - verified.tx_hash as verify_tx_hash, - committed.created_at as committed_at, - verified.created_at as verified_at - from blocks - inner join eth_ops committed on - committed.block_number = blocks.number and committed.action_type = 'COMMIT' - left join eth_ops verified on - verified.block_number = blocks.number and verified.action_type = 'VERIFY' - where - blocks.number <= {max_block} - order by blocks.number desc - limit {limit}; + " \ + with eth_ops as ( \ + select \ + operations.block_number, \ + '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ + operations.action_type, \ + operations.created_at \ + from operations \ + left join eth_operations on eth_operations.op_id = operations.id \ + ) \ + select \ + blocks.number as block_number, \ + blocks.root_hash as new_state_root, \ + blocks.block_size as block_size, \ + committed.tx_hash as commit_tx_hash, \ + verified.tx_hash as verify_tx_hash, \ + committed.created_at as committed_at, \ + verified.created_at as verified_at \ + from blocks \ + inner join eth_ops committed on \ + committed.block_number = blocks.number and committed.action_type = 'COMMIT' \ + left join eth_ops verified on \ + verified.block_number = blocks.number and verified.action_type = 'VERIFY' \ + where \ + blocks.number <= {max_block} \ + order by blocks.number desc \ + limit {limit}; \ ", max_block = i64::from(max_block), limit = i64::from(limit) @@ -286,36 +286,36 @@ impl<'a> BlockSchema<'a> { // + query equals to the state hash obtained in the block (in form of `sync-bl:00{..}00`); // + query equals to the number of the block. 
let sql_query = format!( - " - with eth_ops as ( - select - operations.block_number, - '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, - operations.action_type, - operations.created_at - from operations - left join eth_operations on eth_operations.op_id = operations.id - ) - select - blocks.number as block_number, - blocks.root_hash as new_state_root, - blocks.block_size as block_size, - committed.tx_hash as commit_tx_hash, - verified.tx_hash as verify_tx_hash, - committed.created_at as committed_at, - verified.created_at as verified_at - from blocks - inner join eth_ops committed on - committed.block_number = blocks.number and committed.action_type = 'COMMIT' - left join eth_ops verified on - verified.block_number = blocks.number and verified.action_type = 'VERIFY' - where false - or lower(committed.tx_hash) = $1 - or lower(verified.tx_hash) = $1 - or lower(blocks.root_hash) = $1 - or blocks.number = {block_number} - order by blocks.number desc - limit 1; + " \ + with eth_ops as ( \ + select \ + operations.block_number, \ + '0x' || encode(eth_operations.tx_hash::bytea, 'hex') as tx_hash, \ + operations.action_type, \ + operations.created_at \ + from operations \ + left join eth_operations on eth_operations.op_id = operations.id \ + ) \ + select \ + blocks.number as block_number, \ + blocks.root_hash as new_state_root, \ + blocks.block_size as block_size, \ + committed.tx_hash as commit_tx_hash, \ + verified.tx_hash as verify_tx_hash, \ + committed.created_at as committed_at, \ + verified.created_at as verified_at \ + from blocks \ + inner join eth_ops committed on \ + committed.block_number = blocks.number and committed.action_type = 'COMMIT' \ + left join eth_ops verified on \ + verified.block_number = blocks.number and verified.action_type = 'VERIFY' \ + where false \ + or lower(committed.tx_hash) = $1 \ + or lower(verified.tx_hash) = $1 \ + or lower(blocks.root_hash) = $1 \ + or blocks.number = {block_number} \ + order by blocks.number desc \ + limit 1; \ ", block_number = block_number ); From a232f34e36e0a1d3b8a475b902905c7bf119fb3c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 12:24:58 +0300 Subject: [PATCH 151/186] Configure eth_sender using env variables --- core/eth_client/src/lib.rs | 17 +++ core/models/src/config_options.rs | 128 +++++++++++------- .../src/eth_sender/ethereum_interface.rs | 1 + core/server/src/eth_sender/mod.rs | 30 ++-- core/server/src/eth_sender/tests/mock.rs | 20 +-- core/server/src/eth_sender/tests/mod.rs | 58 ++++---- etc/env/dev.env.example | 7 +- 7 files changed, 156 insertions(+), 105 deletions(-) diff --git a/core/eth_client/src/lib.rs b/core/eth_client/src/lib.rs index 0359fc92cb..2cfdf48b7a 100644 --- a/core/eth_client/src/lib.rs +++ b/core/eth_client/src/lib.rs @@ -1,6 +1,10 @@ #[macro_use] extern crate serde_derive; +// Built-in deps +use std::fmt; + +// External uses use futures::compat::Future01CompatExt; use web3::contract::tokens::Tokenize; use web3::contract::Options; @@ -20,6 +24,19 @@ pub struct ETHClient { pub web3: Web3, } +impl fmt::Debug for ETHClient { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // We do not want to have a private key in the debug representation. 
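+        // Only the non-sensitive fields are listed explicitly below.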
+ + f.debug_struct("ETHClient") + .field("sender_account", &self.sender_account) + .field("contract_addr", &self.contract_addr) + .field("chain_id", &self.chain_id) + .field("gas_price_factor", &self.gas_price_factor) + .finish() + } +} + #[derive(Debug, Clone, PartialEq)] pub struct SignedCallResult { pub raw_tx: Vec, diff --git a/core/models/src/config_options.rs b/core/models/src/config_options.rs index 74e769c91d..7067532dae 100644 --- a/core/models/src/config_options.rs +++ b/core/models/src/config_options.rs @@ -1,8 +1,14 @@ -use crate::node::Address; -use futures::{channel::mpsc, executor::block_on, SinkExt}; +// Built-in deps use std::env; use std::net::SocketAddr; +use std::str::FromStr; +use std::time::Duration; +// External uses +use futures::{channel::mpsc, executor::block_on, SinkExt}; use web3::types::{H160, H256}; +// Local uses +use crate::node::Address; + /// If its placed inside thread::spawn closure it will notify channel when this thread panics. pub struct ThreadPanicNotify(pub mpsc::Sender); @@ -14,11 +20,63 @@ impl Drop for ThreadPanicNotify { } } +/// Obtains the environment variable value. +/// Panics if there is no environment variable with provided name set. fn get_env(name: &str) -> String { env::var(name).unwrap_or_else(|e| panic!("Env var {} missing, {}", name, e)) } -#[derive(Clone)] +/// Obtains the environment variable value and parses it using the `FromStr` type implementation. +/// Panics if there is no environment variable with provided name set, or the value cannot be parsed. +fn parse_env(name: &str) -> F +where + F: FromStr, + F::Err: std::fmt::Debug, +{ + get_env(name) + .parse() + .unwrap_or_else(|e| panic!("Failed to parse environment variable {}: {:?}", name, e)) +} + +/// Similar to `parse_env`, but also takes a function to change the variable value before parsing. +fn parse_env_with(name: &str, f: F) -> T +where + T: FromStr, + T::Err: std::fmt::Debug, + F: FnOnce(&str) -> &str, +{ + let env_var = get_env(name); + + f(&env_var) + .parse() + .unwrap_or_else(|e| panic!("Failed to parse environment variable {}: {:?}", name, e)) +} + +/// Configuration options for `eth_sender`. +#[derive(Debug, Clone)] +pub struct EthSenderOptions { + pub expected_wait_time_block: u64, + pub tx_poll_period: Duration, + pub wait_confirmations: u64, + pub max_txs_in_flight: u64, +} + +impl EthSenderOptions { + /// Parses the `eth_sender` configuration options values from the environment variables. + /// Panics if any of options is missing or has inappropriate value. 
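+    ///
+    /// The variables read here are `ETH_EXPECTED_WAIT_TIME_BLOCK`, `ETH_TX_POLL_PERIOD`
+    /// (in seconds), `ETH_WAIT_CONFIRMATIONS` and `ETH_MAX_TXS_IN_FLIGHT`; sample values
+    /// can be found in `etc/env/dev.env.example`. Minimal sketch, assuming those
+    /// variables are exported:
+    ///
+    /// ```ignore
+    /// let options = EthSenderOptions::from_env();
+    /// ```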
+ pub fn from_env() -> Self { + let tx_poll_period_secs: u64 = parse_env("ETH_TX_POLL_PERIOD"); + + Self { + expected_wait_time_block: parse_env("ETH_EXPECTED_WAIT_TIME_BLOCK"), + tx_poll_period: Duration::new(tx_poll_period_secs, 0), + wait_confirmations: parse_env("ETH_WAIT_CONFIRMATIONS"), + max_txs_in_flight: parse_env("ETH_MAX_TXS_IN_FLIGHT"), + } + } +} + +#[derive(Debug, Clone)] pub struct ConfigurationOptions { pub rest_api_server_address: SocketAddr, pub json_rpc_http_server_address: SocketAddr, @@ -39,52 +97,26 @@ pub struct ConfigurationOptions { } impl ConfigurationOptions { - pub fn from_env() -> ConfigurationOptions { - ConfigurationOptions { - rest_api_server_address: get_env("REST_API_BIND") - .parse() - .expect("Failed to parse REST_API_BIND bind address"), - json_rpc_http_server_address: get_env("HTTP_RPC_API_BIND") - .parse() - .expect("Failed to parse HTTP_RPC_API_BIND bind address"), - json_rpc_ws_server_address: get_env("WS_API_BIND") - .parse() - .expect("Failed to parse WS_API_BIND bind address"), - contract_eth_addr: get_env("CONTRACT_ADDR")[2..] - .parse() - .expect("Failed to parse CONTRACT_ADDR as ETH contract address"), - contract_genesis_tx_hash: get_env("CONTRACT_GENESIS_TX_HASH")[2..] - .parse() - .expect("Failed to parse CONTRACT_GENESIS_TX_HASH"), + /// Parses the configuration options values from the environment variables. + /// Panics if any of options is missing or has inappropriate value. + pub fn from_env() -> Self { + Self { + rest_api_server_address: parse_env("REST_API_BIND"), + json_rpc_http_server_address: parse_env("HTTP_RPC_API_BIND"), + json_rpc_ws_server_address: parse_env("WS_API_BIND"), + contract_eth_addr: parse_env_with("CONTRACT_ADDR", |s| &s[2..]), + contract_genesis_tx_hash: parse_env_with("CONTRACT_GENESIS_TX_HASH", |s| &s[2..]), web3_url: get_env("WEB3_URL"), - governance_eth_addr: get_env("GOVERNANCE_ADDR")[2..] - .parse() - .expect("Failed to parse GOVERNANCE_ADDR as ETH contract address"), - governance_genesis_tx_hash: get_env("GOVERNANCE_GENESIS_TX_HASH")[2..] - .parse() - .expect("Failed to parse GOVERNANCE_GENESIS_TX_HASH"), - operator_franklin_addr: get_env("OPERATOR_FRANKLIN_ADDRESS")[2..] - .parse() - .expect("Failed to parse OPERATOR_FRANKLIN_ADDRESS"), - operator_eth_addr: get_env("OPERATOR_ETH_ADDRESS")[2..] 
- .parse() - .expect("Failed to parse OPERATOR_ETH_ADDRESS as ETH contract address"), - operator_private_key: get_env("OPERATOR_PRIVATE_KEY") - .parse() - .expect("Failed to parse OPERATOR_ETH_ADDRESS"), - chain_id: get_env("CHAIN_ID").parse().expect("CHAIN_ID invalid value"), - gas_price_factor: get_env("GAS_PRICE_FACTOR") - .parse() - .expect("gas price factor invalid"), - tx_batch_size: get_env("TX_BATCH_SIZE") - .parse() - .expect("TX_BATCH_SIZE invalid value"), - prover_server_address: get_env("PROVER_SERVER_BIND") - .parse() - .expect("Failed to parse PROVER_SERVER_BIND bind address"), - confirmations_for_eth_event: get_env("CONFIRMATIONS_FOR_ETH_EVENT") - .parse() - .expect("Failed to parse CONFIRMATIONS_FOR_ETH_EVENT"), + governance_eth_addr: parse_env_with("GOVERNANCE_ADDR", |s| &s[2..]), + governance_genesis_tx_hash: parse_env_with("GOVERNANCE_GENESIS_TX_HASH", |s| &s[2..]), + operator_franklin_addr: parse_env_with("OPERATOR_FRANKLIN_ADDRESS", |s| &s[2..]), + operator_eth_addr: parse_env_with("OPERATOR_ETH_ADDRESS", |s| &s[2..]), + operator_private_key: parse_env("OPERATOR_PRIVATE_KEY"), + chain_id: parse_env("CHAIN_ID"), + gas_price_factor: parse_env("GAS_PRICE_FACTOR"), + tx_batch_size: parse_env("TX_BATCH_SIZE"), + prover_server_address: parse_env("PROVER_SERVER_BIND"), + confirmations_for_eth_event: parse_env("CONFIRMATIONS_FOR_ETH_EVENT"), } } } diff --git a/core/server/src/eth_sender/ethereum_interface.rs b/core/server/src/eth_sender/ethereum_interface.rs index 997ee390c2..dfb2c43f7f 100644 --- a/core/server/src/eth_sender/ethereum_interface.rs +++ b/core/server/src/eth_sender/ethereum_interface.rs @@ -54,6 +54,7 @@ pub(super) trait EthereumInterface { /// Wrapper over `ETHClient` using `Http` transport. /// Supposed to be an actual Ethereum intermediator for the `ETHSender`. +#[derive(Debug)] pub struct EthereumHttpClient { eth_client: ETHClient, // We have to prevent handle from drop, since it will cause event loop termination. diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs index c805291b6f..1fd24129a7 100644 --- a/core/server/src/eth_sender/mod.rs +++ b/core/server/src/eth_sender/mod.rs @@ -5,8 +5,6 @@ // Built-in deps use std::collections::VecDeque; -use std::str::FromStr; -use std::time::Duration; // External uses use futures::channel::mpsc; use tokio::runtime::Runtime; @@ -16,7 +14,7 @@ use web3::types::{TransactionReceipt, H256, U256}; // Workspace uses use eth_client::SignedCallResult; use models::{ - config_options::{ConfigurationOptions, ThreadPanicNotify}, + config_options::{ConfigurationOptions, EthSenderOptions, ThreadPanicNotify}, ethereum::{ETHOperation, OperationType}, node::config, Action, Operation, @@ -38,10 +36,6 @@ mod tx_queue; #[cfg(test)] mod tests; -const EXPECTED_WAIT_TIME_BLOCKS: u64 = 30; -const TX_POLL_PERIOD: Duration = Duration::from_secs(5); -const WAIT_CONFIRMATIONS: u64 = 1; - /// `TxCheckMode` enum determines the policy on the obtaining the tx status. /// The latest sent transaction can be pending (we're still waiting for it), /// but if there is more than one tx for some Ethereum operation, it means that we @@ -120,11 +114,13 @@ struct ETHSender { op_notify: mpsc::Sender, /// Queue for ordered transaction processing. tx_queue: TxQueue, + /// Settings for the `ETHSender`. 
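+    /// These cover the tx poll period, the expected wait time in blocks, the number
+    /// of confirmations to wait for, and the limit on simultaneously sent transactions.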
+ options: EthSenderOptions, } impl ETHSender { pub fn new( - max_txs_in_flight: usize, + options: EthSenderOptions, db: DB, ethereum: ETH, rx_for_eth: mpsc::Receiver, @@ -136,7 +132,7 @@ impl ETHSender { .load_stats() .expect("Failed loading ETH operations stats"); - let tx_queue = TxQueueBuilder::new(max_txs_in_flight) + let tx_queue = TxQueueBuilder::new(options.max_txs_in_flight as usize) .with_sent_pending_txs(ongoing_ops.len()) .with_commit_operations_count(stats.commit_ops) .with_verify_operations_count(stats.verify_ops) @@ -150,6 +146,7 @@ impl ETHSender { rx_for_eth, op_notify, tx_queue, + options, }; // Add all the unprocessed operations to the queue. @@ -168,7 +165,7 @@ impl ETHSender { /// Main routine of `ETHSender`. pub async fn run(mut self) { - let mut timer = time::interval(TX_POLL_PERIOD); + let mut timer = time::interval(self.options.tx_poll_period); loop { // Update the incoming operations. @@ -426,7 +423,7 @@ impl ETHSender { /// Helper method encapsulating the logic of determining the next deadline block. fn get_deadline_block(&self, current_block: u64) -> u64 { - current_block + EXPECTED_WAIT_TIME_BLOCKS + current_block + self.options.expected_wait_time_block } /// Looks up for a transaction state on the Ethereum chain @@ -444,7 +441,7 @@ impl ETHSender { // Successful execution. Some(status) if status.success => { // Check if transaction has enough confirmations. - if status.confirmations >= WAIT_CONFIRMATIONS { + if status.confirmations >= self.options.wait_confirmations { TxCheckOutcome::Committed } else { TxCheckOutcome::Pending @@ -625,11 +622,6 @@ pub fn start_eth_sender( send_requst_receiver: mpsc::Receiver, config_options: ConfigurationOptions, ) { - let max_txs_in_flight = - std::env::var("ETH_MAX_TXS_IN_FLIGHT").expect("ETH_MAX_TXS_IN_FLIGHT env variable missing"); - let max_txs_in_flight = usize::from_str(&max_txs_in_flight) - .expect("ETH_MAX_TXS_IN_FLIGHT env variable has invalid value"); - std::thread::Builder::new() .name("eth_sender".to_string()) .spawn(move || { @@ -640,9 +632,11 @@ pub fn start_eth_sender( let db = Database::new(pool); + let eth_sender_options = EthSenderOptions::from_env(); + let mut runtime = Runtime::new().expect("eth-sender-runtime"); let eth_sender = ETHSender::new( - max_txs_in_flight, + eth_sender_options, db, ethereum, send_requst_receiver, diff --git a/core/server/src/eth_sender/tests/mock.rs b/core/server/src/eth_sender/tests/mock.rs index 81dfbd417d..3d3f1b1795 100644 --- a/core/server/src/eth_sender/tests/mock.rs +++ b/core/server/src/eth_sender/tests/mock.rs @@ -10,6 +10,7 @@ use web3::types::{H256, U256}; // Workspace uses use eth_client::SignedCallResult; use models::{ + config_options::EthSenderOptions, ethereum::{ETHOperation, EthOpId, InsertedOperationResponse, OperationType}, Action, Operation, }; @@ -337,7 +338,7 @@ pub(super) fn default_eth_sender() -> ( /// which supports multiple transactions in flight. /// Returns the `ETHSender` itself along with communication channels to interact with it. pub(super) fn concurrent_eth_sender( - max_txs_in_flight: usize, + max_txs_in_flight: u64, ) -> ( ETHSender, mpsc::Sender, @@ -356,14 +357,14 @@ pub(super) fn restored_eth_sender( mpsc::Sender, mpsc::Receiver, ) { - const MAX_TXS_IN_FLIGHT: usize = 1; + const MAX_TXS_IN_FLIGHT: u64 = 1; build_eth_sender(MAX_TXS_IN_FLIGHT, restore_state, stats) } /// Helper method for configurable creation of `ETHSender`. 
fn build_eth_sender( - max_txs_in_flight: usize, + max_txs_in_flight: u64, restore_state: impl IntoIterator, stats: ETHStats, ) -> ( @@ -377,13 +378,14 @@ fn build_eth_sender( let (operation_sender, operation_receiver) = mpsc::channel(CHANNEL_CAPACITY); let (notify_sender, notify_receiver) = mpsc::channel(CHANNEL_CAPACITY); - let eth_sender = ETHSender::new( + let options = EthSenderOptions { max_txs_in_flight, - db, - ethereum, - operation_receiver, - notify_sender, - ); + expected_wait_time_block: super::EXPECTED_WAIT_TIME_BLOCKS, + wait_confirmations: super::WAIT_CONFIRMATIONS, + tx_poll_period: Default::default(), + }; + + let eth_sender = ETHSender::new(options, db, ethereum, operation_receiver, notify_sender); (eth_sender, operation_sender, notify_receiver) } diff --git a/core/server/src/eth_sender/tests/mod.rs b/core/server/src/eth_sender/tests/mod.rs index 1814cbacdb..7b34590dda 100644 --- a/core/server/src/eth_sender/tests/mod.rs +++ b/core/server/src/eth_sender/tests/mod.rs @@ -11,6 +11,9 @@ use super::{ ETHSender, TxCheckMode, }; +const EXPECTED_WAIT_TIME_BLOCKS: u64 = 30; +const WAIT_CONFIRMATIONS: u64 = 1; + mod mock; mod test_data; @@ -46,13 +49,10 @@ fn scale_gas() { fn deadline_block() { let (eth_sender, _, _) = default_eth_sender(); - assert_eq!( - eth_sender.get_deadline_block(0), - super::EXPECTED_WAIT_TIME_BLOCKS - ); + assert_eq!(eth_sender.get_deadline_block(0), EXPECTED_WAIT_TIME_BLOCKS); assert_eq!( eth_sender.get_deadline_block(10), - 10 + super::EXPECTED_WAIT_TIME_BLOCKS + 10 + EXPECTED_WAIT_TIME_BLOCKS ); } @@ -82,7 +82,7 @@ fn transaction_state() { // Committed operation. let committed_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS, + confirmations: WAIT_CONFIRMATIONS, success: true, receipt: None, }; @@ -92,7 +92,7 @@ fn transaction_state() { // Pending operation. let pending_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS - 1, + confirmations: WAIT_CONFIRMATIONS - 1, success: true, receipt: None, }; @@ -102,7 +102,7 @@ fn transaction_state() { // Failed operation. let failed_response = ExecutedTxStatus { - confirmations: super::WAIT_CONFIRMATIONS, + confirmations: WAIT_CONFIRMATIONS, success: false, receipt: Some(Default::default()), }; @@ -158,7 +158,7 @@ fn transaction_state() { TxCheckMode::Latest, &operations[3], &operations[3].used_tx_hashes[0], - current_block + super::EXPECTED_WAIT_TIME_BLOCKS + current_block + EXPECTED_WAIT_TIME_BLOCKS ) .unwrap(), TxCheckOutcome::Stuck @@ -171,7 +171,7 @@ fn transaction_state() { TxCheckMode::Latest, &operations[4], &operations[4].used_tx_hashes[0], - current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 + current_block + EXPECTED_WAIT_TIME_BLOCKS - 1 ) .unwrap(), TxCheckOutcome::Pending @@ -184,7 +184,7 @@ fn transaction_state() { TxCheckMode::Old, &operations[4], &operations[4].used_tx_hashes[0], - current_block + super::EXPECTED_WAIT_TIME_BLOCKS - 1 + current_block + EXPECTED_WAIT_TIME_BLOCKS - 1 ) .unwrap(), TxCheckOutcome::Stuck @@ -242,7 +242,7 @@ fn operation_commitment_workflow() { // operation again. eth_sender .ethereum - .add_successfull_execution(expected_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + .add_successfull_execution(expected_tx.used_tx_hashes[0], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Check that operation is confirmed. @@ -268,7 +268,7 @@ fn operation_commitment_workflow() { // Mark `completeWithdrawals` as completed. 
eth_sender .ethereum - .add_successfull_execution(withdraw_op_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + .add_successfull_execution(withdraw_op_tx.used_tx_hashes[0], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Check that `completeWithdrawals` is completed in the DB. @@ -305,7 +305,7 @@ fn stuck_transaction() { let mut stuck_tx = create_signed_tx(eth_op_id, ð_sender, &operation, deadline_block, nonce); // Skip some blocks and expect sender to send a new tx. - eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; + eth_sender.ethereum.block_number += EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operations(); // Check that new transaction is sent (and created based on the previous stuck tx). @@ -322,7 +322,7 @@ fn stuck_transaction() { // operation again. eth_sender .ethereum - .add_successfull_execution(stuck_tx.used_tx_hashes[1], super::WAIT_CONFIRMATIONS); + .add_successfull_execution(stuck_tx.used_tx_hashes[1], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Check that operation is confirmed (we set the final hash to the second sent tx). @@ -360,7 +360,7 @@ fn operations_order() { commit_operations.iter().zip(verify_operations).enumerate() { // Create the commit operation. - let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; + let start_block = 1 + WAIT_CONFIRMATIONS * (idx * 3) as u64; let deadline_block = eth_sender.get_deadline_block(start_block); let eth_op_idx = (idx * 3) as i64; let nonce = eth_op_idx; @@ -376,7 +376,7 @@ fn operations_order() { expected_txs.push(commit_op_tx); // Create the verify operation, as by priority it will be processed right after `commit`. - let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; + let start_block = 1 + WAIT_CONFIRMATIONS * (idx * 3 + 1) as u64; let deadline_block = eth_sender.get_deadline_block(start_block); let eth_op_idx = (idx * 3 + 1) as i64; let nonce = eth_op_idx; @@ -392,7 +392,7 @@ fn operations_order() { expected_txs.push(verify_op_tx); // Create the withdraw operation. - let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; + let start_block = 1 + WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; let deadline_block = eth_sender.get_deadline_block(start_block); let eth_op_idx = (idx * 3 + 2) as i64; let nonce = eth_op_idx; @@ -421,7 +421,7 @@ fn operations_order() { // Mark the tx as successfully eth_sender .ethereum - .add_successfull_execution(current_tx_hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(current_tx_hash, WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Update the fields in the tx and check if it's confirmed. 
@@ -456,7 +456,7 @@ fn transaction_failure() { eth_sender .ethereum - .add_failed_execution(&failing_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + .add_failed_execution(&failing_tx.used_tx_hashes[0], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); } @@ -512,7 +512,7 @@ fn restore_state() { eth_sender .ethereum - .add_successfull_execution(expected_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + .add_successfull_execution(expected_tx.used_tx_hashes[0], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); expected_tx.confirmed = true; @@ -544,7 +544,7 @@ fn confirmations_independence() { let deadline_block = eth_sender.get_deadline_block(eth_sender.ethereum.block_number); let mut stuck_tx = create_signed_tx(eth_op_id, ð_sender, &operation, deadline_block, nonce); - eth_sender.ethereum.block_number += super::EXPECTED_WAIT_TIME_BLOCKS; + eth_sender.ethereum.block_number += EXPECTED_WAIT_TIME_BLOCKS; eth_sender.proceed_next_operations(); let next_tx = eth_sender @@ -559,7 +559,7 @@ fn confirmations_independence() { // Add a confirmation for a *stuck* transaction. eth_sender .ethereum - .add_successfull_execution(stuck_tx.used_tx_hashes[0], super::WAIT_CONFIRMATIONS); + .add_successfull_execution(stuck_tx.used_tx_hashes[0], WAIT_CONFIRMATIONS); eth_sender.proceed_next_operations(); // Check that operation is confirmed (we set the final hash to the *first* sent tx). @@ -572,7 +572,7 @@ fn confirmations_independence() { /// to use 3 transactions in flight, and checks that they are being sent concurrently. #[test] fn concurrent_operations_order() { - const MAX_TXS_IN_FLIGHT: usize = 3; + const MAX_TXS_IN_FLIGHT: u64 = 3; let (mut eth_sender, mut sender, mut receiver) = concurrent_eth_sender(MAX_TXS_IN_FLIGHT); // We send multiple the operations at once to the channel. @@ -595,7 +595,7 @@ fn concurrent_operations_order() { // thus the deadline block is the same for them. // However, withdraw operation will be sent after these txs are confirmed, // so it will have a different deadline block, - let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3) as u64; + let start_block = 1 + WAIT_CONFIRMATIONS * (idx * 3) as u64; let deadline_block = eth_sender.get_deadline_block(start_block); // Create the commit operation. @@ -627,7 +627,7 @@ fn concurrent_operations_order() { expected_txs.push(verify_op_tx); // Create the withdraw operation. - let start_block = 1 + super::WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; + let start_block = 1 + WAIT_CONFIRMATIONS * (idx * 3 + 2) as u64; let deadline_block = eth_sender.get_deadline_block(start_block); let eth_op_idx = (idx * 3 + 2) as i64; let nonce = eth_op_idx; @@ -643,7 +643,7 @@ fn concurrent_operations_order() { // Then we go through the operations and check that the order of operations is preserved. // Here we take N txs at each interaction. - for txs in expected_txs.chunks(MAX_TXS_IN_FLIGHT) { + for txs in expected_txs.chunks(MAX_TXS_IN_FLIGHT as usize) { // We send operations by two, so the order will be "commit-verify-withdraw". // If we'll send all the operations together, the order will be "commit-verify-commit-verify-withdraw", // since withdraw is only sent after verify operation is confirmed. @@ -670,7 +670,7 @@ fn concurrent_operations_order() { // Mark the tx as successfully eth_sender .ethereum - .add_successfull_execution(current_tx_hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(current_tx_hash, WAIT_CONFIRMATIONS); } // Call `proceed_next_operations` again. Both txs should become confirmed. 
@@ -697,7 +697,7 @@ fn concurrent_operations_order() { // Mark the tx as successfully eth_sender .ethereum - .add_successfull_execution(withdraw_tx_hash, super::WAIT_CONFIRMATIONS); + .add_successfull_execution(withdraw_tx_hash, WAIT_CONFIRMATIONS); // Call `proceed_next_operations` again. Withdraw tx should become confirmed. eth_sender.proceed_next_operations(); diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index bef5c90333..037153ea58 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -35,8 +35,13 @@ DATABASE_URL=postgres://postgres@localhost/plasma DB_POOL_SIZE=10 +# `eth_sender` options + +ETH_WAIT_CONFIRMATIONS = 1 +ETH_EXPECTED_WAIT_TIME_BLOCK = 30 +ETH_TX_POLL_PERIOD=3 # Seconds # The maximum amount of simultaneously sent Ethereum transactions. -ETH_MAX_TXS_IN_FLIGHT=3 +ETH_MAX_TXS_IN_FLIGHT=3 PADDING_PUB_KEY="[\"0x18936d8e5f18dc41425e85a25d7a76f63715be4b3c9fac18475d028fca64c740\", \"0x0f933c18160257e0aa54056652e6bc2b8673b31c80cda933421f99dada946bf4\"]" FROM_BLOCK=0 From 438bbde524476cc68d40458b3f4f6df248f3e06e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 12:25:52 +0300 Subject: [PATCH 152/186] Remove bogus spaces --- etc/env/dev.env.example | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 037153ea58..39ad11e00a 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -37,8 +37,8 @@ DB_POOL_SIZE=10 # `eth_sender` options -ETH_WAIT_CONFIRMATIONS = 1 -ETH_EXPECTED_WAIT_TIME_BLOCK = 30 +ETH_WAIT_CONFIRMATIONS=1 +ETH_EXPECTED_WAIT_TIME_BLOCK=30 ETH_TX_POLL_PERIOD=3 # Seconds # The maximum amount of simultaneously sent Ethereum transactions. ETH_MAX_TXS_IN_FLIGHT=3 From 29a65946a63c18f8d4390de97bfaa0982d53b524 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 16:35:21 +0300 Subject: [PATCH 153/186] Fix dev.env.example --- etc/env/dev.env.example | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 39ad11e00a..2372dd63f5 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -38,8 +38,9 @@ DB_POOL_SIZE=10 # `eth_sender` options ETH_WAIT_CONFIRMATIONS=1 -ETH_EXPECTED_WAIT_TIME_BLOCK=30 -ETH_TX_POLL_PERIOD=3 # Seconds +ETH_EXPECTED_WAIT_TIME_BLOCK=30 +# Node polling period in seconds. +ETH_TX_POLL_PERIOD=3 # The maximum amount of simultaneously sent Ethereum transactions. 
ETH_MAX_TXS_IN_FLIGHT=3 From 0abf42f66f4403c3b813f09cc824ad62b768a535 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Mon, 30 Mar 2020 16:37:25 +0300 Subject: [PATCH 154/186] Auto update yarn.lock files by zksync init command --- contracts/yarn.lock | 7 ++++--- js/explorer/yarn.lock | 6 ++++-- js/tests/yarn.lock | 7 ++++--- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/contracts/yarn.lock b/contracts/yarn.lock index f04affb354..9beb54d3e1 100644 --- a/contracts/yarn.lock +++ b/contracts/yarn.lock @@ -7291,9 +7291,10 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync-crypto@link:../js/zksync-crypto": - version "0.0.0" - uid "" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../js/zksync.js": version "0.0.0" diff --git a/js/explorer/yarn.lock b/js/explorer/yarn.lock index 8636a9dd1e..06967ec1dd 100644 --- a/js/explorer/yarn.lock +++ b/js/explorer/yarn.lock @@ -9057,8 +9057,10 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" -zksync-crypto@../zksync-crypto: - version "0.0.0" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index 19bd38e934..81aaac0952 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -534,9 +534,10 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync-crypto@link:../zksync-crypto": - version "0.0.0" - uid "" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../zksync.js": version "0.0.0" From 10e738926521ecb53108a9fe7b223d5396e9c13b Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Mon, 30 Mar 2020 18:02:52 +0300 Subject: [PATCH 155/186] try number 1 --- .drone.yml | 1 + contracts/yarn.lock | 7 ++++--- js/explorer/yarn.lock | 6 ++++-- js/tests/simple-integration-test.ts | 4 ++-- js/tests/yarn.lock | 7 ++++--- 5 files changed, 15 insertions(+), 10 deletions(-) diff --git a/.drone.yml b/.drone.yml index 98e3771c4f..24259eab35 100644 --- a/.drone.yml +++ b/.drone.yml @@ -179,6 +179,7 @@ steps: - export ZKSYNC_HOME=`pwd` - export PATH=$ZKSYNC_HOME/bin:$PATH - export CARGO_HOME=$ZKSYNC_HOME/target/cargo + - sleep 15 - zksync integration-simple depends_on: - start-server-detached diff --git a/contracts/yarn.lock b/contracts/yarn.lock index a36d7ba574..67ff47cbeb 100644 --- a/contracts/yarn.lock +++ b/contracts/yarn.lock @@ -7299,9 +7299,10 @@ yn@^3.0.0: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity 
sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync-crypto@link:../js/zksync-crypto": - version "0.0.0" - uid "" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../js/zksync.js": version "0.0.0" diff --git a/js/explorer/yarn.lock b/js/explorer/yarn.lock index 8636a9dd1e..06967ec1dd 100644 --- a/js/explorer/yarn.lock +++ b/js/explorer/yarn.lock @@ -9057,8 +9057,10 @@ yorkie@^2.0.0: normalize-path "^1.0.0" strip-indent "^2.0.0" -zksync-crypto@../zksync-crypto: - version "0.0.0" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../zksync.js": version "0.0.0" diff --git a/js/tests/simple-integration-test.ts b/js/tests/simple-integration-test.ts index 0107af9cab..0171d33327 100644 --- a/js/tests/simple-integration-test.ts +++ b/js/tests/simple-integration-test.ts @@ -259,9 +259,9 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: "m/44'/60'/0'/0/0" ).connect(ethersProvider); const syncDepositorWallet = ethers.Wallet.createRandom().connect(ethersProvider); - await (await ethWallet.sendTransaction({to: syncDepositorWallet.address, value: parseEther("0.02")})).wait(); + await (await ethWallet.sendTransaction({to: syncDepositorWallet.address, value: parseEther("0.05")})).wait(); const erc20contract = new Contract(ERC_20TOKEN, IERC20_INTERFACE, ethWallet); - await (await erc20contract.transfer(syncDepositorWallet.address, parseEther("0.02"))).wait(); + await (await erc20contract.transfer(syncDepositorWallet.address, parseEther("0.05"))).wait(); const zksyncDepositorWallet = await Wallet.fromEthSigner(syncDepositorWallet, syncProvider); const syncWalletSigner = ethers.Wallet.createRandom().connect(ethersProvider); diff --git a/js/tests/yarn.lock b/js/tests/yarn.lock index 19bd38e934..81aaac0952 100644 --- a/js/tests/yarn.lock +++ b/js/tests/yarn.lock @@ -534,9 +534,10 @@ yn@3.1.1: resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== -"zksync-crypto@link:../zksync-crypto": - version "0.0.0" - uid "" +zksync-crypto@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/zksync-crypto/-/zksync-crypto-0.0.2.tgz#a39925395b8a433e13b2742b8acea874d5e44656" + integrity sha512-dT6KnrNj8MtbhBRk41Xsr8S2DwSOgfWD0Ym8YmWi+gTHKF3l8/4dzCxRj4iTI8AClnhUv//FHnb6g2/ZeaaVGQ== "zksync@link:../zksync.js": version "0.0.0" From 6cd43df1174132678d65a836fcb7e3858c2b6a35 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Mon, 30 Mar 2020 19:10:12 +0300 Subject: [PATCH 156/186] Refactor --- core/models/src/node/tx.rs | 11 +++--- core/server/src/mempool.rs | 77 +++++++++++++++++++++++++++----------- 2 files changed, 61 insertions(+), 27 deletions(-) diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index 5843147aea..f45ab5a97e 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -22,7 +22,6 @@ use crate::primitives::{big_decimal_to_u128, pedersen_hash_tx_msg, u128_to_bigde use 
ethsign::{SecretKey, Signature as ETHSignature};
 use failure::{ensure, format_err};
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
-use std::collections::HashMap;
 use std::convert::TryInto;
 use std::str::FromStr;
 use web3::types::{Address, H256};
@@ -346,16 +345,16 @@ impl FranklinTx {

     /// Returns a message that user has to sign to send the transaction.
     /// If the transaction doesn't need a message signature, returns `None`.
-    /// If any error is encountered during the message generation, returns `jsonrpc_core::Error`.
+    /// If any error is encountered during the message generation, returns `failure::Error`
     pub fn get_tx_info_message_to_sign(
         &self,
-        ids_to_symbols: &HashMap<TokenId, String>,
-    ) -> Result<Option<String>, &'static str> {
+        get_token_symbol: &mut dyn FnMut(TokenId) -> Result<String, failure::Error>,
+    ) -> Result<Option<String>, failure::Error> {
         match self {
             FranklinTx::Transfer(tx) => Ok(Some(format!(
                 "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}",
                 amount = format_ether(&tx.amount),
-                token = ids_to_symbols.get(&tx.token).ok_or("no such symbol")?, // TODO: jazzandrock better message | other error type
+                token = get_token_symbol(tx.token)?,
                 to = tx.to,
                 nonce = tx.nonce,
                 fee = format_ether(&tx.fee),
@@ -363,7 +362,7 @@ impl FranklinTx {
             FranklinTx::Withdraw(tx) => Ok(Some(format!(
                 "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}",
                 amount = format_ether(&tx.amount),
-                token = ids_to_symbols.get(&tx.token).ok_or("no such symbol")?, // TODO: jazzandrock better message | other error type
+                token = get_token_symbol(tx.token)?,
                 to = tx.to,
                 nonce = tx.nonce,
                 fee = format_ether(&tx.fee),
diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs
index 4c0144006c..215f2516e6 100644
--- a/core/server/src/mempool.rs
+++ b/core/server/src/mempool.rs
@@ -15,7 +15,7 @@
 //! on restart mempool restores nonces of the accounts that are stored in the account tree.

 use crate::eth_watch::EthWatchRequest;
-use failure::Fail;
+use failure::{format_err, Fail};
 use futures::channel::{mpsc, oneshot};
 use futures::{SinkExt, StreamExt};
 use models::node::tx::TxEthSignature;
@@ -142,11 +142,7 @@ struct Mempool {
     mempool_state: MempoolState,
     requests: mpsc::Receiver<MempoolRequest>,
     eth_watch_req: mpsc::Sender<EthWatchRequest>,
-
-    // TODO: jazzandrock find a better place to store such cached structs.
-    // Maybe, something like storage scheme but for hashmaps?
-    // if we plan to cache stuff like that more often
-    ids_to_symbols: HashMap<TokenId, String>,
+    token_cache: TokenCache,
 }

 impl Mempool {
@@ -176,10 +172,15 @@ impl Mempool {
             }
         }

-        if let Some(message_to_sign) = tx
-            .get_tx_info_message_to_sign(&self.ids_to_symbols)
-            .or(Err(TxAddError::IncorrectTx))?
-        {
+        let message_to_sign = tx
+            .get_tx_info_message_to_sign(&mut |token_id| {
+                self.token_cache
+                    .token_symbol_from_id(token_id)?
+                    .ok_or_else(|| format_err!("No symbol for TokenId {}", token_id))
+            })
+            .or(Err(TxAddError::IncorrectTx))?;
+
+        if let Some(message_to_sign) = message_to_sign {
             let tx_eth_signature = signature.ok_or(TxAddError::MissingEthSignature)?;
             match tx_eth_signature {
@@ -325,6 +326,49 @@ impl Mempool {
     }
 }

+struct TokenCache {
+    db_pool: ConnectionPool,
+    ids_to_symbols: HashMap<TokenId, String>,
+}
+
+impl TokenCache {
+    pub fn new(db_pool: ConnectionPool) -> Self {
+        Self {
+            db_pool,
+            ids_to_symbols: HashMap::new(),
+        }
+    }
+
+    pub fn token_symbol_from_id(
+        &mut self,
+        token_id: TokenId,
+    ) -> Result<Option<String>, failure::Error> {
+        match self.ids_to_symbols.get(&token_id).cloned() {
+            Some(token_symbol) => Ok(Some(token_symbol)),
+            None => {
+                match self
+                    .db_pool
+                    .access_storage_fragile()
+                    .map_err(|e| format_err!("Failed to access storage: {}", e))?
+                    .tokens_schema()
+                    .load_tokens()
+                    .map_err(|e| format_err!("Tokens load failed: {}", e))?
+                    .get(&token_id)
+                    .cloned()
+                {
+                    Some(token_info) => Ok(Some(
+                        self.ids_to_symbols
+                            .entry(token_id)
+                            .or_insert(token_info.symbol)
+                            .clone(),
+                    )),
+                    None => Ok(None),
+                }
+            }
+        }
+    }
+}
+
 pub fn run_mempool_task(
     db_pool: ConnectionPool,
     requests: mpsc::Receiver<MempoolRequest>,
@@ -333,22 +377,13 @@
 ) {
     let mempool_state = MempoolState::restore_from_db(&db_pool);

-    // TODO: jazzandrock
-    let ids_to_symbols = db_pool
-        .access_storage_fragile()
-        .expect("fragile enough")
-        .tokens_schema()
-        .load_tokens()
-        .expect("tokens load failed")
-        .into_iter()
-        .map(|(key, val)| (key, val.symbol))
-        .collect::<HashMap<_, _>>();
+    let token_cache = TokenCache::new(db_pool);

     let mempool = Mempool {
         mempool_state,
         requests,
         eth_watch_req,
-        ids_to_symbols,
+        token_cache,
     };
     runtime.spawn(mempool.run());
 }

From 6665e7de3e9acb045e083c606c5a17afce304eb8 Mon Sep 17 00:00:00 2001
From: Oleg Syniakevych
Date: Mon, 30 Mar 2020 22:55:56 +0300
Subject: [PATCH 157/186] Refactor

---
 core/models/src/node/tx.rs | 52 ++++++++++++++++++--------------------
 core/server/src/mempool.rs | 25 +++++++++++++-----
 2 files changed, 42 insertions(+), 35 deletions(-)

diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs
index f45ab5a97e..e3bc966702 100644
--- a/core/models/src/node/tx.rs
+++ b/core/models/src/node/tx.rs
@@ -122,6 +122,18 @@ impl Transfer {
             .as_ref()
             .map(PubKeyHash::from_pubkey)
     }
+
+    /// Get message that should be signed by Ethereum keys of the account for 2F authentication.
+    pub fn get_ethereum_sign_message(&self, token_symbol: &str) -> String {
+        format!(
+            "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}",
+            amount = format_ether(&self.amount),
+            token = token_symbol,
+            to = self.to,
+            nonce = self.nonce,
+            fee = format_ether(&self.fee),
+        )
+    }
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -164,6 +176,18 @@ impl Withdraw {
             None
         }
     }
+
+    /// Get message that should be signed by Ethereum keys of the account for 2F authentication.
+    pub fn get_ethereum_sign_message(&self, token_symbol: &str) -> String {
+        format!(
+            "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}",
+            amount = format_ether(&self.amount),
+            token = token_symbol,
+            to = self.to,
+            nonce = self.nonce,
+            fee = format_ether(&self.fee),
+        )
+    }
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -342,34 +366,6 @@ impl FranklinTx {
             _ => false,
         }
     }
-
-    /// Returns a message that user has to sign to send the transaction.
-    /// If the transaction doesn't need a message signature, returns `None`.
- /// If any error is encountered during the message generation, returns `failure::Error` - pub fn get_tx_info_message_to_sign( - &self, - get_token_symbol: &mut dyn FnMut(TokenId) -> Result, - ) -> Result, failure::Error> { - match self { - FranklinTx::Transfer(tx) => Ok(Some(format!( - "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", - amount = format_ether(&tx.amount), - token = get_token_symbol(tx.token)?, - to = tx.to, - nonce = tx.nonce, - fee = format_ether(&tx.fee), - ))), - FranklinTx::Withdraw(tx) => Ok(Some(format!( - "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", - amount = format_ether(&tx.amount), - token = get_token_symbol(tx.token)?, - to = tx.to, - nonce = tx.nonce, - fee = format_ether(&tx.fee), - ))), - _ => Ok(None), - } - } } #[derive(Clone, Serialize, Deserialize)] diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs index 215f2516e6..6909ebcadc 100644 --- a/core/server/src/mempool.rs +++ b/core/server/src/mempool.rs @@ -146,6 +146,13 @@ struct Mempool { } impl Mempool { + fn token_symbol_from_id(&mut self, token_id: TokenId) -> Result { + self.token_cache + .token_symbol_from_id(token_id) + .or(Err(TxAddError::Other))? + .ok_or(TxAddError::IncorrectTx) + } + async fn add_tx( &mut self, tx: FranklinTx, @@ -172,13 +179,17 @@ impl Mempool { } } - let message_to_sign = tx - .get_tx_info_message_to_sign(&mut |token_id| { - self.token_cache - .token_symbol_from_id(token_id)? - .ok_or_else(|| format_err!("No symbol for TokenId {}", token_id)) - }) - .or(Err(TxAddError::IncorrectTx))?; + let message_to_sign = match &tx { + FranklinTx::Transfer(tx) => { + let token_symbol = self.token_symbol_from_id(tx.token)?; + Some(tx.get_ethereum_sign_message(&token_symbol)) + } + FranklinTx::Withdraw(tx) => { + let token_symbol = self.token_symbol_from_id(tx.token)?; + Some(tx.get_ethereum_sign_message(&token_symbol)) + } + _ => None, + }; if let Some(message_to_sign) = message_to_sign { let tx_eth_signature = signature.ok_or(TxAddError::MissingEthSignature)?; From 9f3649abaec55b4322d0e7c3bd22a930a93ffce5 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 31 Mar 2020 00:44:17 +0300 Subject: [PATCH 158/186] Add FullExit to transactions history test --- core/storage/src/chain/operations_ext/mod.rs | 8 ++- .../storage/src/tests/chain/operations_ext.rs | 67 +++++++++++++------ 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/core/storage/src/chain/operations_ext/mod.rs b/core/storage/src/chain/operations_ext/mod.rs index e3005152ed..eeb20a9738 100644 --- a/core/storage/src/chain/operations_ext/mod.rs +++ b/core/storage/src/chain/operations_ext/mod.rs @@ -305,6 +305,8 @@ impl<'a> OperationsExtSchema<'a> { tx->>'from' = '{address}' or tx->>'to' = '{address}' + or + tx->>'account' = '{address}' union all select operation as tx, @@ -318,7 +320,11 @@ impl<'a> OperationsExtSchema<'a> { where operation->'priority_op'->>'from' = '{address}' or - operation->'priority_op'->>'to' = '{address}') t + operation->'priority_op'->>'to' = '{address}' + or + operation->'priority_op'->>'account' = '{address}' + or + operation->'priority_op'->>'eth_address' = '{address}') t order by block_number desc offset diff --git a/core/storage/src/tests/chain/operations_ext.rs b/core/storage/src/tests/chain/operations_ext.rs index 5cbcbf053f..054cb3d3ca 100644 --- a/core/storage/src/tests/chain/operations_ext.rs +++ b/core/storage/src/tests/chain/operations_ext.rs @@ -7,7 +7,7 @@ use 
crypto_exports::franklin_crypto::bellman::pairing::ff::Field; use models::node::block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; use models::node::operations::{ChangePubKeyOp, FranklinOp}; use models::node::priority_ops::PriorityOp; -use models::node::{CloseOp, Deposit, DepositOp, Fr, TransferOp, WithdrawOp}; +use models::node::{CloseOp, Deposit, DepositOp, Fr, FullExit, FullExitOp, TransferOp, WithdrawOp}; use testkit::zksync_account::ZksyncAccount; // Local imports use crate::tests::db_test; @@ -57,6 +57,31 @@ fn get_account_transactions_history() { ExecutedOperations::PriorityOp(Box::new(executed_op)) }; + let executed_full_exit_op = { + let full_exit_op = FranklinOp::FullExit(Box::new(FullExitOp { + priority_op: FullExit { + account_id: from_account_id, + eth_address: from_account_address, + token, + }, + withdraw_amount: Some(amount.clone()), + })); + + let executed_op = ExecutedPriorityOp { + priority_op: PriorityOp { + serial_id: 0, + data: full_exit_op.try_get_priority_op().unwrap(), + deadline_block: 0, + eth_fee: 0.into(), + eth_hash: b"1234567890".to_vec(), + }, + op: full_exit_op, + block_index: 31, + }; + + ExecutedOperations::PriorityOp(Box::new(executed_op)) + }; + let executed_transfer_op = { let transfer_op = FranklinOp::Transfer(Box::new(TransferOp { tx: from_zksync_account.sign_transfer( @@ -88,7 +113,7 @@ fn get_account_transactions_history() { token, amount.clone(), BigDecimal::from(0), - &from_account_address, + &to_account_address, None, true, ), @@ -146,6 +171,7 @@ fn get_account_transactions_history() { fee_account: 0, block_transactions: vec![ executed_deposit_op, + executed_full_exit_op, executed_transfer_op, executed_withdraw_op, executed_close_op, @@ -178,7 +204,7 @@ fn get_account_transactions_history() { "Withdraw", ( Some(from_account_address_string.as_str()), - Some(from_account_address_string.as_str()), + Some(to_account_address_string.as_str()), Some(&token), Some(amount.to_string()), ), @@ -198,25 +224,24 @@ fn get_account_transactions_history() { for tx in &from_history { let tx_type: &str = tx.tx["type"].as_str().expect("no tx_type"); - let (from, to, token, amount) = expected_behavior - .get(tx_type) - .expect("no expected behavior"); - - let tx_info = match tx_type { - "Deposit" => tx.tx["priority_op"].clone(), - _ => tx.tx.clone(), - }; - let tx_from_addr = tx_info["from"].as_str(); - let tx_to_addr = tx_info["to"].as_str(); - let tx_token = tx_info["token"].as_u64().map(|x| x as u16); - let tx_amount = tx_info["amount"].as_str().map(String::from); assert!(tx.hash.is_some()); - assert_eq!(tx_from_addr, *from); - assert_eq!(tx_to_addr, *to); - assert_eq!(tx_token, token.cloned()); - assert_eq!(tx_amount, *amount); + if let Some((from, to, token, amount)) = expected_behavior.get(tx_type) { + let tx_info = match tx_type { + "Deposit" => tx.tx["priority_op"].clone(), + _ => tx.tx.clone(), + }; + let tx_from_addr = tx_info["from"].as_str(); + let tx_to_addr = tx_info["to"].as_str(); + let tx_token = tx_info["token"].as_u64().map(|x| x as u16); + let tx_amount = tx_info["amount"].as_str().map(String::from); + + assert_eq!(tx_from_addr, *from); + assert_eq!(tx_to_addr, *to); + assert_eq!(tx_token, token.cloned()); + assert_eq!(tx_amount, *amount); + } } let to_history = conn @@ -224,8 +249,8 @@ fn get_account_transactions_history() { .operations_ext_schema() .get_account_transactions_history(&to_account_address, 0, 10)?; - assert_eq!(from_history.len(), 3); - assert_eq!(to_history.len(), 2); + assert_eq!(from_history.len(), 6); + 
assert_eq!(to_history.len(), 3);

         Ok(())
     });

From ee557c6364e7f9f410d14db28c48e1ec365396eb Mon Sep 17 00:00:00 2001
From: Igor Aleksanov
Date: Tue, 31 Mar 2020 07:23:00 +0300
Subject: [PATCH 159/186] Improve logs for eth txs

---
 core/server/src/eth_sender/mod.rs | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/core/server/src/eth_sender/mod.rs b/core/server/src/eth_sender/mod.rs
index 1fd24129a7..288185bfa9 100644
--- a/core/server/src/eth_sender/mod.rs
+++ b/core/server/src/eth_sender/mod.rs
@@ -301,8 +301,8 @@ impl ETHSender {

         // After storing all the tx data in the database, we can finally send the tx.
         info!(
-            "Sending new tx: [ETH Operation <id: {}; type: {}>. Tx hash: <{:#x}>. ZKSync operation: {}]",
-            new_op.id, new_op.op_type, signed_tx.hash, self.zksync_operation_description(&new_op),
+            "Sending new tx: [ETH Operation <id: {}; type: {}>. ETH tx: {}. ZKSync operation: {}]",
+            new_op.id, new_op.op_type, self.eth_tx_description(&signed_tx), self.zksync_operation_description(&new_op),
         );
         self.ethereum.send_tx(&signed_tx).unwrap_or_else(|e| {
             // Sending tx error is not critical: this will result in transaction being considered stuck,
@@ -315,6 +315,17 @@ impl ETHSender {
         Ok(())
     }

+    /// Helper method to obtain the string representation of the Ethereum transaction.
+    /// Intended to be used for log entries.
+    fn eth_tx_description(&self, tx: &SignedCallResult) -> String {
+        // Gas price in gwei (wei / 10^9).
+        let gas_price = tx.gas_price / (1_000_000_000);
+        format!(
+            "<hash: {:#x}; gas price: {} gwei; nonce: {}>",
+            tx.hash, gas_price, tx.nonce
+        )
+    }
+
     /// Helper method to obtain the string representation of the ZK Sync operation.
     /// Intended to be used for log entries.
     fn zksync_operation_description(&self, operation: &ETHOperation) -> String {
@@ -403,8 +414,9 @@ impl ETHSender {
         })?;

         info!(
-            "Stuck tx processing: sending tx for op, eth_op_id: {} tx_hash: {:#x}, nonce: {}",
-            op.id, new_tx.hash, new_tx.nonce,
+            "Stuck tx processing: sending tx for op, eth_op_id: {}; ETH tx: {}",
+            op.id,
+            self.eth_tx_description(&new_tx),
         );

         self.ethereum.send_tx(&new_tx)?;

From 80afccc4e3439d64121d40426a5d2af1a3fc4cb2 Mon Sep 17 00:00:00 2001
From: Igor Aleksanov
Date: Tue, 31 Mar 2020 09:40:42 +0300
Subject: [PATCH 160/186] Add bench for PlasmaState::apply_transfer_to_new_op

---
 core/plasma/benches/criterion/ops.rs | 77 +++++++++++++++++++++++++---
 1 file changed, 71 insertions(+), 6 deletions(-)

diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs
index 0a63ed42e0..d80fa8c55d 100644
--- a/core/plasma/benches/criterion/ops.rs
+++ b/core/plasma/benches/criterion/ops.rs
@@ -1,19 +1,84 @@
 // External uses
-use criterion::{criterion_group, Criterion};
+use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion};
 // Workspace uses
-use models::node::AccountTree;
+use crypto_exports::rand::{thread_rng, Rng};
+use models::node::{
+    account::{Account, PubKeyHash},
+    operations::TransferToNewOp,
+    priv_key_from_fs,
+    tx::{Transfer, TxSignature},
+    AccountId, AccountMap, Address, BlockNumber, TokenId,
+};
 // Local uses
 use plasma::state::PlasmaState;

+const ETH_TOKEN_ID: TokenId = 0x00;
+const ACCOUNTS_AMOUNT: AccountId = 1_000;
+const CURRENT_BLOCK: BlockNumber = 1_000;
+
+fn generate_account() -> Account {
+    let default_balance = 1_000_000.into();
+
+    let rng = &mut thread_rng();
+    let sk = priv_key_from_fs(rng.gen());
+
+    let mut account = Account::default();
+    account.pub_key_hash = PubKeyHash::from_privkey(&sk);
+    account.address = Address::random();
+    account.set_balance(ETH_TOKEN_ID,
default_balance); + + account +} + /// Creates a `PlasmaState` object and fills it with accounts. fn generate_state() -> PlasmaState { - let depth = models::params::account_tree_depth() as u32; + let mut accounts = AccountMap::default(); - let mut accounts = AccountTree::new(depth); + for account_id in 0..ACCOUNTS_AMOUNT { + let new_account = generate_account(); - PlasmaState::empty() + accounts.insert(account_id, new_account); + } + + PlasmaState::new(accounts, CURRENT_BLOCK) } -pub fn bench_ops(_c: &mut Criterion) {} +fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let from_account = state.get_account(0).expect("Can't get the account"); + + let transfer = Transfer { + from: from_account.address, + to: Address::random(), + token: ETH_TOKEN_ID, + amount: 10.into(), + fee: 1.into(), + nonce: 0, + signature: TxSignature::default(), + }; + + let transfer_op = TransferToNewOp { + tx: transfer, + from: 0, + to: ACCOUNTS_AMOUNT, + }; + + let setup = || (state.clone(), transfer_op.clone()); + + b.iter_batched( + setup, + |(mut state, transfer_op)| { + state + .apply_transfer_to_new_op(&black_box(transfer_op)) + .expect("Failed transfer operation"); + }, + BatchSize::SmallInput, + ); +} + +pub fn bench_ops(c: &mut Criterion) { + c.bench_function("apply_transfer_to_new_op bench", apply_transfer_to_new_op); +} criterion_group!(ops_benches, bench_ops); From c00bf2a2b76a6683061a3d44aa2567763338094f Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 10:21:34 +0300 Subject: [PATCH 161/186] Add bench for PlasmaState::apply_transfer_op --- core/plasma/benches/criterion/ops.rs | 40 ++++++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 2 deletions(-) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index d80fa8c55d..b68576914e 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -4,7 +4,7 @@ use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion}; use crypto_exports::rand::{thread_rng, Rng}; use models::node::{ account::{Account, PubKeyHash}, - operations::TransferToNewOp, + operations::{TransferOp, TransferToNewOp}, priv_key_from_fs, tx::{Transfer, TxSignature}, AccountId, AccountMap, Address, BlockNumber, TokenId, @@ -13,7 +13,7 @@ use models::node::{ use plasma::state::PlasmaState; const ETH_TOKEN_ID: TokenId = 0x00; -const ACCOUNTS_AMOUNT: AccountId = 1_000; +const ACCOUNTS_AMOUNT: AccountId = 10; const CURRENT_BLOCK: BlockNumber = 1_000; fn generate_account() -> Account { @@ -77,8 +77,44 @@ fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { ); } +fn apply_transfer_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let from_account = state.get_account(0).expect("Can't get the account"); + let to_account = state.get_account(1).expect("Can't get the account"); + + let transfer = Transfer { + from: from_account.address, + to: to_account.address, + token: ETH_TOKEN_ID, + amount: 10.into(), + fee: 1.into(), + nonce: 0, + signature: TxSignature::default(), + }; + + let transfer_op = TransferOp { + tx: transfer, + from: 0, + to: 1, + }; + + let setup = || (state.clone(), transfer_op.clone()); + + b.iter_batched( + setup, + |(mut state, transfer_op)| { + state + .apply_transfer_op(&black_box(transfer_op)) + .expect("Failed transfer operation"); + }, + BatchSize::SmallInput, + ); +} + pub fn bench_ops(c: &mut Criterion) { c.bench_function("apply_transfer_to_new_op bench", apply_transfer_to_new_op); + c.bench_function("apply_transfer_op 
bench", apply_transfer_op); } criterion_group!(ops_benches, bench_ops); From d27ad6c1853b0b7bfb5a411315385333a7f01bf3 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 31 Mar 2020 12:16:49 +0300 Subject: [PATCH 162/186] Show more txs in expolrer --- core/storage/src/chain/operations_ext/mod.rs | 25 +++++++-- js/explorer/src/Account.vue | 10 +++- js/explorer/src/Block.vue | 12 +++-- js/explorer/src/Client.js | 56 +++++++++++++++++--- js/explorer/src/Transaction.vue | 1 + js/tests/full-exit-inegration-test.ts | 1 - 6 files changed, 87 insertions(+), 18 deletions(-) diff --git a/core/storage/src/chain/operations_ext/mod.rs b/core/storage/src/chain/operations_ext/mod.rs index eeb20a9738..9e9b5d477c 100644 --- a/core/storage/src/chain/operations_ext/mod.rs +++ b/core/storage/src/chain/operations_ext/mod.rs @@ -218,11 +218,8 @@ impl<'a> OperationsExtSchema<'a> { let tx_token = operation["priority_op"]["token"] .as_i64() .expect("must be here"); - let tx_amount = operation["priority_op"]["amount"] - .as_str() - .unwrap_or("unknown amount"); - let (tx_from, tx_to, tx_fee) = match tx_type { + let (tx_from, tx_to, tx_fee, tx_amount) = match tx_type { "Deposit" => ( operation["priority_op"]["from"] .as_str() @@ -235,11 +232,31 @@ impl<'a> OperationsExtSchema<'a> { operation["priority_op"]["fee"] .as_str() .map(|v| v.to_string()), + operation["priority_op"]["amount"] + .as_str() + .unwrap_or("unknown amount"), + ), + "FullExit" => ( + operation["priority_op"]["eth_address"] + .as_str() + .unwrap_or("unknown from") + .to_string(), + operation["priority_op"]["eth_address"] + .as_str() + .unwrap_or("unknown to") + .to_string(), + operation["priority_op"]["fee"] + .as_str() + .map(|v| v.to_string()), + operation["withdraw_amount"] + .as_str() + .unwrap_or("unknown amount") ), &_ => ( "unknown from".to_string(), "unknown to".to_string(), Some("unknown fee".to_string()), + "unknown amount", ), }; diff --git a/js/explorer/src/Account.vue b/js/explorer/src/Account.vue index b0c6b82168..aa55b6b58c 100644 --- a/js/explorer/src/Account.vue +++ b/js/explorer/src/Account.vue @@ -236,12 +236,18 @@ export default { const To = ` - ${shortenHash(tx.data.to, 'unknown! to')} + ${ + tx.data.type == "ChangePubKey" + ? '' + : shortenHash(tx.data.to, 'unknown! to') + } `; const Type = `${tx.data.type}`; - const Amount = `${tx.data.token} ${tx.data.amount}`; + const Amount + = tx.data.type == "ChangePubKey" ? '' + : `${tx.data.token} ${tx.data.amount}`; return { Type, diff --git a/js/explorer/src/Block.vue b/js/explorer/src/Block.vue index 3715210c45..fdbdb2905b 100644 --- a/js/explorer/src/Block.vue +++ b/js/explorer/src/Block.vue @@ -199,19 +199,21 @@ export default { to_onchain_icon = `onchain`; token = tx.priority_op.data.token; token = tokens[token].syncSymbol; - amount = `${formatToken(tx.op.withdraw_amount, token)} ${token}`; + amount = `${formatToken(tx.priority_op.withdraw_amount || 0, token)} ${token}`; fee = `${formatToken(tx.priority_op.eth_fee, "ETH")} ETH`; break; default: throw new Error('switch reached default'); } - const from_target = from_explorer_link.startsWith('/') - ? '' + const from_target + = from_explorer_link.startsWith('/') ? '' + : from_explorer_link == '' ? '' : `target="_blank" rel="noopener noreferrer"`; - const to_target = to_explorer_link.startsWith('/') - ? '' + const to_target + = to_explorer_link.startsWith('/') ? '' + : to_explorer_link == '' ? 
'' : `target="_blank" rel="noopener noreferrer"`; return { diff --git a/js/explorer/src/Client.js b/js/explorer/src/Client.js index e6c57ff8e0..42829fb4c0 100644 --- a/js/explorer/src/Client.js +++ b/js/explorer/src/Client.js @@ -147,11 +147,14 @@ export class Client { const type = tx.tx.type || ''; const hash = tx.hash; - const receiver_address = type == 'Deposit' - ? tx.tx.priority_op.to + const to + = type == 'Deposit' ? tx.tx.priority_op.to + : type == 'FullExit' ? tx.tx.priority_op.eth_address + : type == 'Close' ? tx.tx.account + : type == 'ChangePubKey' ? tx.tx.account : tx.tx.to; - const direction = receiver_address == address + const direction = to == address ? 'incoming' : 'outcoming'; @@ -197,7 +200,26 @@ export class Client { data: { ...data, from: tx.tx.priority_op.from, - to: tx.tx.priority_op.to, + to, + pq_id: tx.pq_id, + token, amount, + }, + }; + } + case type == 'FullExit': { + console.log(tx); + const token = await this.tokenNameFromId(tx.tx.priority_op.token); + const amount = readableEther(tx.tx.withdraw_amount || 0); + return { + fields: [ + { key: 'amount', label: 'Amount' }, + { key: 'row_status', label: 'Status' }, + { key: 'pq_id', label: 'Priority op' }, + ], + data: { + ...data, + from: tx.tx.priority_op.eth_address, + to, pq_id: tx.pq_id, token, amount, }, @@ -216,7 +238,7 @@ export class Client { data: { ...data, from: tx.tx.from, - to: tx.tx.to, + to, token, amount, }, }; @@ -233,11 +255,33 @@ export class Client { data: { ...data, from: tx.tx.from, - to: tx.tx.to, + to, token, amount, }, }; } + case type == 'Close': { + return { + fields: [ + ], + data: { + ...data, + from: tx.tx.account, + to: '', + }, + }; + } + case type == 'ChangePubKey': { + return { + fields: [ + ], + data: { + ...data, + from: tx.tx.account, + to: '', + }, + }; + } } }); diff --git a/js/explorer/src/Transaction.vue b/js/explorer/src/Transaction.vue index d643326e5c..d0100c5c96 100644 --- a/js/explorer/src/Transaction.vue +++ b/js/explorer/src/Transaction.vue @@ -119,6 +119,7 @@ export default { const link_to = this.txData.tx_type == 'Withdraw' ? `${this.blockchainExplorerAddress}/${this.txData.to}` + : this.txData.tx_type == 'ChangePubKeyOffchain' ? 
'' : `${this.routerBase}accounts/${this.txData.to}`; const onchain_from diff --git a/js/tests/full-exit-inegration-test.ts b/js/tests/full-exit-inegration-test.ts index 8a536e6203..4b2d961420 100644 --- a/js/tests/full-exit-inegration-test.ts +++ b/js/tests/full-exit-inegration-test.ts @@ -62,7 +62,6 @@ async function testWrongETHWalletFullExit(ethWallet: ethers.Wallet, syncWallet: syncWallet.ethSigner = ethWallet; const fullExit = await syncWallet.emergencyWithdraw({ token, - nonce: 12341 }); await fullExit.awaitReceipt(); syncWallet.ethSigner = oldWallet; From 9de05abf066eb7f92d031fafb98b7f81d21338d9 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 31 Mar 2020 12:26:37 +0300 Subject: [PATCH 163/186] Fix balance display in client --- js/client/src/utils.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/js/client/src/utils.js b/js/client/src/utils.js index fdab48ec73..61470065de 100644 --- a/js/client/src/utils.js +++ b/js/client/src/utils.js @@ -27,13 +27,13 @@ export function readableEther(wei) { } export function getDisplayableBalanceDict(dict) { - let res = Object.assign({}, dict); - for (let token of readablyPrintableTokens) { - if (res[token] != undefined) { - res[token] = readableEther(dict[token]); - } - } - return res; + return Object.assign({}, + ...Object.entries(dict).map( + ([token, balance]) => ({ + [token]: readableEther(balance), + }) + ) + ); } export function getDisplayableBalanceList(list) { From 273494c7a1a796ff4d6db75a7265620b4c911cba Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 12:40:34 +0300 Subject: [PATCH 164/186] Add remaining benches for PlasmaState::apply_* methods --- core/plasma/benches/criterion/ops.rs | 160 ++++++++++++++++++++++++++- core/plasma/src/state.rs | 3 +- 2 files changed, 160 insertions(+), 3 deletions(-) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index b68576914e..1ed02e81a1 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -4,9 +4,12 @@ use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion}; use crypto_exports::rand::{thread_rng, Rng}; use models::node::{ account::{Account, PubKeyHash}, - operations::{TransferOp, TransferToNewOp}, + operations::{ + ChangePubKeyOp, CloseOp, DepositOp, FullExitOp, TransferOp, TransferToNewOp, WithdrawOp, + }, + priority_ops::{Deposit, FullExit}, priv_key_from_fs, - tx::{Transfer, TxSignature}, + tx::{ChangePubKey, Close, Transfer, TxSignature, Withdraw}, AccountId, AccountMap, Address, BlockNumber, TokenId, }; // Local uses @@ -112,9 +115,162 @@ fn apply_transfer_op(b: &mut Bencher<'_>) { ); } +fn apply_full_exit_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let to_account = state.get_account(0).expect("Can't get the account"); + + let full_exit = FullExit { + account_id: 0, + eth_address: Address::random(), + token: ETH_TOKEN_ID, + }; + + let full_exit_op = FullExitOp { + priority_op: full_exit, + withdraw_amount: Some(to_account.get_balance(ETH_TOKEN_ID)), + }; + + let setup = || (state.clone(), full_exit_op.clone()); + + b.iter_batched( + setup, + |(mut state, full_exit_op)| { + let _ = state.apply_full_exit_op(&black_box(full_exit_op)); + }, + BatchSize::SmallInput, + ); +} + +fn apply_deposit_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let to_account = state.get_account(0).expect("Can't get the account"); + + let deposit = Deposit { + from: Address::random(), + to: to_account.address, + token: 
ETH_TOKEN_ID, + amount: 10.into(), + }; + + let deposit_op = DepositOp { + priority_op: deposit, + account_id: 0, + }; + + let setup = || (state.clone(), deposit_op.clone()); + + b.iter_batched( + setup, + |(mut state, deposit_op)| { + let _ = state.apply_deposit_op(&black_box(deposit_op)); + }, + BatchSize::SmallInput, + ); +} + +fn apply_withdraw_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let from_account = state.get_account(0).expect("Can't get the account"); + + let withdraw = Withdraw { + from: from_account.address, + to: Address::random(), + token: ETH_TOKEN_ID, + amount: 10.into(), + fee: 1.into(), + nonce: 0, + signature: TxSignature::default(), + }; + + let withdraw_op = WithdrawOp { + tx: withdraw, + account_id: 0, + }; + + let setup = || (state.clone(), withdraw_op.clone()); + + b.iter_batched( + setup, + |(mut state, withdraw_op)| { + let _ = state.apply_withdraw_op(&black_box(withdraw_op)); + }, + BatchSize::SmallInput, + ); +} + +fn apply_close_op(b: &mut Bencher<'_>) { + let mut state = generate_state(); + + let mut to_remove = state.get_account(0).expect("Can't get the account"); + + // Remove balance from the account to close. + to_remove.set_balance(ETH_TOKEN_ID, 0.into()); + state.insert_account(0, to_remove.clone()); + + let close = Close { + account: to_remove.address, + nonce: 0, + signature: TxSignature::default(), + }; + + let close_op = CloseOp { + tx: close, + account_id: 0, + }; + + let setup = || (state.clone(), close_op.clone()); + + b.iter_batched( + setup, + |(mut state, close_op)| { + let _ = state.apply_close_op(&black_box(close_op)); + }, + BatchSize::SmallInput, + ); +} + +fn apply_change_pubkey_op(b: &mut Bencher<'_>) { + let state = generate_state(); + + let to_change = state.get_account(0).expect("Can't get the account"); + + let rng = &mut thread_rng(); + let new_sk = priv_key_from_fs(rng.gen()); + + let change_pubkey = ChangePubKey { + account: to_change.address, + new_pk_hash: PubKeyHash::from_privkey(&new_sk), + nonce: 0, + eth_signature: None, + }; + + let change_pubkey_op = ChangePubKeyOp { + tx: change_pubkey, + account_id: 0, + }; + + let setup = || (state.clone(), change_pubkey_op.clone()); + + b.iter_batched( + setup, + |(mut state, change_pubkey_op)| { + let _ = state.apply_change_pubkey_op(&black_box(change_pubkey_op)); + }, + BatchSize::SmallInput, + ); +} + pub fn bench_ops(c: &mut Criterion) { c.bench_function("apply_transfer_to_new_op bench", apply_transfer_to_new_op); c.bench_function("apply_transfer_op bench", apply_transfer_op); + c.bench_function("apply_withdraw_op bench", apply_withdraw_op); + c.bench_function("apply_apply_close_op bench", apply_close_op); + c.bench_function("apply_change_pubkey_op bench", apply_change_pubkey_op); + c.bench_function("apply_deposit_op bench", apply_deposit_op); + c.bench_function("apply_full_exit_op bench", apply_full_exit_op); } criterion_group!(ops_benches, bench_ops); diff --git a/core/plasma/src/state.rs b/core/plasma/src/state.rs index 5d23e4f97a..b3b5b16feb 100644 --- a/core/plasma/src/state.rs +++ b/core/plasma/src/state.rs @@ -343,7 +343,8 @@ impl PlasmaState { )) } - fn insert_account(&mut self, id: AccountId, account: Account) { + #[doc(hidden)] // Public for benches. 
+ pub fn insert_account(&mut self, id: AccountId, account: Account) { self.account_id_by_address .insert(account.address.clone(), id); self.balance_tree.insert(id, account); From 852bed36384b40737f2f6a3c1c3e37f286c1f63b Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 12:40:58 +0300 Subject: [PATCH 165/186] Better names for benches --- core/plasma/benches/criterion/ops.rs | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index 1ed02e81a1..a4b8ebf84c 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -264,13 +264,19 @@ fn apply_change_pubkey_op(b: &mut Bencher<'_>) { } pub fn bench_ops(c: &mut Criterion) { - c.bench_function("apply_transfer_to_new_op bench", apply_transfer_to_new_op); - c.bench_function("apply_transfer_op bench", apply_transfer_op); - c.bench_function("apply_withdraw_op bench", apply_withdraw_op); - c.bench_function("apply_apply_close_op bench", apply_close_op); - c.bench_function("apply_change_pubkey_op bench", apply_change_pubkey_op); - c.bench_function("apply_deposit_op bench", apply_deposit_op); - c.bench_function("apply_full_exit_op bench", apply_full_exit_op); + c.bench_function( + "PlasmaState::apply_transfer_to_new_op bench", + apply_transfer_to_new_op, + ); + c.bench_function("PlasmaState::apply_transfer_op bench", apply_transfer_op); + c.bench_function("PlasmaState::apply_withdraw_op bench", apply_withdraw_op); + c.bench_function("PlasmaState::apply_apply_close_op bench", apply_close_op); + c.bench_function( + "PlasmaState::apply_change_pubkey_op bench", + apply_change_pubkey_op, + ); + c.bench_function("PlasmaState::apply_deposit_op bench", apply_deposit_op); + c.bench_function("PlasmaState::apply_full_exit_op bench", apply_full_exit_op); } criterion_group!(ops_benches, bench_ops); From 06e7c0e77acb1c447ba5b9a5db261ad2bd036588 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 12:46:55 +0300 Subject: [PATCH 166/186] Add bench for PlasmaState::insert_account --- core/plasma/benches/criterion/ops.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index a4b8ebf84c..65d0b12a06 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -263,6 +263,21 @@ fn apply_change_pubkey_op(b: &mut Bencher<'_>) { ); } +fn insert_account(b: &mut Bencher<'_>) { + let state = generate_state(); + + let to_insert = generate_account(); + let setup = || (state.clone(), to_insert.clone()); + + b.iter_batched( + setup, + |(mut state, to_insert)| { + state.insert_account(black_box(ACCOUNTS_AMOUNT), to_insert); + }, + BatchSize::SmallInput, + ); +} + pub fn bench_ops(c: &mut Criterion) { c.bench_function( "PlasmaState::apply_transfer_to_new_op bench", @@ -277,6 +292,7 @@ pub fn bench_ops(c: &mut Criterion) { ); c.bench_function("PlasmaState::apply_deposit_op bench", apply_deposit_op); c.bench_function("PlasmaState::apply_full_exit_op bench", apply_full_exit_op); + c.bench_function("PlasmaState::insert_account bench", insert_account); } criterion_group!(ops_benches, bench_ops); From 11940843c179a967a9c5c1f2a2c7c8040b411751 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 13:01:21 +0300 Subject: [PATCH 167/186] Add doc-comments and report the throughput --- core/plasma/benches/criterion/ops.rs | 43 ++++++++++++++++++++++------ 1 file changed, 34 
insertions(+), 9 deletions(-) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index 65d0b12a06..26759b44cf 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -1,5 +1,7 @@ +//! Benchmarks for the `PlasmaState` operations execution time. + // External uses -use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion}; +use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion, Throughput}; // Workspace uses use crypto_exports::rand::{thread_rng, Rng}; use models::node::{ @@ -16,9 +18,12 @@ use models::node::{ use plasma::state::PlasmaState; const ETH_TOKEN_ID: TokenId = 0x00; +// The amount is not important, since we always work with 1 account. +// We use some small non-zero value, so the overhead for cloning will not be big. const ACCOUNTS_AMOUNT: AccountId = 10; const CURRENT_BLOCK: BlockNumber = 1_000; +/// Creates a random ZKSync account. fn generate_account() -> Account { let default_balance = 1_000_000.into(); @@ -46,6 +51,7 @@ fn generate_state() -> PlasmaState { PlasmaState::new(accounts, CURRENT_BLOCK) } +/// Bench for `PlasmaState::apply_transfer_to_new_op`. fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -80,6 +86,7 @@ fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_transfer_op`. fn apply_transfer_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -115,6 +122,7 @@ fn apply_transfer_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_full_exit_op`. fn apply_full_exit_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -142,6 +150,7 @@ fn apply_full_exit_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_deposit_op`. fn apply_deposit_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -170,6 +179,7 @@ fn apply_deposit_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_withdraw_op`. fn apply_withdraw_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -201,6 +211,7 @@ fn apply_withdraw_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_close_op`. fn apply_close_op(b: &mut Bencher<'_>) { let mut state = generate_state(); @@ -232,6 +243,7 @@ fn apply_close_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::apply_change_pubkey_op`. fn apply_change_pubkey_op(b: &mut Bencher<'_>) { let state = generate_state(); @@ -263,6 +275,10 @@ fn apply_change_pubkey_op(b: &mut Bencher<'_>) { ); } +/// Bench for `PlasmaState::insert_account`. +/// +/// While this method is not directly performing an operation, it is used in every operation, +/// and it seems to be the most expensive part of all the methods above. fn insert_account(b: &mut Bencher<'_>) { let state = generate_state(); @@ -279,20 +295,29 @@ fn insert_account(b: &mut Bencher<'_>) { } pub fn bench_ops(c: &mut Criterion) { - c.bench_function( + const INPUT_SIZE: Throughput = Throughput::Elements(1); + + let mut group = c.benchmark_group("PlasmaState operations"); + + // Setup the input size so the throughput will be reported. 
+ group.throughput(INPUT_SIZE); + + group.bench_function( "PlasmaState::apply_transfer_to_new_op bench", apply_transfer_to_new_op, ); - c.bench_function("PlasmaState::apply_transfer_op bench", apply_transfer_op); - c.bench_function("PlasmaState::apply_withdraw_op bench", apply_withdraw_op); - c.bench_function("PlasmaState::apply_apply_close_op bench", apply_close_op); - c.bench_function( + group.bench_function("PlasmaState::apply_transfer_op bench", apply_transfer_op); + group.bench_function("PlasmaState::apply_withdraw_op bench", apply_withdraw_op); + group.bench_function("PlasmaState::apply_apply_close_op bench", apply_close_op); + group.bench_function( "PlasmaState::apply_change_pubkey_op bench", apply_change_pubkey_op, ); - c.bench_function("PlasmaState::apply_deposit_op bench", apply_deposit_op); - c.bench_function("PlasmaState::apply_full_exit_op bench", apply_full_exit_op); - c.bench_function("PlasmaState::insert_account bench", insert_account); + group.bench_function("PlasmaState::apply_deposit_op bench", apply_deposit_op); + group.bench_function("PlasmaState::apply_full_exit_op bench", apply_full_exit_op); + group.bench_function("PlasmaState::insert_account bench", insert_account); + + group.finish(); } criterion_group!(ops_benches, bench_ops); From b840c8d7593c6ab02cd44632a0bc33b211a3f9b7 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 13:04:12 +0300 Subject: [PATCH 168/186] Add skeleton for models::primitives benches --- core/models/benches/criterion/lib.rs | 4 +++- core/models/benches/criterion/primitives/mod.rs | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 core/models/benches/criterion/primitives/mod.rs diff --git a/core/models/benches/criterion/lib.rs b/core/models/benches/criterion/lib.rs index 7d336d13b3..faf1e284ca 100644 --- a/core/models/benches/criterion/lib.rs +++ b/core/models/benches/criterion/lib.rs @@ -1,7 +1,9 @@ use criterion::criterion_main; use merkle_tree::merkle_tree_benches; +use primitives::primitives_benches; mod merkle_tree; +mod primitives; -criterion_main!(merkle_tree_benches); +criterion_main!(merkle_tree_benches, primitives_benches); diff --git a/core/models/benches/criterion/primitives/mod.rs b/core/models/benches/criterion/primitives/mod.rs new file mode 100644 index 0000000000..d8935bc574 --- /dev/null +++ b/core/models/benches/criterion/primitives/mod.rs @@ -0,0 +1,5 @@ +use criterion::{criterion_group, Criterion}; + +pub fn bench_primitives(_c: &mut Criterion) {} + +criterion_group!(primitives_benches, bench_primitives); From e5cd1ffbe00c1a01b4654cf9ff8a0d7dccdf9055 Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 31 Mar 2020 13:06:39 +0300 Subject: [PATCH 169/186] Add TransferToNew to transactions history test --- .../storage/src/tests/chain/operations_ext.rs | 34 +++++++++++++++++-- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/core/storage/src/tests/chain/operations_ext.rs b/core/storage/src/tests/chain/operations_ext.rs index 054cb3d3ca..02aed35658 100644 --- a/core/storage/src/tests/chain/operations_ext.rs +++ b/core/storage/src/tests/chain/operations_ext.rs @@ -7,7 +7,9 @@ use crypto_exports::franklin_crypto::bellman::pairing::ff::Field; use models::node::block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; use models::node::operations::{ChangePubKeyOp, FranklinOp}; use models::node::priority_ops::PriorityOp; -use models::node::{CloseOp, Deposit, DepositOp, Fr, FullExit, FullExitOp, TransferOp, WithdrawOp}; +use models::node::{ + CloseOp, Deposit, 
DepositOp, Fr, FullExit, FullExitOp, TransferOp, TransferToNewOp, WithdrawOp, +}; use testkit::zksync_account::ZksyncAccount; // Local imports use crate::tests::db_test; @@ -82,6 +84,31 @@ fn get_account_transactions_history() { ExecutedOperations::PriorityOp(Box::new(executed_op)) }; + let executed_transfer_to_new_op = { + let transfer_to_new_op = FranklinOp::TransferToNew(Box::new(TransferToNewOp { + tx: from_zksync_account.sign_transfer( + token, + amount.clone(), + BigDecimal::from(0), + &to_account_address, + None, + true, + ), + from: from_account_id, + to: to_account_id, + })); + + let executed_transfer_to_new_op = ExecutedTx { + tx: transfer_to_new_op.try_get_tx().unwrap(), + success: true, + op: Some(transfer_to_new_op), + fail_reason: None, + block_index: None, + }; + + ExecutedOperations::Tx(Box::new(executed_transfer_to_new_op)) + }; + let executed_transfer_op = { let transfer_op = FranklinOp::Transfer(Box::new(TransferOp { tx: from_zksync_account.sign_transfer( @@ -172,6 +199,7 @@ fn get_account_transactions_history() { block_transactions: vec![ executed_deposit_op, executed_full_exit_op, + executed_transfer_to_new_op, executed_transfer_op, executed_withdraw_op, executed_close_op, @@ -249,8 +277,8 @@ fn get_account_transactions_history() { .operations_ext_schema() .get_account_transactions_history(&to_account_address, 0, 10)?; - assert_eq!(from_history.len(), 6); - assert_eq!(to_history.len(), 3); + assert_eq!(from_history.len(), 7); + assert_eq!(to_history.len(), 4); Ok(()) }); From 78bba681a37d2af4f16c9cb3690d9c112049b963 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 15:54:25 +0300 Subject: [PATCH 170/186] Add benches for primitives --- .../benches/criterion/primitives/mod.rs | 117 +++++++++++++++++- 1 file changed, 115 insertions(+), 2 deletions(-) diff --git a/core/models/benches/criterion/primitives/mod.rs b/core/models/benches/criterion/primitives/mod.rs index d8935bc574..5631f743c7 100644 --- a/core/models/benches/criterion/primitives/mod.rs +++ b/core/models/benches/criterion/primitives/mod.rs @@ -1,5 +1,118 @@ -use criterion::{criterion_group, Criterion}; +// External uses +use bigdecimal::BigDecimal; +use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion, Throughput}; +// Local uses +use models::primitives::{ + big_decimal_to_u128, bytes_into_be_bits, get_bits_le_fixed_u128, pack_bits_into_bytes, + pack_bits_into_bytes_in_order, u128_to_bigdecimal, BitIteratorLe, GetBits, +}; -pub fn bench_primitives(_c: &mut Criterion) {} +/// Input size for byte slices (module-wide for calculating the throughput). 
+const BYTE_SLICE_SIZE: usize = 512;
+
+fn bench_u64_get_bits_le(b: &mut Bencher<'_>) {
+    let value: u64 = 0xDEAD_BEEF_DEAD_BEEF;
+
+    b.iter(|| {
+        let _ = black_box(value).get_bits_le();
+    });
+}
+
+fn bench_get_bits_le_fixed_u128(b: &mut Bencher<'_>) {
+    let value: u128 = 0xDEAD_BEEF_DEAD_BEEF_DEAD_BEEF_DEAD_BEEF;
+    let n = 128;
+
+    b.iter(|| {
+        let _ = get_bits_le_fixed_u128(black_box(value), n);
+    });
+}
+
+fn bench_big_decimal_to_u128(b: &mut Bencher<'_>) {
+    let value: BigDecimal = 0x0EAD_BEEF.into();
+
+    b.iter(|| {
+        let _ = big_decimal_to_u128(&black_box(value.clone()));
+    });
+}
+
+fn bench_u128_to_bigdecimal(b: &mut Bencher<'_>) {
+    let value: u128 = 0xDEAD_BEEF_DEAD_BEEF_DEAD_BEEF_DEAD_BEEF;
+
+    b.iter(|| {
+        let _ = u128_to_bigdecimal(black_box(value));
+    });
+}
+
+fn bench_bytes_into_be_bits(b: &mut Bencher<'_>) {
+    let value: Vec<u8> = vec![0xAB; BYTE_SLICE_SIZE];
+
+    let value_ref: &[u8] = value.as_ref();
+
+    b.iter(|| {
+        let _ = bytes_into_be_bits(black_box(value_ref));
+    });
+}
+
+fn bench_pack_bits_into_bytes(b: &mut Bencher<'_>) {
+    let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];
+
+    let setup = || value.clone();
+
+    b.iter_batched(
+        setup,
+        |value| {
+            let _ = pack_bits_into_bytes(black_box(value));
+        },
+        BatchSize::SmallInput,
+    );
+}
+
+fn bench_pack_bits_into_bytes_in_order(b: &mut Bencher<'_>) {
+    let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];
+
+    let setup = || value.clone();
+
+    b.iter_batched(
+        setup,
+        |value| {
+            let _ = pack_bits_into_bytes_in_order(black_box(value));
+        },
+        BatchSize::SmallInput,
+    );
+}
+
+fn bench_bit_iterator_le_next(b: &mut Bencher<'_>) {
+    let value: Vec<u64> = vec![0xDEAD_BEEF_DEAD_BEEF; BYTE_SLICE_SIZE / 8];
+
+    let setup = || BitIteratorLe::new(&value);
+
+    b.iter_batched(
+        setup,
+        |mut bit_iterator| {
+            while let Some(_) = bit_iterator.next() {
+                // Do nothing, we're just draining the iterator.
+ } + }, + BatchSize::SmallInput, + ); +} + +pub fn bench_primitives(c: &mut Criterion) { + c.bench_function("u64_get_bits_le", bench_u64_get_bits_le); + c.bench_function("get_bits_le_fixed_u128", bench_get_bits_le_fixed_u128); + c.bench_function("big_decimal_to_u128", bench_big_decimal_to_u128); + c.bench_function("u128_to_bigdecimal", bench_u128_to_bigdecimal); + + let mut group = c.benchmark_group("Bit Converters"); + + group.throughput(Throughput::Bytes(BYTE_SLICE_SIZE as u64)); + group.bench_function("bytes_into_be_bits", bench_bytes_into_be_bits); + group.bench_function("pack_bits_into_bytes", bench_pack_bits_into_bytes); + group.bench_function( + "pack_bits_into_bytes_in_order", + bench_pack_bits_into_bytes_in_order, + ); + group.bench_function("BitIterator::next", bench_bit_iterator_le_next); +} criterion_group!(primitives_benches, bench_primitives); From b2864933b4376bb80a2f34bc9cff72fe05e253ec Mon Sep 17 00:00:00 2001 From: Oleg Syniakevych Date: Tue, 31 Mar 2020 16:42:08 +0300 Subject: [PATCH 171/186] Add TxAddError::EIP1271SignatureVerificationFail --- core/models/src/misc/constants.rs | 3 +- core/models/src/node/tx.rs | 17 +++++++-- core/server/src/api_server/rpc_server.rs | 26 ++++++++------ core/server/src/eth_watch.rs | 6 ++-- core/server/src/mempool.rs | 46 ++++++++++++++++-------- 5 files changed, 67 insertions(+), 31 deletions(-) diff --git a/core/models/src/misc/constants.rs b/core/models/src/misc/constants.rs index ea64b19df1..e4f33e5923 100644 --- a/core/models/src/misc/constants.rs +++ b/core/models/src/misc/constants.rs @@ -4,5 +4,6 @@ pub const ETH_SIGNATURE_LENGTH: usize = 65; /// Two bytes for "0x", and two for each byte of the signature. pub const ETH_SIGNATURE_HEX_LENGTH: usize = (ETH_SIGNATURE_LENGTH * 2) + 2; -/// EIP1271 isValidSignature return value +/// isValidSignature return value according to EIP1271 standard +/// bytes4(keccak256("isValidSignature(bytes,bytes)") pub const EIP1271_SUCCESS_RETURN_VALUE: [u8; 4] = [0x20, 0xc1, 0x3b, 0x0b]; diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index fe8f50d532..a2fb897b2f 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -23,6 +23,7 @@ use ethsign::{SecretKey, Signature as ETHSignature}; use failure::{ensure, format_err}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::convert::TryInto; +use std::fmt; use std::str::FromStr; use web3::types::{Address, H256}; @@ -126,7 +127,10 @@ impl Transfer { /// Get message that should be signed by Ethereum keys of the account for 2F authentication. pub fn get_ethereum_sign_message(&self, token_symbol: &str) -> String { format!( - "Transfer {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", + "Transfer {amount} {token}\n\ + To: {to:?}\n\ + Nonce: {nonce}\n\ + Fee: {fee} {token}", amount = format_ether(&self.amount), token = token_symbol, to = self.to, @@ -180,7 +184,10 @@ impl Withdraw { /// Get message that should be signed by Ethereum keys of the account for 2F authentication. 
pub fn get_ethereum_sign_message(&self, token_symbol: &str) -> String { format!( - "Withdraw {amount} {token}\nTo: {to:?}\nNonce: {nonce}\nFee: {fee} {token}", + "Withdraw {amount} {token}\n\ + To: {to:?}\n\ + Nonce: {nonce}\n\ + Fee: {fee} {token}", amount = format_ether(&self.amount), token = token_symbol, to = self.to, @@ -581,6 +588,12 @@ pub enum TxEthSignature { #[derive(Debug, Clone)] pub struct EIP1271Signature(pub Vec); +impl fmt::Display for EIP1271Signature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "EIP1271Signature 0x{}", hex::encode(&self.0.as_slice())) + } +} + impl<'de> Deserialize<'de> for EIP1271Signature { fn deserialize(deserializer: D) -> Result where diff --git a/core/server/src/api_server/rpc_server.rs b/core/server/src/api_server/rpc_server.rs index acae68a657..4da85d6307 100644 --- a/core/server/src/api_server/rpc_server.rs +++ b/core/server/src/api_server/rpc_server.rs @@ -88,22 +88,26 @@ pub struct ContractAddressResp { enum RpcErrorCodes { NonceMismatch = 101, IncorrectTx = 103, - MissingEthSignature = 104, - IncorrectEthSignature = 105, - Other = 106, - ChangePkNotAuthorized = 107, - AccountCloseDisabled = 110, + + MissingEthSignature = 200, + EIP1271SignatureVerificationFail = 201, + IncorrectEthSignature = 202, + ChangePkNotAuthorized = 203, + + Other = 300, + AccountCloseDisabled = 301, } impl From for RpcErrorCodes { fn from(error: TxAddError) -> Self { match error { - TxAddError::NonceMismatch => RpcErrorCodes::NonceMismatch, - TxAddError::IncorrectTx => RpcErrorCodes::IncorrectTx, - TxAddError::MissingEthSignature => RpcErrorCodes::MissingEthSignature, - TxAddError::IncorrectEthSignature => RpcErrorCodes::IncorrectEthSignature, - TxAddError::ChangePkNotAuthorized => RpcErrorCodes::ChangePkNotAuthorized, - TxAddError::Other => RpcErrorCodes::Other, + TxAddError::NonceMismatch => Self::NonceMismatch, + TxAddError::IncorrectTx => Self::IncorrectTx, + TxAddError::MissingEthSignature => Self::MissingEthSignature, + TxAddError::EIP1271SignatureVerificationFail => Self::EIP1271SignatureVerificationFail, + TxAddError::IncorrectEthSignature => Self::IncorrectEthSignature, + TxAddError::ChangePkNotAuthorized => Self::ChangePkNotAuthorized, + TxAddError::Other => Self::Other, } } } diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index 0b60ebe81f..9f4c0422f4 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -50,7 +50,7 @@ pub enum EthWatchRequest { address: Address, data: Vec, signature: EIP1271Signature, - resp: oneshot::Sender, + resp: oneshot::Sender>, }, } @@ -408,8 +408,8 @@ impl EthWatch { } => { let signature_correct = self .is_eip1271_signature_correct(address, data, signature) - .await - .unwrap_or(false); + .await; + resp.send(signature_correct).unwrap_or_default(); } } diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs index 6909ebcadc..6c6118cb2e 100644 --- a/core/server/src/mempool.rs +++ b/core/server/src/mempool.rs @@ -33,14 +33,22 @@ use web3::types::Address; pub enum TxAddError { #[fail(display = "Tx nonce is too low.")] NonceMismatch, + #[fail(display = "Tx is incorrect")] IncorrectTx, + + #[fail(display = "EIP1271 signature could not be verified")] + EIP1271SignatureVerificationFail, + #[fail(display = "MissingEthSignature")] MissingEthSignature, + #[fail(display = "Eth signature is incorrect")] IncorrectEthSignature, + #[fail(display = "Change pubkey tx is not authorized onchain")] ChangePkNotAuthorized, + #[fail(display = "Internal error")] 
Other, } @@ -217,8 +225,15 @@ impl Mempool { .await .expect("ETH watch req receiver dropped"); - if !eth_watch_resp.1.await.expect("Err response from eth watch") { - return Err(TxAddError::IncorrectEthSignature); + let signature_correct = eth_watch_resp + .1 + .await + .expect("Failed receiving response from eth watch") + .map_err(|e| warn!("Err in eth watch: {}", e)) + .or(Err(TxAddError::EIP1271SignatureVerificationFail))?; + + if !signature_correct { + return Err(TxAddError::IncorrectTx); + } } }; @@ -337,11 +352,13 @@ impl Mempool { } } +#[derive(Debug)] struct TokenCache { db_pool: ConnectionPool, ids_to_symbols: HashMap, } +// TODO: delete tokens from cache after timeout impl TokenCache { pub fn new(db_pool: ConnectionPool) -> Self { Self { @@ -357,22 +374,23 @@ impl TokenCache { match self.ids_to_symbols.get(&token_id).cloned() { Some(token_symbol) => Ok(Some(token_symbol)), None => { - match self + let storage = self .db_pool .access_storage_fragile() - .map_err(|e| format_err!("Failed to access storage: {}", e))? + .map_err(|e| format_err!("Failed to access storage: {}", e))?; + + let loaded_tokens = storage .tokens_schema() .load_tokens() - .map_err(|e| format_err!("Tokens load failed: {}", e))? - .get(&token_id) - .cloned() - { - Some(token_info) => Ok(Some( - self.ids_to_symbols - .entry(token_id) - .or_insert(token_info.symbol) - .clone(), - )), + .map_err(|e| format_err!("Tokens load failed: {}", e))?; + + let symbol_from_db = loaded_tokens.get(&token_id).map(|t| t.symbol.clone()); + + match symbol_from_db { + Some(symbol) => { + self.ids_to_symbols.insert(token_id, symbol.clone()); + Ok(Some(symbol)) + } None => Ok(None), } } From 9d6866dac0012b8b68307e993e80ef027cd1434c Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 16:54:25 +0300 Subject: [PATCH 172/186] Calculate throughput for pedersen hasher --- .../criterion/merkle_tree/pedersen_hasher.rs | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs b/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs index 7c66591813..800da94ecf 100644 --- a/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs +++ b/core/models/benches/criterion/merkle_tree/pedersen_hasher.rs @@ -1,9 +1,12 @@ //! Benchmarks for the Parallel Sparse Merkle Tree. -use criterion::{black_box, BatchSize, Bencher, Criterion}; +use criterion::{black_box, BatchSize, Bencher, Criterion, Throughput}; use models::franklin_crypto::bellman::pairing::bn256::Bn256; use models::merkle_tree::{hasher::Hasher, PedersenHasher}; +const SMALL_INPUT_SIZE: usize = 16; // 16 bits / 2 bytes +const BIG_INPUT_SIZE: usize = models::params::MAX_CIRCUIT_PEDERSEN_HASH_BITS; // Biggest supported size. + /// Creates a boolean vector for `PedersonHasher` input. fn generate_input(size: usize) -> Vec { (0..size).map(|i| i % 2 == 0).collect() @@ -11,7 +14,7 @@ fn generate_input(size: usize) -> Vec { /// Measures the hashing time for a small input. fn pedersen_small(b: &mut Bencher<'_>) { - const INPUT_SIZE: usize = 8; // 1 byte. + const INPUT_SIZE: usize = SMALL_INPUT_SIZE; let hasher = PedersenHasher::::default(); let input: Vec = generate_input(INPUT_SIZE); @@ -29,7 +32,7 @@ fn pedersen_small(b: &mut Bencher<'_>) { /// Measures the hashing time for a (relatively) big input. fn pedersen_big(b: &mut Bencher<'_>) { - const INPUT_SIZE: usize = models::params::MAX_CIRCUIT_PEDERSEN_HASH_BITS; // Biggest supported size. 
+ const INPUT_SIZE: usize = BIG_INPUT_SIZE; let hasher = PedersenHasher::::default(); let input: Vec = generate_input(INPUT_SIZE); @@ -46,6 +49,13 @@ fn pedersen_big(b: &mut Bencher<'_>) { } pub fn bench_pedersen_hasher(c: &mut Criterion) { - c.bench_function("Pedersen Hasher small input", pedersen_small); - c.bench_function("Pedersen Hasher big input", pedersen_big); + let mut small_input_group = c.benchmark_group("Small input"); + small_input_group.throughput(Throughput::Bytes((SMALL_INPUT_SIZE / 8) as u64)); + small_input_group.bench_function("Pedersen Hasher", pedersen_small); + small_input_group.finish(); + + let mut big_input_group = c.benchmark_group("Big input"); + big_input_group.throughput(Throughput::Bytes((BIG_INPUT_SIZE / 8) as u64)); + big_input_group.bench_function("Pedersen Hasher", pedersen_big); + big_input_group.finish(); } From b283e9e61802066fa5a4c2dd5670c3d46d7f5def Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 16:54:39 +0300 Subject: [PATCH 173/186] Finish the group for primitives --- core/models/benches/criterion/primitives/mod.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/models/benches/criterion/primitives/mod.rs b/core/models/benches/criterion/primitives/mod.rs index 5631f743c7..d0aa79a3d3 100644 --- a/core/models/benches/criterion/primitives/mod.rs +++ b/core/models/benches/criterion/primitives/mod.rs @@ -113,6 +113,8 @@ pub fn bench_primitives(c: &mut Criterion) { bench_pack_bits_into_bytes_in_order, ); group.bench_function("BitIterator::next", bench_bit_iterator_le_next); + + group.finish(); } criterion_group!(primitives_benches, bench_primitives); From f952781b1502588c7c6f170525a5565b06f74ab1 Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 17:28:44 +0300 Subject: [PATCH 174/186] Make clippy happy again --- core/models/benches/criterion/primitives/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/models/benches/criterion/primitives/mod.rs b/core/models/benches/criterion/primitives/mod.rs index d0aa79a3d3..d64611bb4a 100644 --- a/core/models/benches/criterion/primitives/mod.rs +++ b/core/models/benches/criterion/primitives/mod.rs @@ -88,8 +88,8 @@ fn bench_bit_iterator_le_next(b: &mut Bencher<'_>) { b.iter_batched( setup, - |mut bit_iterator| { - while let Some(_) = bit_iterator.next() { + |bit_iterator| { + for _ in bit_iterator { // Do nothing, we're just draining the iterator. 
} }, From 6cdb9a2743e098189afd1764d61b77c84ba98d5b Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 17:51:31 +0300 Subject: [PATCH 175/186] Add the testing entry to the readme --- README.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/README.md b/README.md index 358725e787..fd9408f704 100644 --- a/README.md +++ b/README.md @@ -130,6 +130,32 @@ zksync dockerhub-push zksync db-tests ``` +## Testing + +- Running all the `rust` tests: + + ```sh + f cargo test + ``` + +- Running the database tests: + + ```sh + zksync db-tests + ``` +- Running the integration test: + ```sh + zksync server # Has to be run in the 1st terminal + zksync prover # Has to be run in the 2nd terminal + zksync integration-simple # Has to be run in the 3rd terminal + ``` + +- Running the benchmarks: + + ```sh + f cargo bench + ``` + ## Generating keys To generate a proving key, from `server` dir run: From 606dff06fca5ceaebc2bf4c1d5c5409038ffa79e Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Tue, 31 Mar 2020 18:02:31 +0300 Subject: [PATCH 176/186] Add more entries to the testing chapter --- README.md | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fd9408f704..eedeb76d60 100644 --- a/README.md +++ b/README.md @@ -139,19 +139,40 @@ zksync dockerhub-push ``` - Running the database tests: - + ```sh zksync db-tests ``` - Running the integration test: + ```sh zksync server # Has to be run in the 1st terminal zksync prover # Has to be run in the 2nd terminal zksync integration-simple # Has to be run in the 3rd terminal ``` -- Running the benchmarks: +- Running the full integration tests (similar to `integration-simple`, but performs different full exits): + + ```sh + zksync server # Has to be run in the 1st terminal + zksync prover # Has to be run in the 2nd terminal + zksync integration-full-exit # Has to be run in the 3rd terminal + ``` +- Running the circuit tests: + + ```sh + zksync circuit-tests + ``` + +- Running the prover tests: + + ```sh + zksync prover-tests + ``` + +- Running the benchmarks: + ```sh f cargo bench ``` From 8db106a56625d4c2beb828ebf1b496dbd145dc8c Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 31 Mar 2020 18:06:33 +0300 Subject: [PATCH 177/186] Remove build of zksync-crypto for init --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index aa75dd14ec..626e763bfd 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,7 @@ init: @bin/init yarn: - @cd js/zksync-crypto && yarn build + @cd js/zksync-crypto @cd js/zksync.js && yarn && yarn build @cd js/client && yarn @cd js/explorer && yarn From 8f9261fc9c3a931c03ee1ac9320d81ffea125e7e Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 31 Mar 2020 18:14:39 +0300 Subject: [PATCH 178/186] Return exit code for full exit integration --- js/tests/full-exit-inegration-test.ts | 82 ++++++++++++++------------- 1 file changed, 43 insertions(+), 39 deletions(-) diff --git a/js/tests/full-exit-inegration-test.ts b/js/tests/full-exit-inegration-test.ts index 4b2d961420..b1143d075a 100644 --- a/js/tests/full-exit-inegration-test.ts +++ b/js/tests/full-exit-inegration-test.ts @@ -74,45 +74,49 @@ async function testWrongETHWalletFullExit(ethWallet: ethers.Wallet, syncWallet: } (async () => { - const WEB3_URL = process.env.WEB3_URL; + try { + const WEB3_URL = process.env.WEB3_URL; // Mnemonic for eth wallet. 
- const MNEMONIC = process.env.MNEMONIC; - const ERC_20TOKEN = process.env.TEST_ERC20; - const network = process.env.ETH_NETWORK == "localhost" ? "localhost" : "testnet"; - console.log("Running integration test on the ", network, " network"); - - syncProvider = await getDefaultProvider(network); - - const ethersProvider = new ethers.providers.JsonRpcProvider(WEB3_URL); - - const ethWallet = ethers.Wallet.fromMnemonic( - MNEMONIC, - "m/44'/60'/0'/0/1" - ).connect(ethersProvider); - const depositWallet = await Wallet.fromEthSignerNoKeys(ethWallet, syncProvider); - - - for (let token of ["ETH", ERC_20TOKEN]) { - let amount = utils.parseEther("0.089"); - const ethWallet2 = ethers.Wallet.createRandom().connect(ethersProvider); - const syncWallet2 = await Wallet.fromEthSigner( - ethWallet2, - syncProvider, - ); - await (await ethWallet.sendTransaction({to: ethWallet2.address, value: parseEther("0.5")})).wait(); - - await testRandomAccountFullExit(syncWallet2, token); - const deposit = await depositWallet.depositToSyncFromEthereum({ - depositTo: syncWallet2.address(), - token, - amount, - }); - await deposit.awaitReceipt(); - await testWrongETHWalletFullExit(ethWallet, syncWallet2, token); - await testNormalFullExit(syncWallet2, token); - await testEmptyBalanceFullExit(syncWallet2, token); + const MNEMONIC = process.env.MNEMONIC; + const ERC_20TOKEN = process.env.TEST_ERC20; + const network = process.env.ETH_NETWORK == "localhost" ? "localhost" : "testnet"; + console.log("Running integration test on the ", network, " network"); + + syncProvider = await getDefaultProvider(network); + + const ethersProvider = new ethers.providers.JsonRpcProvider(WEB3_URL); + + const ethWallet = ethers.Wallet.fromMnemonic( + MNEMONIC, + "m/44'/60'/0'/0/1" + ).connect(ethersProvider); + const depositWallet = await Wallet.fromEthSignerNoKeys(ethWallet, syncProvider); + + + for (let token of ["ETH", ERC_20TOKEN]) { + let amount = utils.parseEther("0.089"); + const ethWallet2 = ethers.Wallet.createRandom().connect(ethersProvider); + const syncWallet2 = await Wallet.fromEthSigner( + ethWallet2, + syncProvider, + ); + await (await ethWallet.sendTransaction({to: ethWallet2.address, value: parseEther("0.5")})).wait(); + + await testRandomAccountFullExit(syncWallet2, token); + const deposit = await depositWallet.depositToSyncFromEthereum({ + depositTo: syncWallet2.address(), + token, + amount, + approveDepositAmountForERC20: true, + }); + await deposit.awaitReceipt(); + await testWrongETHWalletFullExit(ethWallet, syncWallet2, token); + await testNormalFullExit(syncWallet2, token); + await testEmptyBalanceFullExit(syncWallet2, token); + } + await syncProvider.disconnect(); + } catch (e) { + console.error("Error:", e); + process.exit(1); } - - - await syncProvider.disconnect(); })(); From abb97515f00abebade003f90abe432f1ad6ba6c5 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 31 Mar 2020 18:16:29 +0300 Subject: [PATCH 179/186] integration full exit on ci --- .drone.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 24259eab35..bcdcf213e9 100644 --- a/.drone.yml +++ b/.drone.yml @@ -107,7 +107,7 @@ steps: --- kind: pipeline type: docker -name: integration-simple +name: integration-simple-fullexit clone: depth: 10 volumes: @@ -181,6 +181,7 @@ steps: - export CARGO_HOME=$ZKSYNC_HOME/target/cargo - sleep 15 - zksync integration-simple + - zksync integration-full-exit depends_on: - start-server-detached - start-prover-detached From c6c6afff7dcb761abbe76180004abae5c244025e 
Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Tue, 31 Mar 2020 18:55:01 +0300 Subject: [PATCH 180/186] correct ws provider --- js/tests/full-exit-inegration-test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/tests/full-exit-inegration-test.ts b/js/tests/full-exit-inegration-test.ts index b1143d075a..43b8cdb079 100644 --- a/js/tests/full-exit-inegration-test.ts +++ b/js/tests/full-exit-inegration-test.ts @@ -82,7 +82,7 @@ async function testWrongETHWalletFullExit(ethWallet: ethers.Wallet, syncWallet: const network = process.env.ETH_NETWORK == "localhost" ? "localhost" : "testnet"; console.log("Running integration test on the ", network, " network"); - syncProvider = await getDefaultProvider(network); + syncProvider = await Provider.newWebsocketProvider(process.env.WS_API_ADDR); const ethersProvider = new ethers.providers.JsonRpcProvider(WEB3_URL); From 115ea8b204091619d198cd4e9c903753d28fc39f Mon Sep 17 00:00:00 2001 From: Igor Aleksanov Date: Wed, 1 Apr 2020 08:23:26 +0300 Subject: [PATCH 181/186] Benchmark transactions, not operations --- core/plasma/benches/criterion/ops.rs | 178 ++++++++++++--------------- 1 file changed, 78 insertions(+), 100 deletions(-) diff --git a/core/plasma/benches/criterion/ops.rs b/core/plasma/benches/criterion/ops.rs index 26759b44cf..e4d592cfb7 100644 --- a/core/plasma/benches/criterion/ops.rs +++ b/core/plasma/benches/criterion/ops.rs @@ -1,18 +1,19 @@ //! Benchmarks for the `PlasmaState` operations execution time. +// Built-in deps +use std::collections::HashMap; // External uses use criterion::{black_box, criterion_group, BatchSize, Bencher, Criterion, Throughput}; +use web3::types::H256; // Workspace uses use crypto_exports::rand::{thread_rng, Rng}; use models::node::{ account::{Account, PubKeyHash}, - operations::{ - ChangePubKeyOp, CloseOp, DepositOp, FullExitOp, TransferOp, TransferToNewOp, WithdrawOp, - }, priority_ops::{Deposit, FullExit}, priv_key_from_fs, - tx::{ChangePubKey, Close, Transfer, TxSignature, Withdraw}, - AccountId, AccountMap, Address, BlockNumber, TokenId, + tx::{ChangePubKey, PackedEthSignature, Transfer, TxSignature, Withdraw}, + AccountId, AccountMap, Address, BlockNumber, FranklinPriorityOp, FranklinTx, PrivateKey, + TokenId, }; // Local uses use plasma::state::PlasmaState; @@ -24,40 +25,49 @@ const ACCOUNTS_AMOUNT: AccountId = 10; const CURRENT_BLOCK: BlockNumber = 1_000; /// Creates a random ZKSync account. -fn generate_account() -> Account { +fn generate_account() -> (H256, PrivateKey, Account) { let default_balance = 1_000_000.into(); let rng = &mut thread_rng(); let sk = priv_key_from_fs(rng.gen()); + let eth_sk = H256::random(); + let address = PackedEthSignature::address_from_private_key(ð_sk) + .expect("Can't get address from the ETH secret key"); + let mut account = Account::default(); account.pub_key_hash = PubKeyHash::from_privkey(&sk); - account.address = Address::random(); + account.address = address; account.set_balance(ETH_TOKEN_ID, default_balance); - account + (eth_sk, sk, account) } /// Creates a `PlasmaState` object and fills it with accounts. 
-fn generate_state() -> PlasmaState { +fn generate_state() -> (HashMap, PlasmaState) { let mut accounts = AccountMap::default(); + let mut keys = HashMap::new(); for account_id in 0..ACCOUNTS_AMOUNT { - let new_account = generate_account(); + let (eth_sk, sk, new_account) = generate_account(); accounts.insert(account_id, new_account); + keys.insert(account_id, (sk, eth_sk)); } - PlasmaState::new(accounts, CURRENT_BLOCK) + let state = PlasmaState::new(accounts, CURRENT_BLOCK); + + (keys, state) } /// Bench for `PlasmaState::apply_transfer_to_new_op`. fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (keys, state) = generate_state(); + let (private_key, _) = keys.get(&0).expect("Can't get the private key"); let from_account = state.get_account(0).expect("Can't get the account"); - let transfer = Transfer { + let mut transfer = Transfer { from: from_account.address, to: Address::random(), token: ETH_TOKEN_ID, @@ -67,20 +77,18 @@ fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { signature: TxSignature::default(), }; - let transfer_op = TransferToNewOp { - tx: transfer, - from: 0, - to: ACCOUNTS_AMOUNT, - }; + transfer.signature = TxSignature::sign_musig_sha256(&private_key, &transfer.get_bytes()); - let setup = || (state.clone(), transfer_op.clone()); + let transfer_tx = FranklinTx::Transfer(Box::new(transfer)); + + let setup = || (state.clone(), transfer_tx.clone()); b.iter_batched( setup, - |(mut state, transfer_op)| { + |(mut state, transfer_tx)| { state - .apply_transfer_to_new_op(&black_box(transfer_op)) - .expect("Failed transfer operation"); + .execute_tx(black_box(transfer_tx)) + .expect("Failed to execute tx"); }, BatchSize::SmallInput, ); @@ -88,12 +96,13 @@ fn apply_transfer_to_new_op(b: &mut Bencher<'_>) { /// Bench for `PlasmaState::apply_transfer_op`. fn apply_transfer_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (keys, state) = generate_state(); + let (private_key, _) = keys.get(&0).expect("Can't get the private key"); let from_account = state.get_account(0).expect("Can't get the account"); let to_account = state.get_account(1).expect("Can't get the account"); - let transfer = Transfer { + let mut transfer = Transfer { from: from_account.address, to: to_account.address, token: ETH_TOKEN_ID, @@ -103,20 +112,18 @@ fn apply_transfer_op(b: &mut Bencher<'_>) { signature: TxSignature::default(), }; - let transfer_op = TransferOp { - tx: transfer, - from: 0, - to: 1, - }; + transfer.signature = TxSignature::sign_musig_sha256(&private_key, &transfer.get_bytes()); - let setup = || (state.clone(), transfer_op.clone()); + let transfer_tx = FranklinTx::Transfer(Box::new(transfer)); + + let setup = || (state.clone(), transfer_tx.clone()); b.iter_batched( setup, - |(mut state, transfer_op)| { + |(mut state, transfer_tx)| { state - .apply_transfer_op(&black_box(transfer_op)) - .expect("Failed transfer operation"); + .execute_tx(black_box(transfer_tx)) + .expect("Failed to execute tx"); }, BatchSize::SmallInput, ); @@ -124,27 +131,24 @@ fn apply_transfer_op(b: &mut Bencher<'_>) { /// Bench for `PlasmaState::apply_full_exit_op`. 
fn apply_full_exit_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (_, state) = generate_state(); - let to_account = state.get_account(0).expect("Can't get the account"); + let from_account = state.get_account(0).expect("Can't get the account"); let full_exit = FullExit { account_id: 0, - eth_address: Address::random(), + eth_address: from_account.address, token: ETH_TOKEN_ID, }; - let full_exit_op = FullExitOp { - priority_op: full_exit, - withdraw_amount: Some(to_account.get_balance(ETH_TOKEN_ID)), - }; + let full_exit_op = FranklinPriorityOp::FullExit(full_exit); let setup = || (state.clone(), full_exit_op.clone()); b.iter_batched( setup, |(mut state, full_exit_op)| { - let _ = state.apply_full_exit_op(&black_box(full_exit_op)); + let _ = state.execute_priority_op(black_box(full_exit_op)); }, BatchSize::SmallInput, ); @@ -152,7 +156,7 @@ fn apply_full_exit_op(b: &mut Bencher<'_>) { /// Bench for `PlasmaState::apply_deposit_op`. fn apply_deposit_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (_, state) = generate_state(); let to_account = state.get_account(0).expect("Can't get the account"); let deposit = Deposit { @@ -163,17 +167,14 @@ fn apply_deposit_op(b: &mut Bencher<'_>) { amount: 10.into(), }; - let deposit_op = DepositOp { - priority_op: deposit, - account_id: 0, - }; + let deposit_op = FranklinPriorityOp::Deposit(deposit); let setup = || (state.clone(), deposit_op.clone()); b.iter_batched( setup, |(mut state, deposit_op)| { - let _ = state.apply_deposit_op(&black_box(deposit_op)); + let _ = state.execute_priority_op(black_box(deposit_op)); }, BatchSize::SmallInput, ); @@ -181,11 +182,12 @@ fn apply_deposit_op(b: &mut Bencher<'_>) { /// Bench for `PlasmaState::apply_withdraw_op`. fn apply_withdraw_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (keys, state) = generate_state(); let from_account = state.get_account(0).expect("Can't get the account"); + let (private_key, _) = keys.get(&0).expect("Can't get the private key"); - let withdraw = Withdraw { + let mut withdraw = Withdraw { from: from_account.address, to: Address::random(), token: ETH_TOKEN_ID, @@ -195,81 +197,58 @@ fn apply_withdraw_op(b: &mut Bencher<'_>) { signature: TxSignature::default(), }; - let withdraw_op = WithdrawOp { - tx: withdraw, - account_id: 0, - }; + withdraw.signature = TxSignature::sign_musig_sha256(&private_key, &withdraw.get_bytes()); - let setup = || (state.clone(), withdraw_op.clone()); + let withdraw_tx = FranklinTx::Withdraw(Box::new(withdraw)); + + let setup = || (state.clone(), withdraw_tx.clone()); b.iter_batched( setup, - |(mut state, withdraw_op)| { - let _ = state.apply_withdraw_op(&black_box(withdraw_op)); + |(mut state, withdraw_tx)| { + let _ = state.execute_tx(black_box(withdraw_tx)); }, BatchSize::SmallInput, ); } -/// Bench for `PlasmaState::apply_close_op`. -fn apply_close_op(b: &mut Bencher<'_>) { - let mut state = generate_state(); - - let mut to_remove = state.get_account(0).expect("Can't get the account"); - - // Remove balance from the account to close. 
- to_remove.set_balance(ETH_TOKEN_ID, 0.into()); - state.insert_account(0, to_remove.clone()); - - let close = Close { - account: to_remove.address, - nonce: 0, - signature: TxSignature::default(), - }; - - let close_op = CloseOp { - tx: close, - account_id: 0, - }; - - let setup = || (state.clone(), close_op.clone()); + let setup = || (state.clone(), withdraw_tx.clone()); b.iter_batched( setup, - |(mut state, close_op)| { - let _ = state.apply_close_op(&black_box(close_op)); + |(mut state, withdraw_tx)| { + let _ = state.execute_tx(black_box(withdraw_tx)); }, BatchSize::SmallInput, ); } +// There is no bench for `PlasmaState::apply_close_op`, since closing accounts is currently disabled. + /// Bench for `PlasmaState::apply_change_pubkey_op`. fn apply_change_pubkey_op(b: &mut Bencher<'_>) { - let state = generate_state(); + let (keys, state) = generate_state(); let to_change = state.get_account(0).expect("Can't get the account"); + let (_, eth_private_key) = keys.get(&0).expect("Can't get the private key"); let rng = &mut thread_rng(); let new_sk = priv_key_from_fs(rng.gen()); + let nonce = 0; + + let eth_signature = { + let sign_bytes = ChangePubKey::get_eth_signed_data(nonce, &to_change.pub_key_hash) + .expect("Failed to construct ChangePubKey signed message."); + let eth_signature = + PackedEthSignature::sign(eth_private_key, &sign_bytes).expect("Signing failed"); + Some(eth_signature) + }; + let change_pubkey = ChangePubKey { account: to_change.address, new_pk_hash: PubKeyHash::from_privkey(&new_sk), - nonce: 0, - eth_signature: None, + nonce, + eth_signature, }; - let change_pubkey_op = ChangePubKeyOp { - tx: change_pubkey, - account_id: 0, - }; + let change_pubkey_tx = FranklinTx::ChangePubKey(Box::new(change_pubkey)); - let setup = || (state.clone(), change_pubkey_op.clone()); + let setup = || (state.clone(), change_pubkey_tx.clone()); b.iter_batched( setup, - |(mut state, change_pubkey_op)| { - let _ = state.apply_change_pubkey_op(&black_box(change_pubkey_op)); + |(mut state, change_pubkey_tx)| { + let _ = state.execute_tx(black_box(change_pubkey_tx)); }, BatchSize::SmallInput, ); @@ -280,9 +259,9 @@ fn apply_change_pubkey_op(b: &mut Bencher<'_>) { /// Bench for `PlasmaState::insert_account`. /// /// While this method is not directly performing an operation, it is used in every operation, /// and it seems to be the most expensive part of all the methods above. 
fn insert_account(b: &mut Bencher<'_>) { - let state = generate_state(); + let (_, state) = generate_state(); - let to_insert = generate_account(); + let (_, _, to_insert) = generate_account(); let setup = || (state.clone(), to_insert.clone()); b.iter_batched( @@ -308,7 +287,6 @@ pub fn bench_ops(c: &mut Criterion) { ); group.bench_function("PlasmaState::apply_transfer_op bench", apply_transfer_op); group.bench_function("PlasmaState::apply_withdraw_op bench", apply_withdraw_op); - group.bench_function("PlasmaState::apply_apply_close_op bench", apply_close_op); group.bench_function( "PlasmaState::apply_change_pubkey_op bench", apply_change_pubkey_op, From 2e0b48ad1c7e561b0ec97370e56a783e45d85be6 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Wed, 1 Apr 2020 16:41:52 +0300 Subject: [PATCH 182/186] Add fee endpoint --- core/models/src/lib.rs | 4 +- core/models/src/node/mod.rs | 114 ++++++++++++++++++ core/models/src/node/tx.rs | 4 + core/models/src/params.rs | 4 +- core/models/src/primitives.rs | 11 ++ core/server/src/api_server/rpc_server.rs | 67 +++++++++- core/server/src/bin/generate_exit_proof.rs | 37 ++---- core/server/src/eth_watch.rs | 14 +-- core/server/src/mempool.rs | 74 ++++++------ .../2019-06-24-124654_change_accounts/up.sql | 2 +- core/storage/src/data_restore/mod.rs | 14 +-- core/storage/src/tests/tokens.rs | 77 ++++++------ core/storage/src/tokens/mod.rs | 69 ++++++++--- core/storage/src/tokens/records.rs | 24 +++- etc/env/dev.env.example | 2 +- js/tests/simple-integration-test.ts | 27 +++-- js/zksync.js/src/provider.ts | 13 ++ 17 files changed, 403 insertions(+), 154 deletions(-) diff --git a/core/models/src/lib.rs b/core/models/src/lib.rs index a90e59d5f6..e09557b115 100644 --- a/core/models/src/lib.rs +++ b/core/models/src/lib.rs @@ -159,7 +159,7 @@ impl std::str::FromStr for ActionType { #[derive(Debug)] pub struct TokenAddedEvent { pub address: Address, - pub id: u32, + pub id: u16, } impl TryFrom for TokenAddedEvent { @@ -174,7 +174,7 @@ impl TryFrom for TokenAddedEvent { .remove(0) .to_uint() .as_ref() - .map(U256::as_u32) + .map(|id| id.as_u32() as u16) .unwrap(), }) } diff --git a/core/models/src/node/mod.rs b/core/models/src/node/mod.rs index b2edb6b9ba..d709fd1921 100644 --- a/core/models/src/node/mod.rs +++ b/core/models/src/node/mod.rs @@ -37,6 +37,22 @@ pub type PrivateKey = PrivateKeyImport; pub type PublicKey = PublicKeyImport; pub type Address = web3::types::Address; +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +#[serde(rename_all = "camelCase")] +#[serde(untagged)] +pub enum TokenLike { + Id(TokenId), + Address(Address), + Symbol(String), +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct Token { + pub id: TokenId, + pub address: Address, + pub symbol: String, +} + pub fn priv_key_from_fs(fs: Fs) -> PrivateKey { PrivateKeyImport(fs) } @@ -106,6 +122,16 @@ pub fn unpack_fee_amount(data: &[u8]) -> Option { .map(u128_to_bigdecimal) } +pub fn closest_packable_fee_amount(amount: &BigDecimal) -> BigDecimal { + let fee_packed = pack_fee_amount(&amount); + unpack_fee_amount(&fee_packed).expect("fee repacking") +} + +pub fn closest_packable_token_amount(amount: &BigDecimal) -> BigDecimal { + let fee_packed = pack_token_amount(&amount); + unpack_token_amount(&fee_packed).expect("token amount repacking") +} + #[cfg(test)] mod test { use super::*; @@ -142,4 +168,92 @@ mod test { &(max_mantissa_fee + BigDecimal::from(1)) )); } + + #[test] + fn pack_to_closest_packable() { + let fee = BigDecimal::from(1_234_123_424); + assert!( 
+ !is_fee_amount_packable(&fee), + "fee should not be packable for this test" + ); + let closest_packable_fee = closest_packable_fee_amount(&fee); + assert!( + is_fee_amount_packable(&closest_packable_fee), + "repacked fee should be packable" + ); + assert_ne!( + closest_packable_fee, + BigDecimal::from(0), + "repacked fee should not be 0" + ); + assert!( + closest_packable_fee < fee, + "packable fee should be less than original" + ); + println!( + "fee: original: {}, truncated: {}", + fee, closest_packable_fee + ); + + let token = BigDecimal::from(123_456_789_123_456_789u64); + assert!( + !is_token_amount_packable(&token), + "token should not be packable for this test" + ); + let closest_packable_token = closest_packable_token_amount(&token); + assert!( + is_token_amount_packable(&closest_packable_token), + "repacked token amount should be packable" + ); + assert_ne!( + closest_packable_token, + BigDecimal::from(0), + "repacked token should not be 0" + ); + assert!( + closest_packable_token < token, + "packable token should be less than original" + ); + println!( + "token: original: {}, packable: {}", + token, closest_packable_token + ); + } + + #[test] + fn token_like_serialization() { + #[derive(Debug, Serialize, Deserialize, PartialEq)] + struct Query { + token: TokenLike, + } + let test_cases = vec![ + ( + Query { + token: TokenLike::Address( + "c919467ee96806d584cae8d0b11504b26fedfbab".parse().unwrap(), + ), + }, + r#"{"token":"0xc919467ee96806d584cae8d0b11504b26fedfbab"}"#, + ), + ( + Query { + token: TokenLike::Symbol("ETH".to_string()), + }, + r#"{"token":"ETH"}"#, + ), + ( + Query { + token: TokenLike::Id(14), + }, + r#"{"token":14}"#, + ), + ]; + + for (query, json_str) in test_cases { + let ser = serde_json::to_string(&query).expect("ser"); + assert_eq!(ser, json_str); + let de = serde_json::from_str(&ser).expect("de"); + assert_eq!(query, de); + } + } } diff --git a/core/models/src/node/tx.rs b/core/models/src/node/tx.rs index a2fb897b2f..d10a27eebc 100644 --- a/core/models/src/node/tx.rs +++ b/core/models/src/node/tx.rs @@ -112,6 +112,8 @@ impl Transfer { pub fn check_correctness(&self) -> bool { self.from != self.to + && self.amount.is_integer() // TODO: remove after # 366 + && self.fee.is_integer() && is_token_amount_packable(&self.amount) && is_fee_amount_packable(&self.fee) && self.verify_signature().is_some() @@ -171,6 +173,8 @@ impl Withdraw { is_fee_amount_packable(&self.fee) && self.amount <= u128_to_bigdecimal(u128::max_value()) && self.verify_signature().is_some() + && self.amount.is_integer() // TODO: remove after # 366 + && self.fee.is_integer() } pub fn verify_signature(&self) -> Option { diff --git a/core/models/src/params.rs b/core/models/src/params.rs index e70370666a..b37df1e4ec 100644 --- a/core/models/src/params.rs +++ b/core/models/src/params.rs @@ -9,7 +9,6 @@ use crate::merkle_tree::pedersen_hasher::BabyPedersenHasher; use crate::node::TokenId; static mut ACCOUNT_TREE_DEPTH_VALUE: usize = 0; -// static mut ACCOUNT_TREE_DEPTH_VALUE: usize = 24; /// account_tree_depth. /// Value must be specified as environment variable at compile time under `ACCOUNT_TREE_DEPTH_VALUE` key. 
pub fn account_tree_depth() -> usize { @@ -47,7 +46,8 @@ pub const INPUT_DATA_EMPTY_BYTES_WIDTH: usize = 64; pub const INPUT_DATA_ROOT_HASH_BYTES_WIDTH: usize = 32; /// Balance tree depth -pub const BALANCE_TREE_DEPTH: usize = 5; +pub const BALANCE_TREE_DEPTH: usize = 8; +pub const MAX_SUPPORTED_TOKENS: usize = 1 << BALANCE_TREE_DEPTH; pub const TOKEN_BIT_WIDTH: usize = 16; /// Account tree depth diff --git a/core/models/src/primitives.rs b/core/models/src/primitives.rs index 8bed23d6bb..2ca9df388b 100644 --- a/core/models/src/primitives.rs +++ b/core/models/src/primitives.rs @@ -16,6 +16,7 @@ use web3::types::U256; use crate::circuit::utils::append_le_fixed_width; use crate::merkle_tree::{hasher::Hasher, pedersen_hasher::BabyPedersenHasher}; use crate::params; +use failure::_core::ops::Rem; // TODO: replace Vec with Iterator? @@ -410,9 +411,19 @@ pub fn pedersen_hash_tx_msg(msg: &[u8]) -> Vec { /// Its important to use this, instead of bit_decimal.to_u128() pub fn big_decimal_to_u128(big_decimal: &BigDecimal) -> u128 { + assert!(big_decimal.is_integer(), "big decimal should be integer"); big_decimal.to_string().parse().unwrap() } +// TODO: HACK remove after task #366 +pub fn floor_big_decimal(big_decimal: &BigDecimal) -> BigDecimal { + BigDecimal::from( + (big_decimal - big_decimal.rem(BigDecimal::from(1))) + .as_bigint_and_exponent() + .0, + ) +} + /// Its important to use this, instead of BigDecimal::from_u128() pub fn u128_to_bigdecimal(n: u128) -> BigDecimal { n.to_string().parse().unwrap() diff --git a/core/server/src/api_server/rpc_server.rs b/core/server/src/api_server/rpc_server.rs index 4da85d6307..5e6d23c3e8 100644 --- a/core/server/src/api_server/rpc_server.rs +++ b/core/server/src/api_server/rpc_server.rs @@ -11,10 +11,14 @@ use jsonrpc_http_server::ServerBuilder; use models::config_options::ThreadPanicNotify; use models::node::tx::TxEthSignature; use models::node::tx::TxHash; -use models::node::{Account, AccountId, FranklinTx, Nonce, PubKeyHash, TokenId}; +use models::node::{ + closest_packable_fee_amount, Account, AccountId, FranklinTx, Nonce, PubKeyHash, Token, TokenId, + TokenLike, +}; +use models::primitives::floor_big_decimal; use std::collections::HashMap; use std::net::SocketAddr; -use storage::{tokens::records::Token, ConnectionPool, StorageProcessor}; +use storage::{ConnectionPool, StorageProcessor}; use web3::types::Address; #[derive(Debug, Clone, Serialize, Deserialize, Default)] @@ -85,6 +89,12 @@ pub struct ContractAddressResp { pub gov_contract: String, } +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub enum TxFeeTypes { + Withdraw, + Transfer, +} + enum RpcErrorCodes { NonceMismatch = 101, IncorrectTx = 103, @@ -140,6 +150,13 @@ pub trait Rpc { /// "ETH" | #ERC20_ADDRESS => {Token} #[rpc(name = "tokens")] fn tokens(&self) -> Result>; + #[rpc(name = "get_tx_fee")] + fn get_tx_fee( + &self, + tx_type: TxFeeTypes, + amount: BigDecimal, + token_like: TokenLike, + ) -> Result; } pub struct RpcApp { @@ -345,6 +362,18 @@ impl Rpc for RpcApp { }) .collect()) } + + fn get_tx_fee( + &self, + _tx_type: TxFeeTypes, + amount: BigDecimal, + _token_like: TokenLike, + ) -> Result { + // first approximation - just give 1 percent + Ok(closest_packable_fee_amount(&floor_big_decimal( + &(amount / BigDecimal::from(100)), + ))) + } } pub fn start_rpc_server( @@ -373,3 +402,37 @@ pub fn start_rpc_server( }) .expect("JSON-RPC http thread"); } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn tx_fee_type_serialization() { + #[derive(Debug, Serialize, 
Deserialize, PartialEq)] + struct Query { + tx_type: TxFeeTypes, + } + + let cases = vec![ + ( + Query { + tx_type: TxFeeTypes::Withdraw, + }, + r#"{"tx_type":"Withdraw"}"#, + ), + ( + Query { + tx_type: TxFeeTypes::Transfer, + }, + r#"{"tx_type":"Transfer"}"#, + ), + ]; + for (query, json_str) in cases { + let ser = serde_json::to_string(&query).expect("ser"); + assert_eq!(ser, json_str); + let de = serde_json::from_str::(&ser).expect("de"); + assert_eq!(query, de); + } + } +} diff --git a/core/server/src/bin/generate_exit_proof.rs b/core/server/src/bin/generate_exit_proof.rs index 61fa0991d0..73aa9923dc 100644 --- a/core/server/src/bin/generate_exit_proof.rs +++ b/core/server/src/bin/generate_exit_proof.rs @@ -4,7 +4,7 @@ use bigdecimal::BigDecimal; use clap::{App, Arg}; use log::info; -use models::node::{Address, TokenId}; +use models::node::{Address, TokenId, TokenLike}; use models::EncodedProof; use serde::Serialize; use std::time::Instant; @@ -51,21 +51,9 @@ fn main() { .expect("Address should be valid account address") }; - let target_token_address = { + let target_token = { let token = cli.value_of("Token").expect("required argument"); - if token == "ETH" { - token.to_string() - } else { - let token_address_to_parse = if token.starts_with("0x") { - &token[2..] - } else { - token - }; - let address: Address = token_address_to_parse - .parse() - .expect("Token address should be valid ERC20 address"); - format!("0x{:x}", address) - } + serde_json::from_str::(token).expect("invalid token argument") }; let timer = Instant::now(); @@ -75,19 +63,12 @@ fn main() { .access_storage() .expect("Storage access failed"); - let token_id = if target_token_address == "ETH" { - 0 - } else { - let tokens = storage - .tokens_schema() - .load_tokens() - .expect("Failed to load token"); - tokens - .into_iter() - .find(|(_, token)| token.address == target_token_address) - .expect("Token not found") - .0 - }; + let token_id = storage + .tokens_schema() + .get_token(target_token) + .expect("Db access fail") + .expect("Token not found") + .id; let accounts = storage .chain() .state_schema() diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index 9f4c0422f4..ea7e7c2dfa 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -24,7 +24,7 @@ use models::abi::{eip1271_contract, governance_contract, zksync_contract}; use models::config_options::ConfigurationOptions; use models::misc::constants::EIP1271_SUCCESS_RETURN_VALUE; use models::node::tx::EIP1271Signature; -use models::node::{Nonce, PriorityOp, PubKeyHash, TokenId}; +use models::node::{Nonce, PriorityOp, PubKeyHash, Token, TokenId}; use models::params::PRIORITY_EXPIRATION; use models::TokenAddedEvent; use storage::ConnectionPool; @@ -273,12 +273,12 @@ impl EthWatch { self.db_pool .access_storage() .map(|storage| { - for (id, address) in &self.eth_state.tokens { - if let Err(e) = storage.tokens_schema().store_token( - *id, - &format!("0x{:x}", address), - &format!("ERC20-{}", id), - ) { + for (&id, &address) in &self.eth_state.tokens { + if let Err(e) = storage.tokens_schema().store_token(Token { + id, + address, + symbol: format!("ERC20-{}", id), + }) { warn!("Failed to add token to db: {:?}", e); } } diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs index 6c6118cb2e..a10c53ecb7 100644 --- a/core/server/src/mempool.rs +++ b/core/server/src/mempool.rs @@ -20,10 +20,10 @@ use futures::channel::{mpsc, oneshot}; use futures::{SinkExt, StreamExt}; use models::node::tx::TxEthSignature; use 
models::node::{ - AccountId, AccountUpdate, AccountUpdates, FranklinTx, Nonce, PriorityOp, TokenId, TransferOp, - TransferToNewOp, + AccountId, AccountUpdate, AccountUpdates, FranklinTx, Nonce, PriorityOp, Token, TokenId, + TokenLike, TransferOp, TransferToNewOp, }; -use models::params::max_block_chunk_size; +use models::params::{max_block_chunk_size, MAX_SUPPORTED_TOKENS}; use std::collections::{HashMap, VecDeque}; use storage::ConnectionPool; use tokio::runtime::Runtime; @@ -150,15 +150,16 @@ struct Mempool { mempool_state: MempoolState, requests: mpsc::Receiver, eth_watch_req: mpsc::Sender, - token_cache: TokenCache, + token_cache: TokenDBCache, } impl Mempool { fn token_symbol_from_id(&mut self, token_id: TokenId) -> Result { self.token_cache - .token_symbol_from_id(token_id) + .get_token(TokenLike::Id(token_id)) .or(Err(TxAddError::Other))? .ok_or(TxAddError::IncorrectTx) + .map(|t| t.symbol) } async fn add_tx( @@ -353,47 +354,44 @@ impl Mempool { } #[derive(Debug)] -struct TokenCache { +struct TokenDBCache { db_pool: ConnectionPool, - ids_to_symbols: HashMap, + // TODO: handle stale entries. (edge case when we rename token after adding it) + tokens: HashMap, } -// TODO: delete tokens from cache after timeout -impl TokenCache { +impl TokenDBCache { pub fn new(db_pool: ConnectionPool) -> Self { Self { db_pool, - ids_to_symbols: HashMap::new(), + tokens: HashMap::with_capacity(MAX_SUPPORTED_TOKENS), } } - pub fn token_symbol_from_id( - &mut self, - token_id: TokenId, - ) -> Result, failure::Error> { - match self.ids_to_symbols.get(&token_id).cloned() { - Some(token_symbol) => Ok(Some(token_symbol)), - None => { - let storage = self - .db_pool - .access_storage_fragile() - .map_err(|e| format_err!("Failed to access storage: {}", e))?; - - let loaded_tokens = storage - .tokens_schema() - .load_tokens() - .map_err(|e| format_err!("Tokens load failed: {}", e))?; - - let symbol_from_db = loaded_tokens.get(&token_id).map(|t| t.symbol.clone()); - - match symbol_from_db { - Some(symbol) => { - self.ids_to_symbols.insert(token_id, symbol.clone()); - Ok(Some(symbol)) - } - None => Ok(None), - } - } + pub fn get_token(&mut self, token_like: TokenLike) -> Result, failure::Error> { + let cached_value = match &token_like { + TokenLike::Id(token_id) => self.tokens.get(token_id), + TokenLike::Address(address) => self.tokens.values().find(|t| &t.address == address), + TokenLike::Symbol(symbol) => self.tokens.values().find(|t| &t.symbol == symbol), + }; + + if let Some(cached_value) = cached_value { + Ok(Some(cached_value.clone())) + } else { + let storage = self + .db_pool + .access_storage_fragile() + .map_err(|e| format_err!("Failed to access storage: {}", e))?; + + let db_token = storage + .tokens_schema() + .get_token(token_like) + .map_err(|e| format_err!("Tokens load failed: {}", e))?; + + Ok(db_token.map(|t| { + self.tokens.insert(t.id, t.clone()); + t + })) } } } @@ -406,7 +404,7 @@ pub fn run_mempool_task( ) { let mempool_state = MempoolState::restore_from_db(&db_pool); - let token_cache = TokenCache::new(db_pool); + let token_cache = TokenDBCache::new(db_pool); let mempool = Mempool { mempool_state, diff --git a/core/storage/migrations/2019-06-24-124654_change_accounts/up.sql b/core/storage/migrations/2019-06-24-124654_change_accounts/up.sql index 889ce9c161..b5fd1cd83a 100644 --- a/core/storage/migrations/2019-06-24-124654_change_accounts/up.sql +++ b/core/storage/migrations/2019-06-24-124654_change_accounts/up.sql @@ -12,7 +12,7 @@ CREATE TABLE tokens -- Add ETH token INSERT INTO tokens 
-values (0, '0000000000000000000000000000000000000000', 'ETH'); +values (0, '0x0000000000000000000000000000000000000000', 'ETH'); CREATE TABLE balances ( diff --git a/core/storage/src/data_restore/mod.rs b/core/storage/src/data_restore/mod.rs index e32752d75c..c2b0523ff8 100644 --- a/core/storage/src/data_restore/mod.rs +++ b/core/storage/src/data_restore/mod.rs @@ -4,7 +4,7 @@ use diesel::prelude::*; use itertools::Itertools; // Workspace imports use models::node::block::Block; -use models::node::{AccountId, AccountUpdate, BlockNumber, FranklinOp}; +use models::node::{AccountId, AccountUpdate, BlockNumber, FranklinOp, Token}; use models::{Operation, TokenAddedEvent}; // Local imports use self::records::{ @@ -62,12 +62,12 @@ impl<'a> DataRestoreSchema<'a> { self.0.conn().transaction(|| { StateSchema(self.0).update_block_events(block_events)?; - for token in token_events.iter() { - TokensSchema(self.0).store_token( - token.id as u16, - &format!("0x{:x}", token.address), - &format!("ERC20-{}", token.id), - )?; + for &TokenAddedEvent { id, address } in token_events.iter() { + TokensSchema(self.0).store_token(Token { + id, + address, + symbol: format!("ERC20-{}", id), + })?; } self.update_last_watched_block_number(last_watched_eth_number)?; diff --git a/core/storage/src/tests/tokens.rs b/core/storage/src/tests/tokens.rs index 032dbb141f..e136bcb1c4 100644 --- a/core/storage/src/tests/tokens.rs +++ b/core/storage/src/tests/tokens.rs @@ -2,10 +2,8 @@ // Workspace imports // Local imports use crate::tests::db_test; -use crate::{ - tokens::{records::Token, TokensSchema}, - StorageProcessor, -}; +use crate::{tokens::TokensSchema, StorageProcessor}; +use models::node::{Token, TokenLike}; /// Verifies the token save & load mechanism. #[test] @@ -18,28 +16,30 @@ fn tokens_storage() { .load_tokens() .expect("Load tokens query failed"); assert_eq!(tokens.len(), 1); - assert_eq!( - tokens[&0], - Token { - id: 0, - address: "0000000000000000000000000000000000000000".into(), - symbol: "ETH".into(), - } - ); + let eth_token = Token { + id: 0, + address: "0000000000000000000000000000000000000000".parse().unwrap(), + symbol: "ETH".into(), + }; + assert_eq!(tokens[&0], eth_token); // Add two tokens. - let token_a_id = 1; - let token_a_symbol = "ABC"; - let token_a_addr = "0000000000000000000000000000000000000001"; - let token_b_id = 2; - let token_b_symbol = "DEF"; - let token_b_addr = "0000000000000000000000000000000000000002"; + let token_a = Token { + id: 1, + address: "0000000000000000000000000000000000000001".parse().unwrap(), + symbol: "ABC".into(), + }; + let token_b = Token { + id: 2, + address: "0000000000000000000000000000000000000002".parse().unwrap(), + symbol: "DEF".into(), + }; TokensSchema(&conn) - .store_token(token_a_id, token_a_addr, token_a_symbol) + .store_token(token_a.clone()) .expect("Store tokens query failed"); TokensSchema(&conn) - .store_token(token_b_id, token_b_addr, token_b_symbol) + .store_token(token_b.clone()) .expect("Store tokens query failed"); // Load tokens again. 
@@ -48,22 +48,27 @@ fn tokens_storage() { .expect("Load tokens query failed"); assert_eq!(tokens.len(), 3); - assert_eq!( - tokens[&token_a_id], - Token { - id: token_a_id as i32, - address: token_a_addr.into(), - symbol: token_a_symbol.into(), - } - ); - assert_eq!( - tokens[&token_b_id], - Token { - id: token_b_id as i32, - address: token_b_addr.into(), - symbol: token_b_symbol.into(), - } - ); + assert_eq!(tokens[ð_token.id], eth_token); + assert_eq!(tokens[&token_a.id], token_a); + assert_eq!(tokens[&token_b.id], token_b); + + let token_b_by_id = TokensSchema(&conn) + .get_token(TokenLike::Id(token_b.id)) + .expect("get token query failed") + .expect("token by id not found"); + assert_eq!(token_b, token_b_by_id); + + let token_b_by_address = TokensSchema(&conn) + .get_token(TokenLike::Address(token_b.address)) + .expect("get token query failed") + .expect("token by address not found"); + assert_eq!(token_b, token_b_by_address); + + let token_b_by_symbol = TokensSchema(&conn) + .get_token(TokenLike::Symbol(token_b.symbol.clone())) + .expect("get token query failed") + .expect("token by symbol not found"); + assert_eq!(token_b, token_b_by_symbol); Ok(()) }); diff --git a/core/storage/src/tokens/mod.rs b/core/storage/src/tokens/mod.rs index db9be1a592..74517e912c 100644 --- a/core/storage/src/tokens/mod.rs +++ b/core/storage/src/tokens/mod.rs @@ -3,9 +3,9 @@ use std::collections::HashMap; // External imports use diesel::prelude::*; // Workspace imports -use models::node::TokenId; +use models::node::{Address, Token, TokenId, TokenLike}; // Local imports -use self::records::Token; +use self::records::DbToken; use crate::schema::*; use crate::StorageProcessor; @@ -16,14 +16,20 @@ pub mod records; #[derive(Debug)] pub struct TokensSchema<'a>(pub &'a StorageProcessor); +pub(self) fn address_to_stored_string(address: &Address) -> String { + format!("0x{:x}", address) +} + +pub(self) fn stored_str_address_to_address(address: &str) -> Address { + address[2..] + .parse() + .expect("failed to parse stored db address") +} + impl<'a> TokensSchema<'a> { /// Persists the token in the database. - pub fn store_token(&self, id: TokenId, address: &str, symbol: &str) -> QueryResult<()> { - let new_token = Token { - id: i32::from(id), - address: address.to_string(), - symbol: symbol.to_string(), - }; + pub fn store_token(&self, token: Token) -> QueryResult<()> { + let new_token: DbToken = token.into(); diesel::insert_into(tokens::table) .values(&new_token) .on_conflict(tokens::id) @@ -40,16 +46,45 @@ impl<'a> TokensSchema<'a> { pub fn load_tokens(&self) -> QueryResult> { let tokens = tokens::table .order(tokens::id.asc()) - .load::(self.0.conn())?; - Ok(tokens.into_iter().map(|t| (t.id as TokenId, t)).collect()) + .load::(self.0.conn())?; + Ok(tokens + .into_iter() + .map(|t| { + let token: Token = t.into(); + (token.id, token) + }) + .collect()) } - /// Given the numeric token ID, returns it's string symbol. - pub fn token_symbol_from_id(&self, token: TokenId) -> QueryResult> { - Ok(tokens::table - .find(i32::from(token)) - .first::(self.0.conn()) - .optional()? - .map(|t| t.symbol)) + /// Given the numeric token ID, symbol or address, returns token. 
+ pub fn get_token(&self, token_like: TokenLike) -> QueryResult> { + let db_token = match token_like { + TokenLike::Id(token_id) => tokens::table + .find(i32::from(token_id)) + .first::(self.0.conn()) + .optional(), + TokenLike::Address(token_address) => tokens::table + .filter(tokens::address.eq(address_to_stored_string(&token_address))) + .first::(self.0.conn()) + .optional(), + TokenLike::Symbol(token_symbol) => tokens::table + .filter(tokens::symbol.eq(token_symbol)) + .first::(self.0.conn()) + .optional(), + }?; + Ok(db_token.map(|t| t.into())) + } +} + +#[cfg(test)] +pub mod test { + use crate::tokens::{address_to_stored_string, stored_str_address_to_address}; + use models::node::Address; + + #[test] + fn address_store_roundtrip() { + let address = Address::random(); + let stored_address = address_to_stored_string(&address); + assert_eq!(address, stored_str_address_to_address(&stored_address)); } } diff --git a/core/storage/src/tokens/records.rs b/core/storage/src/tokens/records.rs index 628a584392..1ca12c394a 100644 --- a/core/storage/src/tokens/records.rs +++ b/core/storage/src/tokens/records.rs @@ -3,6 +3,7 @@ use serde_derive::{Deserialize, Serialize}; // Workspace imports // Local imports use crate::schema::*; +use models::node::{Token, TokenId}; #[derive( Debug, @@ -16,8 +17,29 @@ use crate::schema::*; PartialEq, )] #[table_name = "tokens"] -pub struct Token { + +pub struct DbToken { pub id: i32, pub address: String, pub symbol: String, } + +impl From for DbToken { + fn from(token: Token) -> Self { + Self { + id: token.id as i32, + address: super::address_to_stored_string(&token.address), + symbol: token.symbol, + } + } +} + +impl Into for DbToken { + fn into(self) -> Token { + Token { + id: self.id as TokenId, + address: super::stored_str_address_to_address(&self.address), + symbol: self.symbol, + } + } +} diff --git a/etc/env/dev.env.example b/etc/env/dev.env.example index 3cc9b71517..d54cb6f00e 100755 --- a/etc/env/dev.env.example +++ b/etc/env/dev.env.example @@ -69,7 +69,7 @@ BELLMAN_VERBOSE=1 KEY_DIR=keys/f3edd BLOCK_CHUNK_SIZES=10,50 -ACCOUNT_TREE_DEPTH=16 +ACCOUNT_TREE_DEPTH=24 TX_BATCH_SIZE=50 # Prover container kubernetes resources. diff --git a/js/tests/simple-integration-test.ts b/js/tests/simple-integration-test.ts index 0171d33327..b935631924 100644 --- a/js/tests/simple-integration-test.ts +++ b/js/tests/simple-integration-test.ts @@ -1,7 +1,7 @@ import { Wallet, Provider, - ETHProxy, getDefaultProvider, types, utils as zkutils + ETHProxy, types, utils as zkutils } from "zksync"; // HACK: using require as type system work-around const franklin_abi = require('../../contracts/build/Franklin.json'); @@ -13,7 +13,6 @@ import {IERC20_INTERFACE} from "zksync/build/utils"; const WEB3_URL = process.env.WEB3_URL; // Mnemonic for eth wallet. const MNEMONIC = process.env.TEST_MNEMONIC; -const ERC_20TOKEN = process.env.TEST_ERC20; const network = process.env.ETH_NETWORK == "localhost" ? "localhost" : "testnet"; console.log("Running integration test on the ", network, " network"); @@ -226,11 +225,12 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: const depositAmount = utils.parseEther(depositAmountETH); // we do two transfers to test transfer to new and ordinary transfer. 
- const transfersFee = depositAmount.div(25); - const transfersAmount = depositAmount.div(2).sub(transfersFee); + const transfersAmount = depositAmount.div(3); + const transfersFee = await syncProvider.getTransactionFee("Transfer", transfersAmount, token); - const withdrawFee = transfersAmount.div(20); - const withdrawAmount = transfersAmount.sub(withdrawFee); + + const withdrawAmount = transfersAmount.div(3); + const withdrawFee = await syncProvider.getTransactionFee("Withdraw", withdrawAmount, token); await testAutoApprovedDeposit(depositWallet, syncWallet1, token, depositAmount.div(2)); console.log(`Auto approved deposit ok, Token: ${token}`); @@ -251,6 +251,8 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: (async () => { try { syncProvider = await Provider.newWebsocketProvider(process.env.WS_API_ADDR); + const ERC20_ADDRESS = process.env.TEST_ERC20; + const ERC20_SYMBOL = syncProvider.tokenSet.resolveTokenSymbol(ERC20_ADDRESS); const ethProxy = new ETHProxy(ethersProvider, syncProvider.contractAddress); @@ -259,13 +261,13 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: "m/44'/60'/0'/0/0" ).connect(ethersProvider); const syncDepositorWallet = ethers.Wallet.createRandom().connect(ethersProvider); - await (await ethWallet.sendTransaction({to: syncDepositorWallet.address, value: parseEther("0.05")})).wait(); - const erc20contract = new Contract(ERC_20TOKEN, IERC20_INTERFACE, ethWallet); - await (await erc20contract.transfer(syncDepositorWallet.address, parseEther("0.05"))).wait(); + await (await ethWallet.sendTransaction({to: syncDepositorWallet.address, value: parseEther("0.5")})).wait(); + const erc20contract = new Contract(ERC20_ADDRESS, IERC20_INTERFACE, ethWallet); + await (await erc20contract.transfer(syncDepositorWallet.address, parseEther("0.1"))).wait(); const zksyncDepositorWallet = await Wallet.fromEthSigner(syncDepositorWallet, syncProvider); const syncWalletSigner = ethers.Wallet.createRandom().connect(ethersProvider); - await (await ethWallet.sendTransaction({to: syncWalletSigner.address, value: parseEther("0.01")})); + await (await ethWallet.sendTransaction({to: syncWalletSigner.address, value: parseEther("0.05")})); const syncWallet = await Wallet.fromEthSigner( syncWalletSigner, syncProvider, @@ -278,7 +280,7 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: ); const ethWallet2 = ethers.Wallet.createRandom().connect(ethersProvider); - await (await ethWallet.sendTransaction({to: ethWallet2.address, value: parseEther("0.01")})); + await (await ethWallet.sendTransaction({to: ethWallet2.address, value: parseEther("0.05")})); const syncWallet2 = await Wallet.fromEthSigner( ethWallet2, syncProvider, @@ -293,7 +295,8 @@ async function moveFunds(contract: Contract, ethProxy: ETHProxy, depositWallet: await testThrowingErrorOnTxFail(zksyncDepositorWallet); - await moveFunds(contract, ethProxy, zksyncDepositorWallet, syncWallet, syncWallet2, ERC_20TOKEN, "0.018"); + await moveFunds(contract, ethProxy, zksyncDepositorWallet, syncWallet, syncWallet2, ERC20_ADDRESS, "0.018"); + await moveFunds(contract, ethProxy, zksyncDepositorWallet, syncWallet, syncWallet2, ERC20_SYMBOL, "0.018"); await moveFunds(contract, ethProxy, zksyncDepositorWallet, syncWallet, syncWallet3, "ETH", "0.018"); await syncProvider.disconnect(); diff --git a/js/zksync.js/src/provider.ts b/js/zksync.js/src/provider.ts index e532ea0842..6bf7e02579 100644 --- a/js/zksync.js/src/provider.ts +++ 
b/js/zksync.js/src/provider.ts @@ -163,6 +163,19 @@ export class Provider { } } + async getTransactionFee( + txType: "Withdraw" | "Transfer", + amount: utils.BigNumberish, + tokenLike: TokenLike + ): Promise { + const transactionFee = await this.transport.request("get_tx_fee", [ + txType, + amount.toString(), + tokenLike + ]); + return utils.bigNumberify(transactionFee); + } + async disconnect() { return await this.transport.disconnect(); } From b80ddcfa4a0f1d01baf713bbcf322838a1157b1e Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Wed, 1 Apr 2020 17:33:35 +0300 Subject: [PATCH 183/186] resolve comments --- core/models/src/lib.rs | 6 +++--- core/models/src/node/mod.rs | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/core/models/src/lib.rs b/core/models/src/lib.rs index e09557b115..4fd5a9f7ed 100644 --- a/core/models/src/lib.rs +++ b/core/models/src/lib.rs @@ -21,8 +21,8 @@ pub use crypto_exports::franklin_crypto; pub use crypto_exports::rand; use crate::node::block::Block; -use crate::node::AccountUpdates; use crate::node::BlockNumber; +use crate::node::{AccountUpdates, TokenId}; use ethabi::{decode, ParamType}; use failure::format_err; use franklin_crypto::bellman::pairing::ff::{PrimeField, PrimeFieldRepr}; @@ -159,7 +159,7 @@ impl std::str::FromStr for ActionType { #[derive(Debug)] pub struct TokenAddedEvent { pub address: Address, - pub id: u16, + pub id: TokenId, } impl TryFrom for TokenAddedEvent { @@ -174,7 +174,7 @@ impl TryFrom for TokenAddedEvent { .remove(0) .to_uint() .as_ref() - .map(|id| id.as_u32() as u16) + .map(|id| id.as_u32() as TokenId) .unwrap(), }) } diff --git a/core/models/src/node/mod.rs b/core/models/src/node/mod.rs index d709fd1921..6628c08f25 100644 --- a/core/models/src/node/mod.rs +++ b/core/models/src/node/mod.rs @@ -40,6 +40,7 @@ pub type Address = web3::types::Address; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "camelCase")] #[serde(untagged)] +/// Order of the fields is important (from more specific types to less specific types) pub enum TokenLike { Id(TokenId), Address(Address), From 75b509f8ba808f97f63597e8ca67c157ea7392f0 Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Wed, 1 Apr 2020 18:05:39 +0300 Subject: [PATCH 184/186] resolve comments part 2 --- core/models/src/node/mod.rs | 14 ++++++++++++++ core/models/src/primitives.rs | 2 +- core/server/src/eth_watch.rs | 7 ++----- core/server/src/mempool.rs | 12 ++++++------ core/storage/src/data_restore/mod.rs | 7 ++----- core/storage/src/tokens/mod.rs | 27 +++------------------------ core/storage/src/tokens/records.rs | 5 +++-- core/storage/src/tokens/utils.rs | 24 ++++++++++++++++++++++++ 8 files changed, 55 insertions(+), 43 deletions(-) create mode 100644 core/storage/src/tokens/utils.rs diff --git a/core/models/src/node/mod.rs b/core/models/src/node/mod.rs index 6628c08f25..9a3e3827ab 100644 --- a/core/models/src/node/mod.rs +++ b/core/models/src/node/mod.rs @@ -48,12 +48,26 @@ pub enum TokenLike { } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +/// Token supported in ZK Sync protocol pub struct Token { + /// id is used for tx signature and serialization pub id: TokenId, + /// Contract address of ERC20 token or Address::zero() for "ETH" pub address: Address, + /// Token symbol (e.g. 
"ETH" or "USDC") pub symbol: String, } +impl Token { + pub fn new(id: TokenId, address: Address, symbol: &str) -> Self { + Self { + id, + address, + symbol: symbol.to_string(), + } + } +} + pub fn priv_key_from_fs(fs: Fs) -> PrivateKey { PrivateKeyImport(fs) } diff --git a/core/models/src/primitives.rs b/core/models/src/primitives.rs index 2ca9df388b..df63d58c58 100644 --- a/core/models/src/primitives.rs +++ b/core/models/src/primitives.rs @@ -1,5 +1,6 @@ // Built-in deps use std::convert::TryInto; +use std::ops::Rem; use std::str::FromStr; // External deps use crate::franklin_crypto::bellman::pairing::bn256::Bn256; @@ -16,7 +17,6 @@ use web3::types::U256; use crate::circuit::utils::append_le_fixed_width; use crate::merkle_tree::{hasher::Hasher, pedersen_hasher::BabyPedersenHasher}; use crate::params; -use failure::_core::ops::Rem; // TODO: replace Vec with Iterator? diff --git a/core/server/src/eth_watch.rs b/core/server/src/eth_watch.rs index ea7e7c2dfa..55d45ea833 100644 --- a/core/server/src/eth_watch.rs +++ b/core/server/src/eth_watch.rs @@ -274,11 +274,8 @@ impl EthWatch { .access_storage() .map(|storage| { for (&id, &address) in &self.eth_state.tokens { - if let Err(e) = storage.tokens_schema().store_token(Token { - id, - address, - symbol: format!("ERC20-{}", id), - }) { + let token = Token::new(id, address, &format!("ERC20-{}", id)); + if let Err(e) = storage.tokens_schema().store_token(token) { warn!("Failed to add token to db: {:?}", e); } } diff --git a/core/server/src/mempool.rs b/core/server/src/mempool.rs index a10c53ecb7..e4ae6be7ae 100644 --- a/core/server/src/mempool.rs +++ b/core/server/src/mempool.rs @@ -23,7 +23,7 @@ use models::node::{ AccountId, AccountUpdate, AccountUpdates, FranklinTx, Nonce, PriorityOp, Token, TokenId, TokenLike, TransferOp, TransferToNewOp, }; -use models::params::{max_block_chunk_size, MAX_SUPPORTED_TOKENS}; +use models::params::max_block_chunk_size; use std::collections::{HashMap, VecDeque}; use storage::ConnectionPool; use tokio::runtime::Runtime; @@ -364,7 +364,7 @@ impl TokenDBCache { pub fn new(db_pool: ConnectionPool) -> Self { Self { db_pool, - tokens: HashMap::with_capacity(MAX_SUPPORTED_TOKENS), + tokens: HashMap::new(), } } @@ -388,10 +388,10 @@ impl TokenDBCache { .get_token(token_like) .map_err(|e| format_err!("Tokens load failed: {}", e))?; - Ok(db_token.map(|t| { - self.tokens.insert(t.id, t.clone()); - t - })) + if let Some(token) = &db_token { + self.tokens.insert(token.id, token.clone()); + } + Ok(db_token) } } } diff --git a/core/storage/src/data_restore/mod.rs b/core/storage/src/data_restore/mod.rs index c2b0523ff8..ce408e68d4 100644 --- a/core/storage/src/data_restore/mod.rs +++ b/core/storage/src/data_restore/mod.rs @@ -63,11 +63,8 @@ impl<'a> DataRestoreSchema<'a> { StateSchema(self.0).update_block_events(block_events)?; for &TokenAddedEvent { id, address } in token_events.iter() { - TokensSchema(self.0).store_token(Token { - id, - address, - symbol: format!("ERC20-{}", id), - })?; + let token = Token::new(id, address, &format!("ERC20-{}", id)); + TokensSchema(self.0).store_token(token)?; } self.update_last_watched_block_number(last_watched_eth_number)?; diff --git a/core/storage/src/tokens/mod.rs b/core/storage/src/tokens/mod.rs index 74517e912c..fb47689ba6 100644 --- a/core/storage/src/tokens/mod.rs +++ b/core/storage/src/tokens/mod.rs @@ -3,29 +3,21 @@ use std::collections::HashMap; // External imports use diesel::prelude::*; // Workspace imports -use models::node::{Address, Token, TokenId, TokenLike}; +use 
models::node::{Token, TokenId, TokenLike};
 // Local imports
 use self::records::DbToken;
 use crate::schema::*;
+use crate::tokens::utils::address_to_stored_string;
 use crate::StorageProcessor;
 
 pub mod records;
+mod utils;
 
 /// Tokens schema handles the `tokens` table, providing methods to
 /// get and store new tokens.
 #[derive(Debug)]
 pub struct TokensSchema<'a>(pub &'a StorageProcessor);
 
-pub(self) fn address_to_stored_string(address: &Address) -> String {
-    format!("0x{:x}", address)
-}
-
-pub(self) fn stored_str_address_to_address(address: &str) -> Address {
-    address[2..]
-        .parse()
-        .expect("failed to parse stored db address")
-}
-
 impl<'a> TokensSchema<'a> {
     /// Persists the token in the database.
     pub fn store_token(&self, token: Token) -> QueryResult<()> {
@@ -75,16 +67,3 @@ impl<'a> TokensSchema<'a> {
         Ok(db_token.map(|t| t.into()))
     }
 }
-
-#[cfg(test)]
-pub mod test {
-    use crate::tokens::{address_to_stored_string, stored_str_address_to_address};
-    use models::node::Address;
-
-    #[test]
-    fn address_store_roundtrip() {
-        let address = Address::random();
-        let stored_address = address_to_stored_string(&address);
-        assert_eq!(address, stored_str_address_to_address(&stored_address));
-    }
-}
diff --git a/core/storage/src/tokens/records.rs b/core/storage/src/tokens/records.rs
index 1ca12c394a..066f5a48e2 100644
--- a/core/storage/src/tokens/records.rs
+++ b/core/storage/src/tokens/records.rs
@@ -3,6 +3,7 @@ use serde_derive::{Deserialize, Serialize};
 // Workspace imports
 // Local imports
 use crate::schema::*;
+use crate::tokens::utils::{address_to_stored_string, stored_str_address_to_address};
 use models::node::{Token, TokenId};
 
 #[derive(
@@ -28,7 +29,7 @@ impl From<Token> for DbToken {
     fn from(token: Token) -> Self {
         Self {
             id: token.id as i32,
-            address: super::address_to_stored_string(&token.address),
+            address: address_to_stored_string(&token.address),
             symbol: token.symbol,
         }
     }
@@ -38,7 +39,7 @@ impl Into<Token> for DbToken {
     fn into(self) -> Token {
         Token {
             id: self.id as TokenId,
-            address: super::stored_str_address_to_address(&self.address),
+            address: stored_str_address_to_address(&self.address),
             symbol: self.symbol,
         }
     }
diff --git a/core/storage/src/tokens/utils.rs b/core/storage/src/tokens/utils.rs
new file mode 100644
index 0000000000..8ada9ec562
--- /dev/null
+++ b/core/storage/src/tokens/utils.rs
@@ -0,0 +1,24 @@
+use models::node::Address;
+
+pub fn address_to_stored_string(address: &Address) -> String {
+    format!("0x{:x}", address)
+}
+
+pub fn stored_str_address_to_address(address: &str) -> Address {
+    assert_eq!(address.len(), 42, "db stored token address length");
+    address[2..]
+ .parse() + .expect("failed to parse stored db address") +} + +#[cfg(test)] +pub mod test { + use super::*; + + #[test] + fn address_store_roundtrip() { + let address = Address::random(); + let stored_address = address_to_stored_string(&address); + assert_eq!(address, stored_str_address_to_address(&stored_address)); + } +} From c13d3baefb5289b8feff24778afb220cff772c6d Mon Sep 17 00:00:00 2001 From: Vitalii Drohan Date: Wed, 1 Apr 2020 19:21:07 +0300 Subject: [PATCH 185/186] replace env chmod +x --- bin/replace-env-variable.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 bin/replace-env-variable.py diff --git a/bin/replace-env-variable.py b/bin/replace-env-variable.py old mode 100644 new mode 100755 From bc2b252e8b0a8fd0c27b845b0a9108f8267eb23e Mon Sep 17 00:00:00 2001 From: furkhat Date: Thu, 2 Apr 2020 12:08:51 +0300 Subject: [PATCH 186/186] workon DRONE_COMMIT_SHA --- .drone.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.drone.yml b/.drone.yml index bcdcf213e9..71317cb145 100644 --- a/.drone.yml +++ b/.drone.yml @@ -231,6 +231,7 @@ steps: STAGE_ENV_BASE64: from_secret: stage_env_base64 commands: + - git checkout ${DRONE_COMMIT_SHA} - export ZKSYNC_HOME=`pwd` - export PATH=$ZKSYNC_HOME/bin:$PATH - export CARGO_HOME=$ZKSYNC_HOME/target/cargo @@ -367,6 +368,10 @@ trigger: - promote steps: +- name: check out to commit + image: matterlabs/ci + commands: + - git checkout ${DRONE_COMMIT_SHA} - name: check-images-exist image: docker environment:
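A minimal usage sketch for the getTransactionFee method added to Provider in the provider.ts change above. It assumes the js/zksync.js build is imported under the package name "zksync", that WS_API_ADDR points to a running zkSync websocket API, and that TEST_ERC20 holds the address of a deployed ERC20 token; the token argument may be a symbol, an L1 address, or a numeric id, mirroring the TokenLike variants introduced in core/models.

import { Provider } from "zksync";   // assumed package entry point for js/zksync.js
import { utils } from "ethers";

async function printFees() {
    // Connect to the zkSync operator over websockets, as the integration test does.
    const provider = await Provider.newWebsocketProvider(process.env.WS_API_ADDR);
    try {
        const amount = utils.parseEther("0.1");

        // The token may be referenced by symbol, by L1 address, or by numeric id (TokenLike).
        const transferFee = await provider.getTransactionFee("Transfer", amount, "ETH");
        const withdrawFee = await provider.getTransactionFee("Withdraw", amount, process.env.TEST_ERC20);

        // Fees come back as BigNumbers in the token's smallest units.
        console.log(`Transfer fee (ETH): ${utils.formatEther(transferFee)}`);
        console.log(`Withdraw fee (ERC20, smallest units): ${withdrawFee.toString()}`);
    } finally {
        await provider.disconnect();
    }
}

printFees().catch(console.error);

Fetching fees from the server this way is what the updated integration test relies on: the earlier hard-coded divisions (depositAmount.div(25), transfersAmount.div(20)) are replaced with values returned by get_tx_fee, so the test stays valid when the server's fee policy changes.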