diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..f129e606f
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,9 @@
+# Contribution Guidelines
+
+Thank you for considering helping out with the source code! We are extremely grateful for any consideration of
+contributions to this repository. However, at this time, we generally do not accept external contributions. This policy
+will change in the future, so please check back regularly for updates.
+
+For security issues, please contact us at [security@matterlabs.dev](mailto:security@matterlabs.dev).
+
+Thank you for your support in accelerating the mass adoption of crypto for personal sovereignty!
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644
index 000000000..2739ea6e2
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Matter Labs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..90a38a316
--- /dev/null
+++ b/README.md
@@ -0,0 +1,30 @@
+# zkSync Era: Smart Contracts
+
+[](https://zksync.io/)
+
+zkSync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or
+decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum projects can redeploy without refactoring
+or re-auditing a single line of code. zkSync Era also uses an LLVM-based compiler that will eventually let developers
+write smart contracts in C++, Rust and other popular languages.
+
+This repository contains both L1 and L2 zkSync smart contracts. For their description see the
+[system overview](docs/Overview.md).
+
+## Disclaimer
+
+This repository is used as a submodule of a private repo. Compilation and test scripts should work without additional
+tooling, but other scripts may not.
+
+## License
+
+zkSync Era contracts are distributed under the terms of the MIT license.
+
+See [LICENSE-MIT](LICENSE-MIT) for details.
+
+## Official Links
+
+- [Website](https://zksync.io/)
+- [GitHub](https://github.com/matter-labs)
+- [Twitter](https://twitter.com/zksync)
+- [Twitter for Devs](https://twitter.com/zkSyncDevs)
+- [Discord](https://discord.gg/px2ar7w)
diff --git a/SystemConfig.json b/SystemConfig.json
new file mode 100644
index 000000000..7c1e56625
--- /dev/null
+++ b/SystemConfig.json
@@ -0,0 +1,19 @@
+{
+ "L2_TX_MAX_GAS_LIMIT": 80000000,
+ "MAX_PUBDATA_PER_BLOCK": 110000,
+ "PRIORITY_TX_MAX_PUBDATA": 99000,
+ "FAIR_L2_GAS_PRICE": 500000000,
+ "L1_GAS_PER_PUBDATA_BYTE": 17,
+ "BLOCK_OVERHEAD_L2_GAS": 1200000,
+ "BLOCK_OVERHEAD_L1_GAS": 1000000,
+ "MAX_TRANSACTIONS_IN_BLOCK": 1024,
+ "BOOTLOADER_TX_ENCODING_SPACE": 519017,
+ "L1_TX_INTRINSIC_L2_GAS": 167157,
+ "L1_TX_INTRINSIC_PUBDATA": 88,
+ "L1_TX_MIN_L2_GAS_BASE": 173484,
+ "L1_TX_DELTA_544_ENCODING_BYTES": 1656,
+ "L1_TX_DELTA_FACTORY_DEPS_L2_GAS": 2473,
+ "L1_TX_DELTA_FACTORY_DEPS_PUBDATA": 64,
+ "MAX_NEW_FACTORY_DEPS": 32,
+ "DEFAULT_L2_GAS_PRICE_PER_PUBDATA": 800
+}
diff --git a/docs/Overview.md b/docs/Overview.md
new file mode 100644
index 000000000..11d789dfb
--- /dev/null
+++ b/docs/Overview.md
@@ -0,0 +1,274 @@
+# Overview
+
+zkSync Era is a permissionless general-purpose ZK rollup. Similar to many L1 blockchains and sidechains, it enables
+deployment of and interaction with Turing-complete smart contracts.
+
+- L2 smart contracts are executed on a zkEVM.
+- zkEVM bytecode is different from the L1 EVM.
+- There are Solidity and Vyper compilers for L2 smart contracts.
+- There is a standard way to pass messages between L1 and L2. That is a part of the protocol.
+- There is no escape hatch mechanism yet, but there will be one.
+
+All data that is needed to restore the L2 state is also pushed on-chain. There are two approaches: publishing the
+inputs of L2 transactions on-chain, or publishing the state transition diff. zkSync follows the second option.
+
+See the [documentation](https://v2-docs.zksync.io/dev/fundamentals/rollups.html) to read more!
+
+## Glossary
+
+- **Governor** - privileged address that controls the upgradability of the network and sets other privileged addresses.
+- **Validator/Operator** - a privileged address that can commit/verify/execute L2 blocks.
+- **Facet** - implementation contract. The word comes from the EIP-2535.
+- **Security council** - a set of trusted addresses that can decrease the upgrade timelock.
+- **Gas** - a unit that measures the amount of computational effort required to execute specific operations on the
+ zkSync v2 network.
+
+### L1 Smart contracts
+
+#### Diamond
+
+Technically, this L1 smart contract acts as a connector between Ethereum (L1) and zkSync (L2). This contract checks the
+validity proof and data availability, handles L2 <-> L1 communication, finalizes the L2 state transition, and more.
+
+There are also important contracts deployed on the L2 that can execute additional logic, the so-called _system
+contracts_. Using L2 <-> L1 communication, they can affect both the L1 and the L2.
+
+#### DiamondProxy
+
+The main contract uses the [EIP-2535](https://eips.ethereum.org/EIPS/eip-2535) diamond proxy pattern. It is an in-house
+implementation inspired by the [mudgen reference implementation](https://github.com/mudgen/Diamond). It has no external
+functions, only a fallback that delegates a call to one of the facets (target/implementation contract). So even the
+upgrade system is a separate facet that can be replaced.
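+
+For intuition, the sketch below (not the actual implementation; `facets` is a hypothetical selector-to-facet mapping)
+shows the shape of such a delegating fallback:
+
+```solidity
+contract DiamondProxySketch {
+    // Hypothetical mapping: function selector => facet (implementation) address
+    mapping(bytes4 => address) internal facets;
+
+    fallback() external payable {
+        address facet = facets[msg.sig];
+        require(facet != address(0), "facet not found");
+        assembly {
+            // Copy calldata, delegatecall the facet, and bubble up the result
+            calldatacopy(0, 0, calldatasize())
+            let result := delegatecall(gas(), facet, 0, calldatasize(), 0, 0)
+            returndatacopy(0, 0, returndatasize())
+            switch result
+            case 0 {
+                revert(0, returndatasize())
+            }
+            default {
+                return(0, returndatasize())
+            }
+        }
+    }
+}
+```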
+
+One of the differences from the reference implementation is access freezability. Each facet has an associated parameter
+that indicates whether it is possible to freeze access to that facet. Privileged actors can freeze the **diamond** (not
+a specific facet!), and all facets marked `isFreezable` become inaccessible until the governor unfreezes the diamond.
+Note that freezing is very dangerous: since the diamond proxy can freeze the upgrade system, the diamond could end up
+frozen forever.
+
+#### DiamondInit
+
+It is a one-function contract that implements the logic of initializing a diamond proxy. It is called only once, in the
+diamond constructor, and is not saved in the diamond as a facet.
+
+Implementation detail: the function returns a magic value, just like the design in
+[EIP-1271](https://eips.ethereum.org/EIPS/eip-1271), but the magic value is 32 bytes in size.
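+
+As a rough illustration (the contract and constant names here are assumptions, not the actual code), such an
+initializer might look like:
+
+```solidity
+contract DiamondInitSketch {
+    // Illustrative 32-byte magic value returned on successful initialization
+    bytes32 internal constant INITIALIZE_SUCCESS_MAGIC = keccak256("diamond.init.success");
+
+    function initialize(/* initialization parameters */) external returns (bytes32) {
+        // ... write the initial state into the diamond storage ...
+        return INITIALIZE_SUCCESS_MAGIC;
+    }
+}
+```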
+
+#### DiamondCutFacet
+
+This smart contract manages the freezing/unfreezing and upgrades of the diamond proxy. That being said, this facet
+must never be frozen itself.
+
+Currently, freezing and unfreezing are implemented as access control functions. They are fully controlled by the
+governor but can be changed later. The governor can call `freezeDiamond` to freeze the diamond and `unfreezeDiamond`
+to restore it.
+
+Another purpose of `DiamondCutFacet` is to upgrade the facets. The upgrading is split into 2-3 phases:
+
+- `proposeTransparentUpgrade`/`proposeShadowUpgrade` - propose an upgrade with visible/hidden parameters.
+- `cancelUpgradeProposal` - cancel the upgrade proposal.
+- `securityCouncilUpgradeApprove` - approve the upgrade by the security council.
+- `executeUpgrade` - finalize the upgrade.
+
+The upgrade itself is characterized by three variables:
+
+- `facetCuts` - a set of changes to the facets (adding new facets, removing facets, and replacing them).
+- pair `(address _initAddress, bytes _calldata)` for initializing the upgrade by making a delegate call to
+ `_initAddress` with `_calldata` inputs.
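+
+A rough sketch of how these parameters might be grouped (the actual struct layout in the contracts may differ):
+
+```solidity
+enum Action {
+    Add,
+    Replace,
+    Remove
+}
+
+struct FacetCut {
+    address facet; // target facet address (zero address when removing)
+    Action action; // Add / Replace / Remove
+    bool isFreezable; // whether the facet can be frozen together with the diamond
+    bytes4[] selectors; // function selectors affected by this cut
+}
+
+struct DiamondCutData {
+    FacetCut[] facetCuts; // set of changes to the facets
+    address initAddress; // delegate-call target used to initialize the upgrade
+    bytes initCalldata; // calldata passed to the initialization delegate call
+}
+```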
+
+#### GettersFacet
+
+A separate facet whose only purpose is to provide `view` and `pure` methods. It also implements
+[diamond loupe](https://eips.ethereum.org/EIPS/eip-2535#diamond-loupe), which makes managing facets easier.
+
+#### GovernanceFacet
+
+Controls changing the privileged addresses, such as the governor and validators, as well as system parameters (L2
+bootloader bytecode hash, verifier address, verifier parameters, etc.).
+
+At the current stage, the governor has permission to instantly change the key system parameters with `GovernanceFacet`.
+Later such functionality will be removed and changing system parameters will be possible only via Diamond upgrade (see
+_DiamondCutFacet_).
+
+#### MailboxFacet
+
+The facet that handles L2 <-> L1 communication, an overview for which can be found in
+[docs](https://v2-docs.zksync.io/dev/developer-guides/bridging/l1-l2-interop.html).
+
+The Mailbox performs three functions:
+
+- L1 <-> L2 communication.
+- Bridging native Ether to the L2.
+- Censorship resistance mechanism (not yet implemented).
+
+L1 -> L2 communication is implemented as requesting an L2 transaction on L1 and executing it on L2. This means a user
+can call a function on the L1 contract to save the data about the transaction in a queue. Later on, a validator can
+process the transaction on L2 and mark it as processed in the L1 priority queue. Currently, it is used for sending
+information from L1 to L2 or for implementing multi-layer protocols.
+
+_NOTE_: When a user requests a transaction from L1, the initiated transaction on L2 will have the following `msg.sender`:
+
+```solidity
+ address sender = msg.sender;
+ if (sender != tx.origin) {
+ sender = AddressAliasHelper.applyL1ToL2Alias(msg.sender);
+ }
+```
+
+where
+
+```solidity
+uint160 constant offset = uint160(0x1111000000000000000000000000000000001111);
+
+function applyL1ToL2Alias(address l1Address) internal pure returns (address l2Address) {
+ unchecked {
+ l2Address = address(uint160(l1Address) + offset);
+ }
+}
+
+```
+
+The L1 -> L2 communication is also used for bridging ether. The user should include a `msg.value` when initiating a
+transaction request on the L1 contract. Before executing a transaction on L2, the specified address will be credited
+with the funds. To withdraw funds, the user should call the `withdraw` function on the `L2EtherToken` system contract.
+This will burn the funds on L2, allowing the user to reclaim them through the `finalizeEthWithdrawal` function on the
+`MailboxFacet`.
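+
+As a sketch (the interface below is assumed from the way `requestL2Transaction` is called in `L1ERC20Bridge.sol`; the
+split of `msg.value` between the bridged amount and the L2 transaction fee is simplified), bridging Ether from an L1
+contract could look like this:
+
+```solidity
+interface IMailboxSketch {
+    function requestL2Transaction(
+        address _contractL2,
+        uint256 _l2Value,
+        bytes calldata _calldata,
+        uint256 _l2GasLimit,
+        uint256 _l2GasPerPubdataByte,
+        bytes[] calldata _factoryDeps,
+        address _refundRecipient
+    ) external payable returns (bytes32 txHash);
+}
+
+contract EthBridgingSketch {
+    IMailboxSketch public immutable mailbox;
+
+    constructor(IMailboxSketch _mailbox) {
+        mailbox = _mailbox;
+    }
+
+    /// Requests an L2 transaction that credits `_amount` of Ether to `_l2Receiver`.
+    /// The attached `msg.value` is assumed to cover `_amount` plus the L2 transaction fee.
+    function bridgeEth(
+        address _l2Receiver,
+        uint256 _amount,
+        uint256 _l2GasLimit,
+        uint256 _gasPerPubdata
+    ) external payable returns (bytes32 txHash) {
+        txHash = mailbox.requestL2Transaction{value: msg.value}(
+            _l2Receiver, // account to credit on L2
+            _amount, // L2 value to be credited
+            "", // empty calldata: plain Ether transfer
+            _l2GasLimit,
+            _gasPerPubdata,
+            new bytes[](0), // no factory dependencies
+            msg.sender // refund recipient
+        );
+    }
+}
+```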
+
+L2 -> L1 communication, in contrast to L1 -> L2 communication, is based only on transferring the information, and not on
+the transaction execution on L1.
+
+From the L2 side, there is a special zkEVM opcode that saves `l2ToL1Log` in the L2 block. A validator will send all
+`l2ToL1Logs` when sending an L2 block to the L1 (see `ExecutorFacet`). Later on, users will be able to both read their
+`l2ToL1Logs` on L1 and _prove_ that they were sent.
+
+From the L1 side, for each L2 block, a Merkle root with such logs in its leaves is calculated. Thus, a user can provide
+a Merkle proof for each `l2ToL1Log`.
+
+_NOTE_: For each executed L1 -> L2 transaction, the system program necessarily sends an L2 -> L1 log. To verify the
+execution status, the user may use `proveL1ToL2TransactionStatus`.
+
+_NOTE_: The `l2ToL1Log` structure consists of fixed-size fields! Because of this, it is inconvenient to send a lot of
+data from L2 and to prove on L1 that it was sent using only an `l2ToL1Log`. To send a variable-length message, we use
+this trick:
+
+- One of the system contracts accepts an arbitrary length message and sends a fixed length message with parameters
+ `senderAddress == this`, `marker == true`, `key == msg.sender`, `value == keccak256(message)`.
+- The contract on L1 accepts all sent messages and if the message came from this system contract it requires that the
+ preimage of `value` be provided.
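+
+On the L2 side, sending such an arbitrary-length message boils down to a single call to the messenger system contract.
+A minimal sketch, using the `IL2Messenger` interface and messenger address declared in `L2ContractHelper.sol`:
+
+```solidity
+interface IL2Messenger {
+    function sendToL1(bytes memory _message) external returns (bytes32);
+}
+
+uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15
+IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08));
+
+contract L2MessageSenderSketch {
+    /// Sends an arbitrary-length message; the system contract emits a fixed-size
+    /// l2ToL1Log with key == msg.sender and value == keccak256(_message)
+    function sendMessage(bytes calldata _message) external returns (bytes32 messageHash) {
+        messageHash = L2_MESSENGER.sendToL1(_message);
+    }
+}
+```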
+
+#### ExecutorFacet
+
+A contract that accepts L2 blocks, enforces data availability and checks the validity of zk-proofs.
+
+The state transition is divided into three stages:
+
+- `commitBlocks` - check L2 block timestamp, process the L2 logs, save data for a block, and prepare data for zk-proof.
+- `proveBlocks` - validate zk-proof.
+- `executeBlocks` - finalize the state, marking L1 -> L2 communication processing, and saving Merkle tree with L2 logs.
+
+When a block is committed, we process L2 -> L1 logs. Here are the invariants that are expected there:
+
+- The only L2 -> L1 log from the `L2_SYSTEM_CONTEXT_ADDRESS`, with the `key == l2BlockTimestamp` and
+ `value == l2BlockHash`.
+- Several (or none) logs from the `L2_KNOWN_CODE_STORAGE_ADDRESS` with the `key == bytecodeHash`, where bytecode is
+ marked as a known factory dependency.
+- Several (or none) logs from the `L2_BOOTLOADER_ADDRESS` with the `key == canonicalTxHash` where `canonicalTxHash` is a
+ hash of processed L1 -> L2 transaction.
+- Several (or none) logs from the `L2_TO_L1_MESSENGER` with the `value == hashedMessage` where `hashedMessage` is a hash
+  of an arbitrary-length message that is sent from L2.
+- No logs from other addresses (this may be changed in the future).
+
+#### Bridges
+
+Bridges are completely separate contracts from the Diamond. They are a wrapper for L1 <-> L2 communication on contracts
+on both L1 and L2. Upon locking assets on one layer, a request is sent to mint these bridged assets on the other layer.
+Upon burning assets on one layer, a request is sent to unlock them on the other.
+
+Unlike native Ether bridging, all other assets can be bridged by a custom implementation relying on the trustless
+L1 <-> L2 communication.
+
+##### L1ERC20Bridge
+
+- `deposit` - lock funds inside the contract and send a request to mint bridged assets on L2.
+- `claimFailedDeposit` - unlock funds if the deposit was initiated but then failed on L2.
+- `finalizeWithdrawal` - unlock funds for the valid withdrawal request from L2.
+
+##### L2ERC20Bridge
+
+- `withdraw` - initiate a withdrawal by burning funds on the contract and sending a corresponding message to L1.
+- `finalizeDeposit` - finalize the deposit and mint funds on L2.
+
+#### Allowlist
+
+An auxiliary contract that controls the permission access list. It is used in the bridges and the diamond proxy to
+control which addresses can interact with them in the Alpha release.
+
+### L2 specifics
+
+#### Deployment
+
+The L2 deployment process is different from Ethereum.
+
+On L1, deployment always goes through one of two opcodes, `create` and `create2`, each of which provides its own
+address derivation. The parameter of these opcodes is the so-called "init bytecode" - a bytecode that returns the
+bytecode to be deployed. This works well on L1 but is suboptimal for L2.
+
+In the case of L2, there are also two ways to deploy contracts - `create` and `create2`. However, the expected input
+parameters for `create` and `create2` are different: they accept the hash of the bytecode rather than the full
+bytecode. Therefore, users pay less for contract creation and don't need to send the full contract code over the
+network upon deployment.
+
+A good question could be, _how does the validator know the preimage of the bytecode hashes to execute the code?_ Here
+comes the concept of factory dependencies! Factory dependencies are a list of bytecode hashes whose preimages were
+shown on L1 (data is always available). Only such bytecode hashes can be deployed; others cannot. Note that they can be
+added to the system by either an L2 transaction or L1 -> L2 communication, where you can specify the full bytecode and
+the system will mark it as known and allow you to deploy it.
+
+Besides that, due to the bytecode differences between L1 and L2 contracts, address derivation is also different. This
+applies to both `create` and `create2` and means that contracts deployed on L1 cannot collide with contracts deployed
+on L2. Please note that EOA address derivation is the same as on Ethereum.
+
+Thus:
+
+- L2 contracts are deployed by bytecode hash, not by full bytecode
+- Factory dependencies - list of bytecode hashes that can be deployed on L2
+- Address derivation for `create`/`create2` on L1 and L2 is different
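+
+Concretely, the zkSync `create2` derivation replaces the EVM formula
+`keccak256(0xff ++ sender ++ salt ++ keccak256(initCode))` with a prefix-tagged hash over the bytecode hash and the
+hash of the constructor input, as in `L2ContractHelper.computeCreate2Address` from this repository:
+
+```solidity
+// Mirrors L2ContractHelper.computeCreate2Address from this repository
+bytes32 constant CREATE2_PREFIX = keccak256("zksyncCreate2");
+
+function computeL2Create2Address(
+    address _sender,
+    bytes32 _salt,
+    bytes32 _bytecodeHash,
+    bytes32 _constructorInputHash
+) pure returns (address) {
+    bytes32 senderBytes = bytes32(uint256(uint160(_sender)));
+    bytes32 data = keccak256(
+        bytes.concat(CREATE2_PREFIX, senderBytes, _salt, _bytecodeHash, _constructorInputHash)
+    );
+    return address(uint160(uint256(data)));
+}
+```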
+
+### Withdrawal/Deposit Limitation
+
+It was decided to limit the amount of funds that can be deposited into and withdrawn from the protocol.
+
+#### Withdrawal Limitation
+
+In case a malicious user manages to illegally mint tokens on L2, there should be a limitation preventing them from
+withdrawing all the funds on L1. The current plan is to enforce the withdrawal limitation at the protocol level. In
+other words, it is not allowed to withdraw more than some percentage of the protocol balance of the defined tokens
+every day. Through a governance transaction, it is possible to add tokens to the withdrawal limitation list and also
+define the percentage that is allowed to be withdrawn daily.
+
+```solidity
+struct Withdrawal {
+ bool withdrawalLimitation;
+ uint256 withdrawalFactor;
+}
+
+```
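+
+For example, as implemented in `L1ERC20Bridge._verifyWithdrawalLimit`, a `withdrawalFactor` of 10 with a bridge balance
+of 1,000 tokens allows at most 100 tokens (10%) to be withdrawn within a single daily window; further withdrawals in
+that window revert until the window resets.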
+
+#### Deposit Limitation
+
+To be on the safe side, the amount of deposits is also going to be limited. This limitation is applied at the account
+level and is not time-based. In other words, each account cannot deposit more than the defined cap. The tokens and the
+cap can be set through a governance transaction. Moreover, there is a whitelisting mechanism as well (only some
+whitelisted accounts can call some specific functions). So, the combination of the deposit limitation and whitelisting
+limits the deposits of each whitelisted account to the defined cap.
+
+```solidity
+struct Deposit {
+ bool depositLimitation;
+ uint256 depositCap;
+}
+
+```
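+
+For example, as implemented in `L1ERC20Bridge._verifyDepositLimit`, with a `depositCap` of 500 tokens an account that
+has already deposited 450 tokens can deposit at most 50 more; successfully claiming a failed deposit decreases the
+account's accumulated total again.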
+
+See the [documentation](https://v2-docs.zksync.io/dev/developer-guides/contracts/contracts.html#solidity-vyper-support)
+to read more!
diff --git a/eraLogo.svg b/eraLogo.svg
new file mode 100644
index 000000000..5af0f3a0c
--- /dev/null
+++ b/eraLogo.svg
@@ -0,0 +1,37 @@
+
+
+
diff --git a/ethereum/.env b/ethereum/.env
new file mode 100644
index 000000000..a4612d155
--- /dev/null
+++ b/ethereum/.env
@@ -0,0 +1,3 @@
+CHAIN_ETH_NETWORK=localhost
+CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT=72000000
+ETH_CLIENT_WEB3_URL=http://127.0.0.1:8545
diff --git a/ethereum/.gitignore b/ethereum/.gitignore
new file mode 100644
index 000000000..bb5ecb310
--- /dev/null
+++ b/ethereum/.gitignore
@@ -0,0 +1,6 @@
+/build
+/artifacts
+/cache
+/typechain
+node_modules
+
diff --git a/ethereum/.solhint.json b/ethereum/.solhint.json
new file mode 100644
index 000000000..2561c314e
--- /dev/null
+++ b/ethereum/.solhint.json
@@ -0,0 +1,8 @@
+{
+ "extends": "solhint:default",
+ "plugins": ["prettier"],
+ "rules": {
+ "prettier/prettier": "error",
+ "no-inline-assembly": false
+ }
+}
diff --git a/ethereum/contracts/bridge/L1ERC20Bridge.sol b/ethereum/contracts/bridge/L1ERC20Bridge.sol
new file mode 100644
index 000000000..930a840e2
--- /dev/null
+++ b/ethereum/contracts/bridge/L1ERC20Bridge.sol
@@ -0,0 +1,381 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/token/ERC20/extensions/IERC20Metadata.sol";
+import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol";
+
+import "./interfaces/IL1Bridge.sol";
+import "./interfaces/IL2Bridge.sol";
+
+import "../zksync/interfaces/IMailbox.sol";
+import "../common/interfaces/IAllowList.sol";
+import "../common/AllowListed.sol";
+import "../common/libraries/UnsafeBytes.sol";
+import "../common/ReentrancyGuard.sol";
+import "../common/L2ContractHelper.sol";
+import "../vendor/AddressAliasHelper.sol";
+
+/// @author Matter Labs
+/// @notice Smart contract that allows depositing ERC20 tokens from Ethereum to zkSync v2.0
+/// @dev It is standard implementation of ERC20 Bridge that can be used as a reference
+/// for any other custom token bridges.
+contract L1ERC20Bridge is IL1Bridge, AllowListed, ReentrancyGuard {
+ using SafeERC20 for IERC20;
+
+ /// @dev The smart contract that manages the list with permission to call contract functions
+ IAllowList immutable allowList;
+
+    /// @dev zkSync smart contract used to operate with L2 via asynchronous L2 <-> L1 communication
+ IMailbox immutable zkSyncMailbox;
+
+ /// @dev The L2 gas limit for requesting L1 -> L2 transaction of deploying L2 bridge instance
+ /// NOTE: this constant will be accurately calculated in the future.
+ uint256 constant DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT = $(PRIORITY_TX_MAX_GAS_LIMIT);
+
+ /// @dev The default l2GasPricePerPubdata to be used in bridges.
+ uint256 constant DEFAULT_L2_GAS_PRICE_PER_PUBDATA = $(DEFAULT_L2_GAS_PRICE_PER_PUBDATA);
+
+ /// @dev A mapping L2 block number => message number => flag
+ /// @dev Used to indicate that zkSync L2 -> L1 message was already processed
+ mapping(uint256 => mapping(uint256 => bool)) public isWithdrawalFinalized;
+
+ /// @dev A mapping account => L1 token address => L2 deposit transaction hash => amount
+    /// @dev Used for saving the amount of deposited funds, to claim them in case the deposit transaction fails
+ mapping(address => mapping(address => mapping(bytes32 => uint256))) depositAmount;
+
+ /// @dev The address of deployed L2 bridge counterpart
+ address public l2Bridge;
+
+ /// @dev The address of the factory that deploys proxy for L2 tokens
+ address public l2TokenFactory;
+
+ /// @dev The bytecode hash of the L2 token contract
+ bytes32 public l2ProxyTokenBytecodeHash;
+
+ /// @dev A mapping L1 token address => the most recent withdrawal time and amount reset
+ mapping(address => uint256) public lastWithdrawalLimitReset;
+
+ /// @dev A mapping L1 token address => the accumulated withdrawn amount during the withdrawal limit window
+ mapping(address => uint256) public withdrawnAmountInWindow;
+
+ /// @dev The accumulated deposited amount per user.
+ /// @dev A mapping L1 token address => user address => the total deposited amount by the user
+ mapping(address => mapping(address => uint256)) public totalDepositedAmountPerUser;
+
+ /// @dev Contract is expected to be used as proxy implementation.
+ /// @dev Initialize the implementation to prevent Parity hack.
+ constructor(IMailbox _mailbox, IAllowList _allowList) reentrancyGuardInitializer {
+ zkSyncMailbox = _mailbox;
+ allowList = _allowList;
+ }
+
+ /// @dev Initializes a contract bridge for later use. Expected to be used in the proxy
+ /// @dev During initialization deploys L2 bridge counterpart as well as provides some factory deps for it
+ /// @param _factoryDeps A list of raw bytecodes that are needed for deployment of the L2 bridge
+ /// @notice _factoryDeps[0] == a raw bytecode of L2 bridge implementation
+ /// @notice _factoryDeps[1] == a raw bytecode of proxy that is used as L2 bridge
+ /// @notice _factoryDeps[2] == a raw bytecode of token proxy
+ /// @param _l2TokenFactory Pre-calculated address of L2 token beacon proxy
+ /// @notice At the time of the function call, it is not yet deployed in L2, but knowledge of its address
+ /// @notice is necessary for determining L2 token address by L1 address, see `l2TokenAddress(address)` function
+ /// @param _governor Address which can change L2 token implementation and upgrade the bridge
+ function initialize(
+ bytes[] calldata _factoryDeps,
+ address _l2TokenFactory,
+ address _governor
+ ) external reentrancyGuardInitializer {
+ require(_l2TokenFactory != address(0), "nf");
+ require(_governor != address(0), "nh");
+ // We are expecting to see the exact three bytecodes that are needed to initialize the bridge
+ require(_factoryDeps.length == 3, "mk");
+ l2ProxyTokenBytecodeHash = L2ContractHelper.hashL2Bytecode(_factoryDeps[2]);
+ l2TokenFactory = _l2TokenFactory;
+
+ bytes32 l2BridgeImplementationBytecodeHash = L2ContractHelper.hashL2Bytecode(_factoryDeps[0]);
+ bytes32 l2BridgeProxyBytecodeHash = L2ContractHelper.hashL2Bytecode(_factoryDeps[1]);
+
+ // Deploy L2 bridge implementation contract
+ address bridgeImplementationAddr = _requestDeployTransaction(
+ l2BridgeImplementationBytecodeHash,
+ "", // Empty constructor data
+ _factoryDeps // All factory deps are needed for L2 bridge
+ );
+
+ // Prepare the proxy constructor data
+ bytes memory l2BridgeProxyConstructorData;
+ {
+ // Data to be used in delegate call to initialize the proxy
+ bytes memory proxyInitializationParams = abi.encodeCall(
+ IL2Bridge.initialize,
+ (address(this), l2ProxyTokenBytecodeHash, _governor)
+ );
+ l2BridgeProxyConstructorData = abi.encode(bridgeImplementationAddr, _governor, proxyInitializationParams);
+ }
+
+ // Deploy L2 bridge proxy contract
+ l2Bridge = _requestDeployTransaction(
+ l2BridgeProxyBytecodeHash,
+ l2BridgeProxyConstructorData,
+            new bytes[](0) // No factory deps are needed for the L2 bridge proxy, because they were already passed in the previous step
+ );
+ }
+
+ /// @notice Requests L2 transaction that will deploy a contract with a given bytecode hash and constructor data.
+    /// NOTE: the deployment is always performed via create2 with a ZERO salt
+ /// @param _bytecodeHash The hash of the bytecode of the contract to be deployed
+ /// @param _constructorData The data to be passed to the contract constructor
+ /// @param _factoryDeps A list of raw bytecodes that are needed for deployment
+ function _requestDeployTransaction(
+ bytes32 _bytecodeHash,
+ bytes memory _constructorData,
+ bytes[] memory _factoryDeps
+ ) internal returns (address deployedAddress) {
+ bytes memory deployCalldata = abi.encodeCall(
+ IContractDeployer.create2,
+ (bytes32(0), _bytecodeHash, _constructorData)
+ );
+ zkSyncMailbox.requestL2Transaction(
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ deployCalldata,
+ DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT,
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ msg.sender
+ );
+
+ deployedAddress = L2ContractHelper.computeCreate2Address(
+ // Apply the alias to the address of the bridge contract, to get the `msg.sender` in L2.
+ AddressAliasHelper.applyL1ToL2Alias(address(this)),
+ bytes32(0), // Zero salt
+ _bytecodeHash,
+ keccak256(_constructorData)
+ );
+ }
+
+ /// @notice Initiates a deposit by locking funds on the contract and sending the request
+ /// of processing an L2 transaction where tokens would be minted
+ /// @param _l2Receiver The account address that should receive funds on L2
+ /// @param _l1Token The L1 token address which is deposited
+ /// @param _amount The total amount of tokens to be bridged
+ /// @param _l2TxGasLimit The L2 gas limit to be used in the corresponding L2 transaction
+ /// @param _l2TxGasPerPubdataByte The gasPerPubdataByteLimit to be used in the corresponding L2 transaction
+ /// @return txHash The L2 transaction hash of deposit finalization
+ function deposit(
+ address _l2Receiver,
+ address _l1Token,
+ uint256 _amount,
+ uint256 _l2TxGasLimit,
+ uint256 _l2TxGasPerPubdataByte
+ ) external payable nonReentrant senderCanCallFunction(allowList) returns (bytes32 txHash) {
+ require(_amount != 0, "2T"); // empty deposit amount
+ uint256 amount = _depositFunds(msg.sender, IERC20(_l1Token), _amount);
+ require(amount == _amount, "1T"); // The token has non-standard transfer logic
+ // verify the deposit amount is allowed
+ _verifyDepositLimit(_l1Token, msg.sender, _amount, false);
+
+ bytes memory l2TxCalldata = _getDepositL2Calldata(msg.sender, _l2Receiver, _l1Token, amount);
+ txHash = zkSyncMailbox.requestL2Transaction{value: msg.value}(
+ l2Bridge,
+ 0, // L2 msg.value
+ l2TxCalldata,
+ _l2TxGasLimit,
+ _l2TxGasPerPubdataByte,
+ new bytes[](0),
+ msg.sender
+ );
+
+ // Save the deposited amount to claim funds on L1 if the deposit failed on L2
+ depositAmount[msg.sender][_l1Token][txHash] = amount;
+
+ emit DepositInitiated(msg.sender, _l2Receiver, _l1Token, amount);
+ }
+
+ /// @dev Transfers tokens from the depositor address to the smart contract address
+    /// @return The difference between the contract balance before and after transferring the funds
+ function _depositFunds(
+ address _from,
+ IERC20 _token,
+ uint256 _amount
+ ) internal returns (uint256) {
+ uint256 balanceBefore = _token.balanceOf(address(this));
+ _token.safeTransferFrom(_from, address(this), _amount);
+ uint256 balanceAfter = _token.balanceOf(address(this));
+
+ return balanceAfter - balanceBefore;
+ }
+
+ /// @dev Generate a calldata for calling the deposit finalization on the L2 bridge contract
+ function _getDepositL2Calldata(
+ address _l1Sender,
+ address _l2Receiver,
+ address _l1Token,
+ uint256 _amount
+ ) internal view returns (bytes memory txCalldata) {
+ bytes memory gettersData = _getERC20Getters(_l1Token);
+
+ txCalldata = abi.encodeCall(
+ IL2Bridge.finalizeDeposit,
+ (_l1Sender, _l2Receiver, _l1Token, _amount, gettersData)
+ );
+ }
+
+ /// @dev Receives and parses (name, symbol, decimals) from the token contract
+ function _getERC20Getters(address _token) internal view returns (bytes memory data) {
+ (, bytes memory data1) = _token.staticcall(abi.encodeCall(IERC20Metadata.name, ()));
+ (, bytes memory data2) = _token.staticcall(abi.encodeCall(IERC20Metadata.symbol, ()));
+ (, bytes memory data3) = _token.staticcall(abi.encodeCall(IERC20Metadata.decimals, ()));
+ data = abi.encode(data1, data2, data3);
+ }
+
+    /// @dev Withdraw funds from the initiated deposit that failed when finalizing on L2
+ /// @param _depositSender The address of the deposit initiator
+ /// @param _l1Token The address of the deposited L1 ERC20 token
+ /// @param _l2TxHash The L2 transaction hash of the failed deposit finalization
+ /// @param _l2BlockNumber The L2 block number where the deposit finalization was processed
+ /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message
+ /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent
+ /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction with deposit finalization
+ function claimFailedDeposit(
+ address _depositSender,
+ address _l1Token,
+ bytes32 _l2TxHash,
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes32[] calldata _merkleProof
+ ) external nonReentrant senderCanCallFunction(allowList) {
+ bool proofValid = zkSyncMailbox.proveL1ToL2TransactionStatus(
+ _l2TxHash,
+ _l2BlockNumber,
+ _l2MessageIndex,
+ _l2TxNumberInBlock,
+ _merkleProof,
+ TxStatus.Failure
+ );
+ require(proofValid, "yn");
+
+ uint256 amount = depositAmount[_depositSender][_l1Token][_l2TxHash];
+ require(amount > 0, "y1");
+
+ // Change the total deposited amount by the user
+ _verifyDepositLimit(_l1Token, _depositSender, amount, true);
+
+ delete depositAmount[_depositSender][_l1Token][_l2TxHash];
+ // Withdraw funds
+ IERC20(_l1Token).safeTransfer(_depositSender, amount);
+
+ emit ClaimedFailedDeposit(_depositSender, _l1Token, amount);
+ }
+
+ /// @notice Finalize the withdrawal and release funds
+ /// @param _l2BlockNumber The L2 block number where the withdrawal was processed
+ /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message
+ /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent
+ /// @param _message The L2 withdraw data, stored in an L2 -> L1 message
+ /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization
+ function finalizeWithdrawal(
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes calldata _message,
+ bytes32[] calldata _merkleProof
+ ) external nonReentrant senderCanCallFunction(allowList) {
+ require(!isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex], "pw");
+
+ L2Message memory l2ToL1Message = L2Message({
+ txNumberInBlock: _l2TxNumberInBlock,
+ sender: l2Bridge,
+ data: _message
+ });
+
+ (address l1Receiver, address l1Token, uint256 amount) = _parseL2WithdrawalMessage(l2ToL1Message.data);
+        // Check that the withdrawal does not exceed the current withdrawal limit
+ _verifyWithdrawalLimit(l1Token, amount);
+ // Preventing the stack too deep error
+ {
+ bool success = zkSyncMailbox.proveL2MessageInclusion(
+ _l2BlockNumber,
+ _l2MessageIndex,
+ l2ToL1Message,
+ _merkleProof
+ );
+ require(success, "nq");
+ }
+
+ isWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex] = true;
+ // Withdraw funds
+ IERC20(l1Token).safeTransfer(l1Receiver, amount);
+
+ emit WithdrawalFinalized(l1Receiver, l1Token, amount);
+ }
+
+ /// @dev Decode the withdraw message that came from L2
+ function _parseL2WithdrawalMessage(bytes memory _l2ToL1message)
+ internal
+ pure
+ returns (
+ address l1Receiver,
+ address l1Token,
+ uint256 amount
+ )
+ {
+ // Check that the message length is correct.
+ // It should be equal to the length of the function signature + address + address + uint256 = 4 + 20 + 20 + 32 = 76 (bytes).
+ require(_l2ToL1message.length == 76, "kk");
+
+ (uint32 functionSignature, uint256 offset) = UnsafeBytes.readUint32(_l2ToL1message, 0);
+ require(bytes4(functionSignature) == this.finalizeWithdrawal.selector, "nt");
+
+ (l1Receiver, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset);
+ (l1Token, offset) = UnsafeBytes.readAddress(_l2ToL1message, offset);
+ (amount, offset) = UnsafeBytes.readUint256(_l2ToL1message, offset);
+ }
+
+    /// @dev Verify that the withdrawal does not exceed the withdrawal limit for the token
+ function _verifyWithdrawalLimit(address _l1Token, uint256 _amount) internal {
+ IAllowList.Withdrawal memory limitData = IAllowList(allowList).getTokenWithdrawalLimitData(_l1Token);
+        if (!limitData.withdrawalLimitation) return; // no withdrawal limitation is placed for this token
+ if (block.timestamp > lastWithdrawalLimitReset[_l1Token] + 1 days) {
+ // The _amount should be <= %withdrawalFactor of balance
+ require(_amount <= (limitData.withdrawalFactor * IERC20(_l1Token).balanceOf(address(this))) / 100, "w1");
+            withdrawnAmountInWindow[_l1Token] = _amount; // resetting the withdrawn amount
+ lastWithdrawalLimitReset[_l1Token] = block.timestamp;
+ } else {
+ // The _amount + withdrawn amount should be <= %withdrawalFactor of balance
+ require(
+ _amount + withdrawnAmountInWindow[_l1Token] <=
+ (limitData.withdrawalFactor * IERC20(_l1Token).balanceOf(address(this))) / 100,
+ "w2"
+ );
+ withdrawnAmountInWindow[_l1Token] += _amount; // accumulate the withdrawn amount for this token
+ }
+ }
+
+    /// @dev Verify that the deposit does not exceed the deposit cap for the token
+ function _verifyDepositLimit(
+ address _l1Token,
+ address _depositor,
+ uint256 _amount,
+ bool _claiming
+ ) internal {
+ IAllowList.Deposit memory limitData = IAllowList(allowList).getTokenDepositLimitData(_l1Token);
+ if (!limitData.depositLimitation) return; // no deposit limitation is placed for this token
+
+ if (_claiming) {
+ totalDepositedAmountPerUser[_l1Token][_depositor] -= _amount;
+ } else {
+ require(totalDepositedAmountPerUser[_l1Token][_depositor] + _amount <= limitData.depositCap, "d1");
+ totalDepositedAmountPerUser[_l1Token][_depositor] += _amount;
+ }
+ }
+
+ /// @return The L2 token address that would be minted for deposit of the given L1 token
+ function l2TokenAddress(address _l1Token) public view returns (address) {
+ bytes32 constructorInputHash = keccak256(abi.encode(address(l2TokenFactory), ""));
+ bytes32 salt = bytes32(uint256(uint160(_l1Token)));
+
+ return L2ContractHelper.computeCreate2Address(l2Bridge, salt, l2ProxyTokenBytecodeHash, constructorInputHash);
+ }
+}
diff --git a/ethereum/contracts/bridge/interfaces/IL1Bridge.sol b/ethereum/contracts/bridge/interfaces/IL1Bridge.sol
new file mode 100644
index 000000000..56e2b8307
--- /dev/null
+++ b/ethereum/contracts/bridge/interfaces/IL1Bridge.sol
@@ -0,0 +1,44 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import {IMailbox, L2Log, L2Message} from "../../zksync/interfaces/IZkSync.sol";
+
+/// @author Matter Labs
+interface IL1Bridge {
+ event DepositInitiated(address indexed from, address indexed to, address indexed l1Token, uint256 amount);
+
+ event WithdrawalFinalized(address indexed to, address indexed l1Token, uint256 amount);
+
+ event ClaimedFailedDeposit(address indexed to, address indexed l1Token, uint256 amount);
+
+ function isWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool);
+
+ function deposit(
+ address _l2Receiver,
+ address _l1Token,
+ uint256 _amount,
+ uint256 _l2TxGasLimit,
+ uint256 _l2TxGasPerPubdataByte
+ ) external payable returns (bytes32 txHash);
+
+ function claimFailedDeposit(
+ address _depositSender,
+ address _l1Token,
+ bytes32 _l2TxHash,
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes32[] calldata _merkleProof
+ ) external;
+
+ function finalizeWithdrawal(
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes calldata _message,
+ bytes32[] calldata _merkleProof
+ ) external;
+
+ function l2TokenAddress(address _l1Token) external view returns (address);
+}
diff --git a/ethereum/contracts/bridge/interfaces/IL2Bridge.sol b/ethereum/contracts/bridge/interfaces/IL2Bridge.sol
new file mode 100644
index 000000000..859f7015d
--- /dev/null
+++ b/ethereum/contracts/bridge/interfaces/IL2Bridge.sol
@@ -0,0 +1,32 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/// @author Matter Labs
+interface IL2Bridge {
+ function initialize(
+ address _l1Bridge,
+ bytes32 _l2TokenProxyBytecodeHash,
+ address _governor
+ ) external;
+
+ function finalizeDeposit(
+ address _l1Sender,
+ address _l2Receiver,
+ address _l1Token,
+ uint256 _amount,
+ bytes calldata _data
+ ) external;
+
+ function withdraw(
+ address _l1Receiver,
+ address _l2Token,
+ uint256 _amount
+ ) external;
+
+ function l1TokenAddress(address _l2Token) external view returns (address);
+
+ function l2TokenAddress(address _l1Token) external view returns (address);
+
+ function l1Bridge() external view returns (address);
+}
diff --git a/ethereum/contracts/common/AllowList.sol b/ethereum/contracts/common/AllowList.sol
new file mode 100644
index 000000000..8bb5d1132
--- /dev/null
+++ b/ethereum/contracts/common/AllowList.sol
@@ -0,0 +1,172 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/access/Ownable2Step.sol";
+
+import "./interfaces/IAllowList.sol";
+import "./libraries/UncheckedMath.sol";
+
+/// @author Matter Labs
+/// @notice The smart contract that stores the permissions to call functions on different contracts.
+/// @dev The contract is fully controlled by the owner, who can grant and revoke any permissions at any time.
+/// @dev The permission list has three different modes:
+/// - Closed. The contract can NOT be called by anyone.
+/// - SpecialAccessOnly. Only some contract functions can be called by specifically granted addresses.
+/// - Public. Any caller can call any function on the target contract.
+contract AllowList is IAllowList, Ownable2Step {
+ using UncheckedMath for uint256;
+
+ /// @notice The Access mode by which it is decided whether the caller has access
+ mapping(address => AccessMode) public getAccessMode;
+
+ /// @notice The mapping that stores permissions to call the function on the target address by the caller
+ /// @dev caller => target => function signature => permission to call target function for the given caller address
+ mapping(address => mapping(address => mapping(bytes4 => bool))) public hasSpecialAccessToCall;
+
+ /// @dev The mapping L1 token address => struct Withdrawal
+ mapping(address => Withdrawal) public tokenWithdrawal;
+
+ /// @dev The mapping L1 token address => struct Deposit
+ mapping(address => Deposit) public tokenDeposit;
+
+ constructor(address _owner) {
+ _transferOwnership(_owner);
+ }
+
+ /// @return Whether the caller can call the specific function on the target contract
+ /// @param _caller The caller address, who is granted access
+ /// @param _target The address of the smart contract which is called
+    /// @param _functionSig The function signature (selector), access to which needs to be checked
+ function canCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig
+ ) external view returns (bool) {
+ AccessMode accessMode = getAccessMode[_target];
+ return
+ accessMode == AccessMode.Public ||
+ (accessMode == AccessMode.SpecialAccessOnly && hasSpecialAccessToCall[_caller][_target][_functionSig]);
+ }
+
+ /// @notice Set the permission mode to call the target contract
+ /// @param _target The address of the smart contract, of which access to the call is to be changed
+ /// @param _accessMode Whether no one, any or only some addresses can call the target contract
+ function setAccessMode(address _target, AccessMode _accessMode) external onlyOwner {
+ _setAccessMode(_target, _accessMode);
+ }
+
+ /// @notice Set many permission modes to call the target contracts
+ /// @dev Analogous to function `setAccessMode` but performs a batch of changes
+ /// @param _targets The array of smart contract addresses, of which access to the call is to be changed
+ /// @param _accessModes The array of new permission modes, whether no one, any or only some addresses can call the target contract
+ function setBatchAccessMode(address[] calldata _targets, AccessMode[] calldata _accessModes) external onlyOwner {
+ uint256 targetsLength = _targets.length;
+ require(targetsLength == _accessModes.length, "yg"); // The size of arrays should be equal
+
+ for (uint256 i = 0; i < targetsLength; i = i.uncheckedInc()) {
+ _setAccessMode(_targets[i], _accessModes[i]);
+ }
+ }
+
+    /// @dev Changes the access mode and emits the event if the access mode was changed
+ function _setAccessMode(address _target, AccessMode _accessMode) internal {
+ AccessMode accessMode = getAccessMode[_target];
+
+ if (accessMode != _accessMode) {
+ getAccessMode[_target] = _accessMode;
+ emit UpdateAccessMode(_target, accessMode, _accessMode);
+ }
+ }
+
+ /// @notice Set many permissions to call the function on the contract to the specified caller address
+ /// @param _callers The array of caller addresses, who are granted access
+ /// @param _targets The array of smart contract addresses, of which access to the call are to be changed
+    /// @param _functionSigs The array of function signatures (selectors), access to which needs to be changed
+    /// @param _enables The array of boolean flags, whether to enable or disable the function access to the corresponding target address
+ function setBatchPermissionToCall(
+ address[] calldata _callers,
+ address[] calldata _targets,
+ bytes4[] calldata _functionSigs,
+ bool[] calldata _enables
+ ) external onlyOwner {
+ uint256 callersLength = _callers.length;
+
+ // The size of arrays should be equal
+ require(callersLength == _targets.length, "yw");
+ require(callersLength == _functionSigs.length, "yx");
+ require(callersLength == _enables.length, "yy");
+
+ for (uint256 i = 0; i < callersLength; i = i.uncheckedInc()) {
+ _setPermissionToCall(_callers[i], _targets[i], _functionSigs[i], _enables[i]);
+ }
+ }
+
+ /// @notice Set the permission to call the function on the contract to the specified caller address
+ /// @param _caller The caller address, who is granted access
+ /// @param _target The address of the smart contract, of which access to the call is to be changed
+    /// @param _functionSig The function signature (selector), access to which needs to be changed
+    /// @param _enable Whether to enable or disable the permission
+ function setPermissionToCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig,
+ bool _enable
+ ) external onlyOwner {
+ _setPermissionToCall(_caller, _target, _functionSig, _enable);
+ }
+
+ /// @dev Changes permission to call and emits the event if the permission was changed
+ function _setPermissionToCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig,
+ bool _enable
+ ) internal {
+ bool currentPermission = hasSpecialAccessToCall[_caller][_target][_functionSig];
+
+ if (currentPermission != _enable) {
+ hasSpecialAccessToCall[_caller][_target][_functionSig] = _enable;
+ emit UpdateCallPermission(_caller, _target, _functionSig, _enable);
+ }
+ }
+
+    /// @dev Set withdrawal limit data for a token
+    /// @param _l1Token The address of L1 token
+    /// @param _withdrawalLimitation Whether the withdrawal limitation is active
+    /// @param _withdrawalFactor The percentage of allowed withdrawal. A withdrawalFactor of 10 means at most 10% of the bridge balance can be withdrawn
+ function setWithdrawalLimit(
+ address _l1Token,
+ bool _withdrawalLimitation,
+ uint256 _withdrawalFactor
+ ) external onlyOwner {
+ require(_withdrawalFactor <= 100, "wf");
+ tokenWithdrawal[_l1Token].withdrawalLimitation = _withdrawalLimitation;
+ tokenWithdrawal[_l1Token].withdrawalFactor = _withdrawalFactor;
+ }
+
+ /// @dev Get withdrawal limit data of a token
+ /// @param _l1Token The address of L1 token
+ function getTokenWithdrawalLimitData(address _l1Token) external view returns (Withdrawal memory) {
+ return tokenWithdrawal[_l1Token];
+ }
+
+ /// @dev Set deposit limit data for a token
+ /// @param _l1Token The address of L1 token
+ /// @param _depositLimitation deposit limitation is active or not
+ /// @param _depositCap The maximum amount that can be deposited.
+ function setDepositLimit(
+ address _l1Token,
+ bool _depositLimitation,
+ uint256 _depositCap
+ ) external onlyOwner {
+ tokenDeposit[_l1Token].depositLimitation = _depositLimitation;
+ tokenDeposit[_l1Token].depositCap = _depositCap;
+ }
+
+ /// @dev Get deposit limit data of a token
+ /// @param _l1Token The address of L1 token
+ function getTokenDepositLimitData(address _l1Token) external view returns (Deposit memory) {
+ return tokenDeposit[_l1Token];
+ }
+}
diff --git a/ethereum/contracts/common/AllowListed.sol b/ethereum/contracts/common/AllowListed.sol
new file mode 100644
index 000000000..315af6b2a
--- /dev/null
+++ b/ethereum/contracts/common/AllowListed.sol
@@ -0,0 +1,16 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./interfaces/IAllowList.sol";
+
+/// @author Matter Labs
+abstract contract AllowListed {
+ modifier senderCanCallFunction(IAllowList _allowList) {
+ // Preventing the stack too deep error
+ {
+ require(_allowList.canCall(msg.sender, address(this), msg.sig), "nr");
+ }
+ _;
+ }
+}
diff --git a/ethereum/contracts/common/Dependencies.sol b/ethereum/contracts/common/Dependencies.sol
new file mode 100644
index 000000000..57a299c9d
--- /dev/null
+++ b/ethereum/contracts/common/Dependencies.sol
@@ -0,0 +1,5 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol";
diff --git a/ethereum/contracts/common/L2ContractHelper.sol b/ethereum/contracts/common/L2ContractHelper.sol
new file mode 100644
index 000000000..d4c61892b
--- /dev/null
+++ b/ethereum/contracts/common/L2ContractHelper.sol
@@ -0,0 +1,87 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+interface IL2Messenger {
+ function sendToL1(bytes memory _message) external returns (bytes32);
+}
+
+interface IContractDeployer {
+ struct ForceDeployment {
+ bytes32 bytecodeHash;
+ address newAddress;
+ bool callConstructor;
+ uint256 value;
+ bytes input;
+ }
+
+ function forceDeployOnAddresses(ForceDeployment[] calldata _deployParams) external;
+
+ function create2(
+ bytes32 _salt,
+ bytes32 _bytecodeHash,
+ bytes calldata _input
+ ) external;
+}
+
+uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15
+
+address constant BOOTLOADER_ADDRESS = address(SYSTEM_CONTRACTS_OFFSET + 0x01);
+
+address constant DEPLOYER_SYSTEM_CONTRACT_ADDRESS = address(SYSTEM_CONTRACTS_OFFSET + 0x06);
+
+// A contract that is allowed to deploy any codehash
+// on any address. To be used only during an upgrade.
+address constant FORCE_DEPLOYER = address(SYSTEM_CONTRACTS_OFFSET + 0x07);
+
+IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08));
+
+library L2ContractHelper {
+ bytes32 constant CREATE2_PREFIX = keccak256("zksyncCreate2");
+
+ function sendMessageToL1(bytes memory _message) internal returns (bytes32) {
+ return L2_MESSENGER.sendToL1(_message);
+ }
+
+ function hashL2Bytecode(bytes memory _bytecode) internal pure returns (bytes32 hashedBytecode) {
+ // Note that the length of the bytecode
+ // must be provided in 32-byte words.
+ require(_bytecode.length % 32 == 0, "po");
+
+ uint256 bytecodeLenInWords = _bytecode.length / 32;
+ require(bytecodeLenInWords < 2**16, "pp"); // bytecode length must be less than 2^16 words
+ require(bytecodeLenInWords % 2 == 1, "pr"); // bytecode length in words must be odd
+ hashedBytecode = sha256(_bytecode) & 0x00000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF;
+ // Setting the version of the hash
+ hashedBytecode = (hashedBytecode | bytes32(uint256(1 << 248)));
+ // Setting the length
+ hashedBytecode = hashedBytecode | bytes32(bytecodeLenInWords << 224);
+ }
+
+    /// @notice Validates the format of the bytecode hash
+ function validateBytecodeHash(bytes32 _bytecodeHash) internal pure {
+ uint8 version = uint8(_bytecodeHash[0]);
+ require(version == 1 && _bytecodeHash[1] == bytes1(0), "zf"); // Incorrectly formatted bytecodeHash
+
+ require(bytecodeLen(_bytecodeHash) % 2 == 1, "uy"); // Code length in words must be odd
+ }
+
+    /// @notice Returns the length of the bytecode in 32-byte words
+ function bytecodeLen(bytes32 _bytecodeHash) internal pure returns (uint256 codeLengthInWords) {
+ codeLengthInWords = uint256(uint8(_bytecodeHash[2])) * 256 + uint256(uint8(_bytecodeHash[3]));
+ }
+
+ function computeCreate2Address(
+ address _sender,
+ bytes32 _salt,
+ bytes32 _bytecodeHash,
+ bytes32 _constructorInputHash
+ ) internal pure returns (address) {
+ bytes32 senderBytes = bytes32(uint256(uint160(_sender)));
+ bytes32 data = keccak256(
+ bytes.concat(CREATE2_PREFIX, senderBytes, _salt, _bytecodeHash, _constructorInputHash)
+ );
+
+ return address(uint160(uint256(data)));
+ }
+}
diff --git a/ethereum/contracts/common/ReentrancyGuard.sol b/ethereum/contracts/common/ReentrancyGuard.sol
new file mode 100644
index 000000000..96449984a
--- /dev/null
+++ b/ethereum/contracts/common/ReentrancyGuard.sol
@@ -0,0 +1,87 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/**
+ * @dev Contract module that helps prevent reentrant calls to a function.
+ *
+ * Inheriting from `ReentrancyGuard` will make the {nonReentrant} modifier
+ * available, which can be applied to functions to make sure there are no nested
+ * (reentrant) calls to them.
+ *
+ * Note that because there is a single `nonReentrant` guard, functions marked as
+ * `nonReentrant` may not call one another. This can be worked around by making
+ * those functions `private`, and then adding `external` `nonReentrant` entry
+ * points to them.
+ *
+ * TIP: If you would like to learn more about reentrancy and alternative ways
+ * to protect against it, check out our blog post
+ * https://blog.openzeppelin.com/reentrancy-after-istanbul/[Reentrancy After Istanbul].
+ *
+ * _Since v2.5.0:_ this module is now much more gas efficient, given net gas
+ * metering changes introduced in the Istanbul hardfork.
+ */
+abstract contract ReentrancyGuard {
+ /// @dev Address of lock flag variable.
+    /// @dev Flag is placed at random storage location to not interfere with Storage contract.
+ uint256 private constant LOCK_FLAG_ADDRESS = 0x8e94fed44239eb2314ab7a406345e6c5a8f0ccedf3b600de3d004e672c33abf4; // keccak256("ReentrancyGuard") - 1;
+
+ // https://github.com/OpenZeppelin/openzeppelin-contracts/blob/566a774222707e424896c0c390a84dc3c13bdcb2/contracts/security/ReentrancyGuard.sol
+ // The values being non-zero value makes deployment a bit more expensive,
+ // but in exchange the refund on every call to nonReentrant will be lower in
+ // amount. Since refunds are capped to a percentage of the total
+ // transaction's gas, it is best to keep them low in cases like this one, to
+ // increase the likelihood of the full refund coming into effect.
+ uint256 private constant _NOT_ENTERED = 1;
+ uint256 private constant _ENTERED = 2;
+
+ modifier reentrancyGuardInitializer() {
+ _initializeReentrancyGuard();
+ _;
+ }
+
+ function _initializeReentrancyGuard() private {
+ uint256 lockSlotOldValue;
+
+ // Storing an initial non-zero value makes deployment a bit more
+ // expensive but in exchange every call to nonReentrant
+ // will be cheaper.
+ assembly {
+ lockSlotOldValue := sload(LOCK_FLAG_ADDRESS)
+ sstore(LOCK_FLAG_ADDRESS, _NOT_ENTERED)
+ }
+
+ // Check that storage slot for reentrancy guard is empty to rule out possibility of slot conflict
+ require(lockSlotOldValue == 0, "1B");
+ }
+
+ /**
+ * @dev Prevents a contract from calling itself, directly or indirectly.
+ * Calling a `nonReentrant` function from another `nonReentrant`
+ * function is not supported. It is possible to prevent this from happening
+ * by making the `nonReentrant` function external, and make it call a
+ * `private` function that does the actual work.
+ */
+ modifier nonReentrant() {
+ uint256 _status;
+ assembly {
+ _status := sload(LOCK_FLAG_ADDRESS)
+ }
+
+        // On the first call to nonReentrant, the lock flag will be _NOT_ENTERED
+ require(_status == _NOT_ENTERED, "r1");
+
+ // Any calls to nonReentrant after this point will fail
+ assembly {
+ sstore(LOCK_FLAG_ADDRESS, _ENTERED)
+ }
+
+ _;
+
+ // By storing the original value once again, a refund is triggered (see
+ // https://eips.ethereum.org/EIPS/eip-2200)
+ assembly {
+ sstore(LOCK_FLAG_ADDRESS, _NOT_ENTERED)
+ }
+ }
+}
diff --git a/ethereum/contracts/common/interfaces/IAllowList.sol b/ethereum/contracts/common/interfaces/IAllowList.sol
new file mode 100644
index 000000000..b56ba491f
--- /dev/null
+++ b/ethereum/contracts/common/interfaces/IAllowList.sol
@@ -0,0 +1,105 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+interface IAllowList {
+ /*//////////////////////////////////////////////////////////////
+ EVENTS
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Access mode of target contract is changed
+ event UpdateAccessMode(address indexed target, AccessMode previousMode, AccessMode newMode);
+
+ /// @notice Permission to call is changed
+ event UpdateCallPermission(address indexed caller, address indexed target, bytes4 indexed functionSig, bool status);
+
+ /// @notice Type of access to a specific contract includes three different modes
+ /// @param Closed No one has access to the contract
+ /// @param SpecialAccessOnly Any address with granted special access can interact with a contract (see `hasSpecialAccessToCall`)
+ /// @param Public Everyone can interact with a contract
+ enum AccessMode {
+ Closed,
+ SpecialAccessOnly,
+ Public
+ }
+
+ /// @dev A struct that contains withdrawal limit data of a token
+ /// @param withdrawalLimitation Whether any withdrawal limitation is placed or not
+    /// @param withdrawalFactor Percentage of allowed withdrawal. A withdrawalFactor of 10 means at most 10% of the bridge balance can be withdrawn
+ struct Withdrawal {
+ bool withdrawalLimitation;
+ uint256 withdrawalFactor;
+ }
+
+ /// @dev A struct that contains deposit limit data of a token
+ /// @param depositLimitation Whether any deposit limitation is placed or not
+ /// @param depositCap The maximum amount that can be deposited.
+ struct Deposit {
+ bool depositLimitation;
+ uint256 depositCap;
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ GETTERS
+ //////////////////////////////////////////////////////////////*/
+
+ function getAccessMode(address _target) external view returns (AccessMode);
+
+ function hasSpecialAccessToCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig
+ ) external view returns (bool);
+
+ function canCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig
+ ) external view returns (bool);
+
+ function getTokenWithdrawalLimitData(address _l1Token) external view returns (Withdrawal memory);
+
+ function getTokenDepositLimitData(address _l1Token) external view returns (Deposit memory);
+
+ /*//////////////////////////////////////////////////////////////
+ ALLOW LIST LOGIC
+ //////////////////////////////////////////////////////////////*/
+
+ function setBatchAccessMode(address[] calldata _targets, AccessMode[] calldata _accessMode) external;
+
+ function setAccessMode(address _target, AccessMode _accessMode) external;
+
+ function setBatchPermissionToCall(
+ address[] calldata _callers,
+ address[] calldata _targets,
+ bytes4[] calldata _functionSigs,
+ bool[] calldata _enables
+ ) external;
+
+ function setPermissionToCall(
+ address _caller,
+ address _target,
+ bytes4 _functionSig,
+ bool _enable
+ ) external;
+
+ /*//////////////////////////////////////////////////////////////
+ WITHDRAWAL LIMIT LOGIC
+ //////////////////////////////////////////////////////////////*/
+
+ function setWithdrawalLimit(
+ address _l1Token,
+ bool _withdrawalLimitation,
+ uint256 _withdrawalFactor
+ ) external;
+
+ /*//////////////////////////////////////////////////////////////
+ DEPOSIT LIMIT LOGIC
+ //////////////////////////////////////////////////////////////*/
+
+ function setDepositLimit(
+ address _l1Token,
+ bool _depositLimitation,
+ uint256 _depositCap
+ ) external;
+}
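A hedged sketch of how a contract gated by this interface could enforce the allow list before executing a restricted function. The contract name, import path, modifier name, and revert string below are illustrative assumptions; only the `IAllowList.canCall` call reflects the interface above:

    pragma solidity ^0.8.0;

    import "./IAllowList.sol"; // illustrative path

    contract AllowListGuardSketch {
        IAllowList public immutable allowList;

        constructor(IAllowList _allowList) {
            allowList = _allowList;
        }

        // Reverts unless the allow list reports that `msg.sender` may call the
        // currently executing function (`msg.sig`) on this contract.
        modifier senderCanCallFunction() {
            require(allowList.canCall(msg.sender, address(this), msg.sig), "nr");
            _;
        }

        function restrictedAction() external senderCanCallFunction {
            // ... restricted logic ...
        }
    }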
diff --git a/ethereum/contracts/common/libraries/UncheckedMath.sol b/ethereum/contracts/common/libraries/UncheckedMath.sol
new file mode 100644
index 000000000..118e88e2a
--- /dev/null
+++ b/ethereum/contracts/common/libraries/UncheckedMath.sol
@@ -0,0 +1,17 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+library UncheckedMath {
+ function uncheckedInc(uint256 _number) internal pure returns (uint256) {
+ unchecked {
+ return _number + 1;
+ }
+ }
+
+ function uncheckedAdd(uint256 _lhs, uint256 _rhs) internal pure returns (uint256) {
+ unchecked {
+ return _lhs + _rhs;
+ }
+ }
+}
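The typical use of this library is saving gas on loop counters where overflow is impossible. A small sketch (contract name and import path are illustrative):

    pragma solidity ^0.8.0;

    import "./UncheckedMath.sol"; // illustrative path

    contract UncheckedLoopSketch {
        using UncheckedMath for uint256;

        // The counter is bounded by `_values.length`, so skipping the overflow
        // check in the increment is safe and saves a little gas per iteration.
        function sum(uint256[] calldata _values) external pure returns (uint256 acc) {
            for (uint256 i = 0; i < _values.length; i = i.uncheckedInc()) {
                acc += _values[i];
            }
        }
    }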
diff --git a/ethereum/contracts/common/libraries/UnsafeBytes.sol b/ethereum/contracts/common/libraries/UnsafeBytes.sol
new file mode 100644
index 000000000..9d3c5d68f
--- /dev/null
+++ b/ethereum/contracts/common/libraries/UnsafeBytes.sol
@@ -0,0 +1,45 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/**
+ * @author Matter Labs
+ * @dev The library provides a set of functions that help read data from an "abi.encodePacked" byte array.
+ * @dev Each of the functions accepts the `bytes memory` and the offset where data should be read and returns a value of a certain type.
+ *
+ * @dev WARNING!
+ * 1) Functions do not check the length of the bytes array, so reads can go out of bounds.
+ * The user of the library must check the bytes length before using any functions from the library!
+ *
+ * 2) Read variables are not cleaned up - https://docs.soliditylang.org/en/v0.8.16/internals/variable_cleanup.html.
+ * Using data in inline assembly can lead to unexpected behavior!
+ */
+library UnsafeBytes {
+ function readUint32(bytes memory _bytes, uint256 _start) internal pure returns (uint32 result, uint256 offset) {
+ assembly {
+ offset := add(_start, 4)
+ result := mload(add(_bytes, offset))
+ }
+ }
+
+ function readAddress(bytes memory _bytes, uint256 _start) internal pure returns (address result, uint256 offset) {
+ assembly {
+ offset := add(_start, 20)
+ result := mload(add(_bytes, offset))
+ }
+ }
+
+ function readUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256 result, uint256 offset) {
+ assembly {
+ offset := add(_start, 32)
+ result := mload(add(_bytes, offset))
+ }
+ }
+
+ function readBytes32(bytes memory _bytes, uint256 _start) internal pure returns (bytes32 result, uint256 offset) {
+ assembly {
+ offset := add(_start, 32)
+ result := mload(add(_bytes, offset))
+ }
+ }
+}
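A hedged sketch of reading an `abi.encodePacked(address, uint256)` payload by chaining the returned offsets; per the warning above, the caller validates the length before any unchecked read. Contract name and import path are illustrative:

    pragma solidity ^0.8.0;

    import "./UnsafeBytes.sol"; // illustrative path

    contract PackedReaderSketch {
        using UnsafeBytes for bytes;

        function parse(bytes memory _data) external pure returns (address account, uint256 amount) {
            require(_data.length == 52, "length"); // 20 bytes (address) + 32 bytes (uint256)
            uint256 offset;
            (account, offset) = _data.readAddress(0); // offset becomes 20
            (amount, offset) = _data.readUint256(offset); // offset becomes 52
        }
    }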
diff --git a/ethereum/contracts/dev-contracts/Multicall.sol b/ethereum/contracts/dev-contracts/Multicall.sol
new file mode 100644
index 000000000..ee78dbf89
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/Multicall.sol
@@ -0,0 +1,68 @@
+// SPDX-License-Identifier: MIT
+
+/*
+
+MIT License
+
+Copyright (c) 2018 Maker Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+*/
+
+pragma solidity ^0.8.0;
+
+/// @title Multicall - Aggregate results from multiple read-only function calls
+contract Multicall {
+ struct Call {
+ address target;
+ bytes callData;
+ }
+
+ function aggregate(Call[] memory calls) public returns (uint256 blockNumber, bytes[] memory returnData) {
+ blockNumber = block.number;
+ returnData = new bytes[](calls.length);
+ for (uint256 i = 0; i < calls.length; ++i) {
+ (bool success, bytes memory ret) = calls[i].target.call(calls[i].callData);
+ require(success);
+ returnData[i] = ret;
+ }
+ }
+
+ // Helper functions
+ function getEthBalance(address addr) public view returns (uint256 balance) {
+ balance = addr.balance;
+ }
+
+ function getBlockHash(uint256 blockNumber) public view returns (bytes32 blockHash) {
+ blockHash = blockhash(blockNumber);
+ }
+
+ function getLastBlockHash() public view returns (bytes32 blockHash) {
+ blockHash = blockhash(block.number - 1);
+ }
+
+ function getCurrentBlockTimestamp() public view returns (uint256 timestamp) {
+ timestamp = block.timestamp;
+ }
+
+ function getCurrentBlockDifficulty() public view returns (uint256 difficulty) {
+ difficulty = block.difficulty;
+ }
+
+ function getCurrentBlockGasLimit() public view returns (uint256 gaslimit) {
+ gaslimit = block.gaslimit;
+ }
+
+ function getCurrentBlockCoinbase() public view returns (address coinbase) {
+ coinbase = block.coinbase;
+ }
+}
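A usage sketch: batching two token balance reads into one `aggregate` round trip. The `IERC20BalanceSketch` interface, contract name, and import path are illustrative; any failed sub-call reverts the whole batch because of the `require(success)` above:

    pragma solidity ^0.8.0;

    import "./Multicall.sol"; // illustrative path

    interface IERC20BalanceSketch {
        function balanceOf(address _account) external view returns (uint256);
    }

    contract MulticallUserSketch {
        function readTwoBalances(
            Multicall _multicall,
            address _token,
            address _a,
            address _b
        ) external returns (uint256 balanceA, uint256 balanceB) {
            Multicall.Call[] memory calls = new Multicall.Call[](2);
            calls[0] = Multicall.Call(_token, abi.encodeWithSelector(IERC20BalanceSketch.balanceOf.selector, _a));
            calls[1] = Multicall.Call(_token, abi.encodeWithSelector(IERC20BalanceSketch.balanceOf.selector, _b));
            (, bytes[] memory results) = _multicall.aggregate(calls);
            balanceA = abi.decode(results[0], (uint256));
            balanceB = abi.decode(results[1], (uint256));
        }
    }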
diff --git a/ethereum/contracts/dev-contracts/ReturnSomething.sol b/ethereum/contracts/dev-contracts/ReturnSomething.sol
new file mode 100644
index 000000000..e25d5878c
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/ReturnSomething.sol
@@ -0,0 +1,11 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+contract ReturnSomething {
+ fallback() external payable {
+ assembly {
+ return(0, 0x20)
+ }
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/RevertFallback.sol b/ethereum/contracts/dev-contracts/RevertFallback.sol
new file mode 100644
index 000000000..39eda8f3d
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/RevertFallback.sol
@@ -0,0 +1,9 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+contract RevertFallback {
+ fallback() external payable {
+ revert();
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/RevertReceiveAccount.sol b/ethereum/contracts/dev-contracts/RevertReceiveAccount.sol
new file mode 100644
index 000000000..2a3a8c394
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/RevertReceiveAccount.sol
@@ -0,0 +1,23 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/// @title RevertReceiveAccount - An account that reverts on receiving funds, depending on the flag
+/// @dev Used for testing failed withdrawals from the zkSync smart contract
+contract RevertReceiveAccount {
+ bool public revertReceive;
+
+ constructor() {
+ revertReceive = false;
+ }
+
+ function setRevertReceive(bool newValue) public {
+ revertReceive = newValue;
+ }
+
+ receive() external payable {
+ // Assert is used here to also simulate the out-of-gas error, since a failed assertion
+ // consumes all the remaining gas
+ assert(!revertReceive);
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/RevertTransferERC20.sol b/ethereum/contracts/dev-contracts/RevertTransferERC20.sol
new file mode 100644
index 000000000..28e7abed3
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/RevertTransferERC20.sol
@@ -0,0 +1,32 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./TestnetERC20Token.sol";
+
+/// @title RevertTransferERC20 - An ERC20 token contract that can revert transfers depending on a flag
+/// @dev Used for testing failed ERC-20 withdrawals from the zkSync smart contract
+contract RevertTransferERC20 is TestnetERC20Token {
+ bool public revertTransfer;
+
+ constructor(
+ string memory name,
+ string memory symbol,
+ uint8 decimals
+ ) TestnetERC20Token(name, symbol, decimals) {
+ revertTransfer = false;
+ }
+
+ function setRevertTransfer(bool newValue) public {
+ revertTransfer = newValue;
+ }
+
+ function transfer(address recipient, uint256 amount) public virtual override returns (bool) {
+ // Assert is used here to also simulate the out-of-gas error, since a failed assertion
+ // consumes all the remaining gas
+ assert(!revertTransfer);
+
+ _transfer(_msgSender(), recipient, amount);
+ return true;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/SingletonFactory.sol b/ethereum/contracts/dev-contracts/SingletonFactory.sol
new file mode 100644
index 000000000..85a279946
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/SingletonFactory.sol
@@ -0,0 +1,20 @@
+pragma solidity ^0.8.0;
+
+/**
+ * @title Singleton Factory (EIP-2470)
+ * @notice Exposes CREATE2 (EIP-1014) to deploy bytecode on deterministic addresses based on initialization code and salt.
+ * @author Ricardo Guilherme Schmidt (Status Research & Development GmbH)
+ */
+contract SingletonFactory {
+ /**
+ * @notice Deploys `_initCode` using `_salt` for defining the deterministic address.
+ * @param _initCode Initialization code.
+ * @param _salt Arbitrary value to modify resulting address.
+ * @return createdContract Created contract address.
+ */
+ function deploy(bytes memory _initCode, bytes32 _salt) public returns (address payable createdContract) {
+ assembly {
+ createdContract := create2(0, add(_initCode, 0x20), mload(_initCode), _salt)
+ }
+ }
+}
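The address produced by `deploy` is fully determined by the factory address, the salt, and the init code (EIP-1014): it is the last 20 bytes of `keccak256(0xff ++ factory ++ salt ++ keccak256(initCode))`. A small sketch of computing it ahead of time (contract name is illustrative):

    pragma solidity ^0.8.0;

    contract Create2AddressSketch {
        function computeAddress(
            address _factory,
            bytes32 _salt,
            bytes memory _initCode
        ) external pure returns (address) {
            bytes32 digest = keccak256(
                abi.encodePacked(bytes1(0xff), _factory, _salt, keccak256(_initCode))
            );
            return address(uint160(uint256(digest)));
        }
    }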
diff --git a/ethereum/contracts/dev-contracts/TestnetERC20Token.sol b/ethereum/contracts/dev-contracts/TestnetERC20Token.sol
new file mode 100644
index 000000000..44d30b18d
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/TestnetERC20Token.sol
@@ -0,0 +1,26 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/token/ERC20/ERC20.sol";
+
+contract TestnetERC20Token is ERC20 {
+ uint8 private _decimals;
+
+ constructor(
+ string memory name_,
+ string memory symbol_,
+ uint8 decimals_
+ ) ERC20(name_, symbol_) {
+ _decimals = decimals_;
+ }
+
+ function mint(address _to, uint256 _amount) public returns (bool) {
+ _mint(_to, _amount);
+ return true;
+ }
+
+ function decimals() public view override returns (uint8) {
+ return _decimals;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/DiamondCutTest.sol b/ethereum/contracts/dev-contracts/test/DiamondCutTest.sol
new file mode 100644
index 000000000..85d6b86f9
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/DiamondCutTest.sol
@@ -0,0 +1,12 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../zksync/libraries/Diamond.sol";
+import "../../zksync/facets/Getters.sol";
+
+contract DiamondCutTest is GettersFacet {
+ function diamondCut(Diamond.DiamondCutData memory _diamondCut) external {
+ Diamond.diamondCut(_diamondCut);
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/DiamondProxyTest.sol b/ethereum/contracts/dev-contracts/test/DiamondProxyTest.sol
new file mode 100644
index 000000000..13e3904f1
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/DiamondProxyTest.sol
@@ -0,0 +1,14 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../zksync/libraries/Diamond.sol";
+import "../../zksync/facets/Base.sol";
+
+contract DiamondProxyTest is Base {
+ function setFreezability(bool _freeze) external returns (bytes32) {
+ Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage();
+ diamondStorage.isFrozen = _freeze;
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/DummyERC20BytesTransferReturnValue.sol b/ethereum/contracts/dev-contracts/test/DummyERC20BytesTransferReturnValue.sol
new file mode 100644
index 000000000..0db0dce54
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/DummyERC20BytesTransferReturnValue.sol
@@ -0,0 +1,19 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+contract DummyERC20BytesTransferReturnValue {
+ bytes returnValue;
+
+ constructor(bytes memory _returnValue) {
+ returnValue = _returnValue;
+ }
+
+ function transfer(address _recipient, uint256 _amount) external view returns (bytes memory) {
+ // Hack to prevent Solidity warnings
+ _recipient;
+ _amount;
+
+ return returnValue;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/DummyERC20NoTransferReturnValue.sol b/ethereum/contracts/dev-contracts/test/DummyERC20NoTransferReturnValue.sol
new file mode 100644
index 000000000..a4c925e5f
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/DummyERC20NoTransferReturnValue.sol
@@ -0,0 +1,7 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+contract DummyERC20NoTransferReturnValue {
+ function transfer(address recipient, uint256 amount) external {}
+}
diff --git a/ethereum/contracts/dev-contracts/test/GovernanceFacetTest.sol b/ethereum/contracts/dev-contracts/test/GovernanceFacetTest.sol
new file mode 100644
index 000000000..31f140171
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/GovernanceFacetTest.sol
@@ -0,0 +1,23 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../zksync/facets/Governance.sol";
+
+contract GovernanceFacetTest is GovernanceFacet {
+ constructor() {
+ s.governor = msg.sender;
+ }
+
+ function isValidator(address _validator) external view returns (bool) {
+ return s.validators[_validator];
+ }
+
+ function getPendingGovernor() external view returns (address) {
+ return s.pendingGovernor;
+ }
+
+ function getGovernor() external view returns (address) {
+ return s.governor;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/L1ERC20BridgeTest.sol b/ethereum/contracts/dev-contracts/test/L1ERC20BridgeTest.sol
new file mode 100644
index 000000000..451f3c4dd
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/L1ERC20BridgeTest.sol
@@ -0,0 +1,18 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../bridge/L1ERC20Bridge.sol";
+
+/// @author Matter Labs
+contract L1ERC20BridgeTest is L1ERC20Bridge {
+ constructor(IMailbox _mailbox, IAllowList _allowList) L1ERC20Bridge(_mailbox, _allowList) {}
+
+ function getAllowList() public view returns (IAllowList) {
+ return allowList;
+ }
+
+ function getZkSyncMailbox() public view returns (IMailbox) {
+ return zkSyncMailbox;
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/MerkleTest.sol b/ethereum/contracts/dev-contracts/test/MerkleTest.sol
new file mode 100644
index 000000000..ce5efc17d
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/MerkleTest.sol
@@ -0,0 +1,15 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../zksync/libraries/Merkle.sol";
+
+contract MerkleTest {
+ function calculateRoot(
+ bytes32[] calldata _path,
+ uint256 _index,
+ bytes32 _itemHash
+ ) external pure returns (bytes32) {
+ return Merkle.calculateRoot(_path, _index, _itemHash);
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/PriorityQueueTest.sol b/ethereum/contracts/dev-contracts/test/PriorityQueueTest.sol
new file mode 100644
index 000000000..c65cb7058
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/PriorityQueueTest.sol
@@ -0,0 +1,39 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../zksync/libraries/PriorityQueue.sol";
+
+contract PriorityQueueTest {
+ using PriorityQueue for PriorityQueue.Queue;
+
+ PriorityQueue.Queue priorityQueue;
+
+ function getFirstUnprocessedPriorityTx() external view returns (uint256) {
+ return priorityQueue.getFirstUnprocessedPriorityTx();
+ }
+
+ function getTotalPriorityTxs() external view returns (uint256) {
+ return priorityQueue.getTotalPriorityTxs();
+ }
+
+ function getSize() external view returns (uint256) {
+ return priorityQueue.getSize();
+ }
+
+ function isEmpty() external view returns (bool) {
+ return priorityQueue.isEmpty();
+ }
+
+ function pushBack(PriorityOperation memory _operation) external {
+ return priorityQueue.pushBack(_operation);
+ }
+
+ function front() external view returns (PriorityOperation memory) {
+ return priorityQueue.front();
+ }
+
+ function popFront() external returns (PriorityOperation memory operation) {
+ return priorityQueue.popFront();
+ }
+}
diff --git a/ethereum/contracts/dev-contracts/test/UnsafeBytesTest.sol b/ethereum/contracts/dev-contracts/test/UnsafeBytesTest.sol
new file mode 100644
index 000000000..a46b377f9
--- /dev/null
+++ b/ethereum/contracts/dev-contracts/test/UnsafeBytesTest.sol
@@ -0,0 +1,37 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../common/libraries/UnsafeBytes.sol";
+
+contract UnsafeBytesTest {
+ using UnsafeBytes for bytes;
+
+ function readUint32(bytes memory _bytes, uint256 _start) external pure returns (uint32 readValue, uint256 offset) {
+ return _bytes.readUint32(_start);
+ }
+
+ function readAddress(bytes memory _bytes, uint256 _start)
+ external
+ pure
+ returns (address readValue, uint256 offset)
+ {
+ return _bytes.readAddress(_start);
+ }
+
+ function readUint256(bytes memory _bytes, uint256 _start)
+ external
+ pure
+ returns (uint256 readValue, uint256 offset)
+ {
+ return _bytes.readUint256(_start);
+ }
+
+ function readBytes32(bytes memory _bytes, uint256 _start)
+ external
+ pure
+ returns (bytes32 readValue, uint256 offset)
+ {
+ return _bytes.readBytes32(_start);
+ }
+}
diff --git a/ethereum/contracts/vendor/AddressAliasHelper.sol b/ethereum/contracts/vendor/AddressAliasHelper.sol
new file mode 100644
index 000000000..7bcdbb961
--- /dev/null
+++ b/ethereum/contracts/vendor/AddressAliasHelper.sol
@@ -0,0 +1,43 @@
+// SPDX-License-Identifier: Apache-2.0
+
+/*
+ * Copyright 2019-2021, Offchain Labs, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+pragma solidity ^0.8.0;
+
+library AddressAliasHelper {
+ uint160 constant offset = uint160(0x1111000000000000000000000000000000001111);
+
+ /// @notice Utility function that converts the address in the L1 that submitted a tx to
+ /// the inbox to the msg.sender viewed in the L2
+ /// @param l1Address the address in the L1 that triggered the tx to L2
+ /// @return l2Address L2 address as viewed in msg.sender
+ function applyL1ToL2Alias(address l1Address) internal pure returns (address l2Address) {
+ unchecked {
+ l2Address = address(uint160(l1Address) + offset);
+ }
+ }
+
+ /// @notice Utility function that converts the msg.sender viewed in the L2 to the
+ /// address in the L1 that submitted a tx to the inbox
+ /// @param l2Address L2 address as viewed in msg.sender
+ /// @return l1Address the address in the L1 that triggered the tx to L2
+ function undoL1ToL2Alias(address l2Address) internal pure returns (address l1Address) {
+ unchecked {
+ l1Address = address(uint160(l2Address) - offset);
+ }
+ }
+}
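The alias is a constant offset applied modulo 2**160 (the `unchecked` blocks allow the uint160 addition and subtraction to wrap), so applying and then undoing it round-trips for any address. A small sketch (contract name and import path are illustrative):

    pragma solidity ^0.8.0;

    import "./AddressAliasHelper.sol"; // illustrative path

    contract AliasRoundTripSketch {
        function roundTrips(address _l1Address) external pure returns (bool) {
            address aliased = AddressAliasHelper.applyL1ToL2Alias(_l1Address);
            return AddressAliasHelper.undoL1ToL2Alias(aliased) == _l1Address;
        }
    }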
diff --git a/ethereum/contracts/zksync/Config.sol b/ethereum/contracts/zksync/Config.sol
new file mode 100644
index 000000000..d0e841977
--- /dev/null
+++ b/ethereum/contracts/zksync/Config.sol
@@ -0,0 +1,135 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/// @dev `keccak256("")`
+bytes32 constant EMPTY_STRING_KECCAK = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470;
+
+/// @dev Bytes in raw L2 log
+/// @dev Equal to the bytes size of the tuple - (uint8 ShardId, bool isService, uint16 txNumberInBlock, address sender, bytes32 key, bytes32 value)
+uint256 constant L2_TO_L1_LOG_SERIALIZE_SIZE = 88;
+
+/// @dev The maximum length of the bytes array with L2 -> L1 logs
+uint256 constant MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES = 4 + L2_TO_L1_LOG_SERIALIZE_SIZE * 512;
+
+/// @dev L2 -> L1 logs Merkle tree height
+uint256 constant L2_TO_L1_LOG_MERKLE_TREE_HEIGHT = 9;
+
+/// @dev The value of default leaf hash for L2 -> L1 logs Merkle tree
+/// @dev An incomplete fixed-size tree is filled with this value to be a full binary tree
+/// @dev Actually equal to the `keccak256(new bytes(L2_TO_L1_LOG_SERIALIZE_SIZE))`
+bytes32 constant L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH = 0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43ba;
+
+/// @dev Number of bytes in one initial storage change
+/// @dev Equal to the bytes size of the tuple - (bytes32 key, bytes32 value)
+uint256 constant INITIAL_STORAGE_CHANGE_SERIALIZE_SIZE = 64;
+
+/// @dev The maximum length of the bytes array with initial storage changes
+uint256 constant MAX_INITIAL_STORAGE_CHANGES_COMMITMENT_BYTES = 4 + INITIAL_STORAGE_CHANGE_SERIALIZE_SIZE * 4765;
+
+/// @dev Number of bytes in one repeated storage change
+/// @dev Equal to the bytes size of the tuple - (bytes8 key, bytes32 value)
+uint256 constant REPEATED_STORAGE_CHANGE_SERIALIZE_SIZE = 40;
+
+/// @dev The maximum length of the bytes array with repeated storage changes
+uint256 constant MAX_REPEATED_STORAGE_CHANGES_COMMITMENT_BYTES = 4 + REPEATED_STORAGE_CHANGE_SERIALIZE_SIZE * 7564;
+
+// TODO: change constant to the real root hash of empty Merkle tree (SMA-184)
+bytes32 constant DEFAULT_L2_LOGS_TREE_ROOT_HASH = bytes32(0);
+
+/// @dev The address of the special smart contract that can send arbitrary-length messages as an L2 log
+address constant L2_TO_L1_MESSENGER = address(0x8008);
+
+/// @dev The address of the bootloader start program
+address constant L2_BOOTLOADER_ADDRESS = address(0x8001);
+
+/// @dev The address of the eth token system contract
+address constant L2_ETH_TOKEN_ADDRESS = address(0x800a);
+
+/// @dev The address of the known code storage system contract
+address constant L2_KNOWN_CODE_STORAGE_ADDRESS = address(0x8004);
+
+/// @dev The address of the context system contract
+address constant L2_SYSTEM_CONTEXT_ADDRESS = address(0x800b);
+
+/// @dev The transaction type (the value of the first byte) of a zkSync transaction that came from L1
+uint256 constant PRIORITY_OPERATION_L2_TX_TYPE = 255;
+
+/// @dev The amount of time in seconds the validator has to process the priority transaction
+/// NOTE: The constant is set to zero for the Alpha release period
+uint256 constant PRIORITY_EXPIRATION = 0 days;
+
+/// @dev Notice period (in seconds) before the upgrade mode can enter the activation-preparation status
+/// @dev NOTE: we must reserve enough time for users to send a full exit operation, wait the maximum time for it to be processed, and withdraw their funds.
+uint256 constant UPGRADE_NOTICE_PERIOD = $$(defined(UPGRADE_NOTICE_PERIOD) ? UPGRADE_NOTICE_PERIOD : "14 days");
+
+/// @dev Timestamp - seconds since unix epoch
+uint256 constant COMMIT_TIMESTAMP_NOT_OLDER = $$(
+ defined(COMMIT_TIMESTAMP_NOT_OLDER) ? COMMIT_TIMESTAMP_NOT_OLDER : "365 days"
+);
+
+/// @dev Maximum allowed discrepancy (in seconds) between the real commit block timestamp and its analog used in the verifier
+/// @dev Needed because the miner's `block.timestamp` can deviate by a small amount (roughly 15 seconds)
+uint256 constant COMMIT_TIMESTAMP_APPROXIMATION_DELTA = $$(
+ defined(COMMIT_TIMESTAMP_APPROXIMATION_DELTA) ? COMMIT_TIMESTAMP_APPROXIMATION_DELTA : "365 days"
+);
+
+/// @dev Bit mask to apply for verifier public input before verifying.
+uint256 constant INPUT_MASK = $$(~uint256(0) >> 8);
+
+/// @dev The maximum number of L2 gas that a user can request for an L2 transaction
+uint256 constant L2_TX_MAX_GAS_LIMIT = $(L2_TX_MAX_GAS_LIMIT);
+
+/// @dev The maximum amount of pubdata an L2 operation is allowed to use.
+uint256 constant MAX_PUBDATA_PER_BLOCK = $(MAX_PUBDATA_PER_BLOCK);
+
+/// @dev The maximum amount of pubdata a priority operation is allowed to use.
+/// For now, it is somewhat lower than the maximum number of pubdata allowed for an L2 transaction,
+/// to ensure that the transaction is definitely processable on L2 despite any potential overhead.
+uint256 constant PRIORITY_TX_MAX_PUBDATA = $(PRIORITY_TX_MAX_PUBDATA);
+
+/// @dev The default price per L2 gas to be used for L1->L2 transactions
+uint256 constant FAIR_L2_GAS_PRICE = $(FAIR_L2_GAS_PRICE);
+
+/// @dev Even though the price for 1 byte of pubdata is 16 L1 gas, we use a slightly increased
+/// value.
+uint256 constant L1_GAS_PER_PUBDATA_BYTE = $(L1_GAS_PER_PUBDATA_BYTE);
+
+/// @dev The computational overhead of processing an L2 block.
+uint256 constant BLOCK_OVERHEAD_L2_GAS = $(BLOCK_OVERHEAD_L2_GAS);
+
+/// @dev The overhead in L1 gas of interacting with the L1
+uint256 constant BLOCK_OVERHEAD_L1_GAS = $(BLOCK_OVERHEAD_L1_GAS);
+
+/// @dev The equivalent in L1 pubdata of L1 gas used for working with L1
+uint256 constant BLOCK_OVERHEAD_PUBDATA = BLOCK_OVERHEAD_L1_GAS / L1_GAS_PER_PUBDATA_BYTE;
+
+/// @dev The maximum number of transactions in an L2 block.
+uint256 constant MAX_TRANSACTIONS_IN_BLOCK = $(MAX_TRANSACTIONS_IN_BLOCK);
+
+/// @dev The size of the bootloader memory dedicated to the encodings of transactions
+uint256 constant BOOTLOADER_TX_ENCODING_SPACE = $(BOOTLOADER_TX_ENCODING_SPACE);
+
+/// @dev The intrinsic cost of an L1->L2 transaction in computational L2 gas
+uint256 constant L1_TX_INTRINSIC_L2_GAS = $(L1_TX_INTRINSIC_L2_GAS);
+
+/// @dev The intrinsic cost of an L1->L2 transaction in pubdata
+uint256 constant L1_TX_INTRINSIC_PUBDATA = $(L1_TX_INTRINSIC_PUBDATA);
+
+/// @dev The minimal base amount of L2 gas for an L1->L2 transaction
+uint256 constant L1_TX_MIN_L2_GAS_BASE = $(L1_TX_MIN_L2_GAS_BASE);
+
+/// @dev The additional amount of L2 gas an L1->L2 transaction costs for every 544 bytes of encoding
+uint256 constant L1_TX_DELTA_544_ENCODING_BYTES = $(L1_TX_DELTA_544_ENCODING_BYTES);
+
+/// @dev The additional amount of L2 gas an L1->L2 transaction requires for each new factory dependency
+uint256 constant L1_TX_DELTA_FACTORY_DEPS_L2_GAS = $(L1_TX_DELTA_FACTORY_DEPS_L2_GAS);
+
+/// @dev The additional amount of pubdata an L1->L2 transaction requires for each new factory dependency
+uint256 constant L1_TX_DELTA_FACTORY_DEPS_PUBDATA = $(L1_TX_DELTA_FACTORY_DEPS_PUBDATA);
+
+/// @dev The maximum number of new factory dependencies per L1->L2 transaction
+uint256 constant MAX_NEW_FACTORY_DEPS = $(MAX_NEW_FACTORY_DEPS);
+
+/// @dev The default L2 gasPricePerPubdata to be used in bridges.
+uint256 constant DEFAULT_L2_GAS_PRICE_PER_PUBDATA = $(DEFAULT_L2_GAS_PRICE_PER_PUBDATA);
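The `$(...)` and `$$(...)` expressions above are not valid Solidity; they appear to be solpp-style preprocessor placeholders that the build scripts substitute with concrete values before compilation, with `$$( defined(NAME) ? NAME : default )` falling back to the default shown when no override is supplied. A hedged sketch of what the generated constants could look like after substitution (values marked as illustrative are assumptions, not the real configuration):

    // Defaults visible in the expressions above:
    uint256 constant UPGRADE_NOTICE_PERIOD = 14 days;
    uint256 constant COMMIT_TIMESTAMP_NOT_OLDER = 365 days;
    uint256 constant COMMIT_TIMESTAMP_APPROXIMATION_DELTA = 365 days;
    uint256 constant INPUT_MASK = ~uint256(0) >> 8; // all bits set except the top byte

    // Injected from build-time configuration (illustrative value):
    uint256 constant L2_TX_MAX_GAS_LIMIT = 80000000;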
diff --git a/ethereum/contracts/zksync/DiamondInit.sol b/ethereum/contracts/zksync/DiamondInit.sol
new file mode 100644
index 000000000..ca780462e
--- /dev/null
+++ b/ethereum/contracts/zksync/DiamondInit.sol
@@ -0,0 +1,75 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../common/interfaces/IAllowList.sol";
+import "./interfaces/IExecutor.sol";
+import "./libraries/Diamond.sol";
+import "./facets/Base.sol";
+import "./Config.sol";
+
+/// @author Matter Labs
+/// @dev The contract is used only once to initialize the diamond proxy.
+/// @dev The deployment process takes care of this contract's initialization.
+contract DiamondInit is Base {
+ /// @dev Initialize the implementation to prevent any possibility of a Parity hack.
+ constructor() reentrancyGuardInitializer {}
+
+ /// @notice zkSync contract initialization
+ /// @param _verifier address of Verifier contract
+ /// @param _governor address who can manage the contract
+ /// @param _validator address who can make blocks
+ /// @param _genesisBlockHash Block hash of the genesis (initial) block
+ /// @param _genesisIndexRepeatedStorageChanges The serial number of the shortcut storage key for genesis block
+ /// @param _genesisBlockCommitment The zk-proof commitment for the genesis block
+ /// @param _allowList The address of the allow list smart contract
+ /// @param _verifierParams Verifier config parameters that describes the circuit to be verified
+ /// @param _zkPorterIsAvailable The availability of zk porter shard
+ /// @param _l2BootloaderBytecodeHash The hash of bootloader L2 bytecode
+ /// @param _l2DefaultAccountBytecodeHash The hash of default account L2 bytecode
+ /// @param _priorityTxMaxGasLimit maximum amount of L2 gas that a user can request for L1 -> L2 transactions
+ /// @return Magic 32 bytes, which indicates that the contract logic is expected to be used as a diamond proxy initializer
+ function initialize(
+ Verifier _verifier,
+ address _governor,
+ address _validator,
+ bytes32 _genesisBlockHash,
+ uint64 _genesisIndexRepeatedStorageChanges,
+ bytes32 _genesisBlockCommitment,
+ IAllowList _allowList,
+ VerifierParams calldata _verifierParams,
+ bool _zkPorterIsAvailable,
+ bytes32 _l2BootloaderBytecodeHash,
+ bytes32 _l2DefaultAccountBytecodeHash,
+ uint256 _priorityTxMaxGasLimit
+ ) external reentrancyGuardInitializer returns (bytes32) {
+ require(address(_verifier) != address(0), "vt");
+ require(_governor != address(0), "vy");
+
+ s.verifier = _verifier;
+ s.governor = _governor;
+ s.validators[_validator] = true;
+
+ // We need to initialize the state hash because it is used in the commitment of the next block
+ IExecutor.StoredBlockInfo memory storedBlockZero = IExecutor.StoredBlockInfo(
+ 0,
+ _genesisBlockHash,
+ _genesisIndexRepeatedStorageChanges,
+ 0,
+ EMPTY_STRING_KECCAK,
+ DEFAULT_L2_LOGS_TREE_ROOT_HASH,
+ 0,
+ _genesisBlockCommitment
+ );
+
+ s.storedBlockHashes[0] = keccak256(abi.encode(storedBlockZero));
+ s.allowList = _allowList;
+ s.verifierParams = _verifierParams;
+ s.zkPorterIsAvailable = _zkPorterIsAvailable;
+ s.l2BootloaderBytecodeHash = _l2BootloaderBytecodeHash;
+ s.l2DefaultAccountBytecodeHash = _l2DefaultAccountBytecodeHash;
+ s.priorityTxMaxGasLimit = _priorityTxMaxGasLimit;
+
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
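The 32-byte value returned by `initialize` is not meant for end users: the code performing the diamond cut delegatecalls the initializer and checks this magic value to confirm it hit compatible initialization logic. A minimal caller-side sketch (the real check lives in the `Diamond` library, which is not part of this excerpt; contract name, function name, import path, and revert strings are illustrative):

    pragma solidity ^0.8.0;

    import "./libraries/Diamond.sol"; // illustrative path

    contract DiamondInitCallSketch {
        function _initializeDiamond(address _init, bytes memory _initCalldata) internal {
            (bool success, bytes memory data) = _init.delegatecall(_initCalldata);
            require(success, "diamond initialization failed");
            // The initializer must return exactly the 32-byte magic value.
            require(
                data.length == 32 && abi.decode(data, (bytes32)) == Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE,
                "unexpected initializer return value"
            );
        }
    }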
diff --git a/ethereum/contracts/zksync/DiamondProxy.sol b/ethereum/contracts/zksync/DiamondProxy.sol
new file mode 100644
index 000000000..2e7c0d069
--- /dev/null
+++ b/ethereum/contracts/zksync/DiamondProxy.sol
@@ -0,0 +1,55 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./libraries/Diamond.sol";
+
+/// @title Diamond Proxy Contract (EIP-2535)
+/// @author Matter Labs
+contract DiamondProxy {
+ constructor(uint256 _chainId, Diamond.DiamondCutData memory _diamondCut) {
+ // Check that the contract is deployed on the expected chain.
+ // Thus, a contract deployed by the same Create2 factory on a different chain will have a different address!
+ require(_chainId == block.chainid, "pr");
+ Diamond.diamondCut(_diamondCut);
+ }
+
+ /// @dev 1. Find the facet for the function that is called.
+ /// @dev 2. Delegate the execution to the found facet via `delegatecall`.
+ fallback() external payable {
+ Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage();
+ // Check that the calldata either contains a full 4-byte selector or is empty.
+ // Required because the Diamond proxy looks up the facet by function selector,
+ // which is undefined for calldata lengths in the range [1, 3].
+ require(msg.data.length >= 4 || msg.data.length == 0, "Ut");
+ // Get facet from function selector
+ Diamond.SelectorToFacet memory facet = diamondStorage.selectorToFacet[msg.sig];
+ address facetAddress = facet.facetAddress;
+
+ require(facetAddress != address(0), "F"); // Proxy has no facet for this selector
+ require(!diamondStorage.isFrozen || !facet.isFreezable, "q1"); // Facet is frozen
+
+ assembly {
+ // The pointer to the free memory slot
+ let ptr := mload(0x40)
+ // Copy function signature and arguments from calldata at zero position into memory at pointer position
+ calldatacopy(ptr, 0, calldatasize())
+ // Delegatecall method of the implementation contract returns 0 on error
+ let result := delegatecall(gas(), facetAddress, ptr, calldatasize(), 0, 0)
+ // Get the size of the last return data
+ let size := returndatasize()
+ // Copy the size length of bytes from return data at zero position to pointer position
+ returndatacopy(ptr, 0, size)
+ // Depending on the result value
+ switch result
+ case 0 {
+ // End execution and revert state changes
+ revert(ptr, size)
+ }
+ default {
+ // Return data with length of size at pointers position
+ return(ptr, size)
+ }
+ }
+ }
+}
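From a caller's point of view the proxy behaves as the union of all facet interfaces registered via `Diamond.diamondCut`: the fallback matches `msg.sig` against `selectorToFacet`, delegatecalls the facet, and bubbles up return data or reverts verbatim. A hedged usage sketch (`IGettersSketch` stands in for any registered facet function; all names below are illustrative):

    pragma solidity ^0.8.0;

    interface IGettersSketch {
        function getGovernor() external view returns (address);
    }

    contract ProxyCallerSketch {
        // The call below lands in the DiamondProxy fallback, which routes the
        // `getGovernor()` selector to whichever facet implements it.
        function readGovernor(address _diamondProxy) external view returns (address) {
            return IGettersSketch(_diamondProxy).getGovernor();
        }
    }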
diff --git a/ethereum/contracts/zksync/Plonk4VerifierWithAccessToDNext.sol b/ethereum/contracts/zksync/Plonk4VerifierWithAccessToDNext.sol
new file mode 100644
index 000000000..5ee98e726
--- /dev/null
+++ b/ethereum/contracts/zksync/Plonk4VerifierWithAccessToDNext.sol
@@ -0,0 +1,705 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./libraries/PairingsBn254.sol";
+import "./libraries/TranscriptLib.sol";
+import "../common/libraries/UncheckedMath.sol";
+
+uint256 constant STATE_WIDTH = 4;
+uint256 constant NUM_G2_ELS = 2;
+
+struct VerificationKey {
+ uint256 domain_size;
+ uint256 num_inputs;
+ PairingsBn254.Fr omega;
+ PairingsBn254.G1Point[2] gate_selectors_commitments;
+ PairingsBn254.G1Point[8] gate_setup_commitments;
+ PairingsBn254.G1Point[STATE_WIDTH] permutation_commitments;
+ PairingsBn254.G1Point lookup_selector_commitment;
+ PairingsBn254.G1Point[4] lookup_tables_commitments;
+ PairingsBn254.G1Point lookup_table_type_commitment;
+ PairingsBn254.Fr[STATE_WIDTH - 1] non_residues;
+ PairingsBn254.G2Point[NUM_G2_ELS] g2_elements;
+}
+
+contract Plonk4VerifierWithAccessToDNext {
+ using PairingsBn254 for PairingsBn254.G1Point;
+ using PairingsBn254 for PairingsBn254.G2Point;
+ using PairingsBn254 for PairingsBn254.Fr;
+
+ using TranscriptLib for TranscriptLib.Transcript;
+
+ using UncheckedMath for uint256;
+
+ struct Proof {
+ uint256[] input_values;
+ // commitments
+ PairingsBn254.G1Point[STATE_WIDTH] state_polys_commitments;
+ PairingsBn254.G1Point copy_permutation_grand_product_commitment;
+ PairingsBn254.G1Point[STATE_WIDTH] quotient_poly_parts_commitments;
+ // openings
+ PairingsBn254.Fr[STATE_WIDTH] state_polys_openings_at_z;
+ PairingsBn254.Fr[1] state_polys_openings_at_z_omega; // TODO: not use array while there is only D_next
+ PairingsBn254.Fr[1] gate_selectors_openings_at_z;
+ PairingsBn254.Fr[STATE_WIDTH - 1] copy_permutation_polys_openings_at_z;
+ PairingsBn254.Fr copy_permutation_grand_product_opening_at_z_omega;
+ PairingsBn254.Fr quotient_poly_opening_at_z;
+ PairingsBn254.Fr linearization_poly_opening_at_z;
+ // lookup commitments
+ PairingsBn254.G1Point lookup_s_poly_commitment;
+ PairingsBn254.G1Point lookup_grand_product_commitment;
+ // lookup openings
+ PairingsBn254.Fr lookup_s_poly_opening_at_z_omega;
+ PairingsBn254.Fr lookup_grand_product_opening_at_z_omega;
+ PairingsBn254.Fr lookup_t_poly_opening_at_z;
+ PairingsBn254.Fr lookup_t_poly_opening_at_z_omega;
+ PairingsBn254.Fr lookup_selector_poly_opening_at_z;
+ PairingsBn254.Fr lookup_table_type_poly_opening_at_z;
+ PairingsBn254.G1Point opening_proof_at_z;
+ PairingsBn254.G1Point opening_proof_at_z_omega;
+ }
+
+ struct PartialVerifierState {
+ PairingsBn254.Fr zero;
+ PairingsBn254.Fr alpha;
+ PairingsBn254.Fr beta;
+ PairingsBn254.Fr gamma;
+ PairingsBn254.Fr[9] alpha_values;
+ PairingsBn254.Fr eta;
+ PairingsBn254.Fr beta_lookup;
+ PairingsBn254.Fr gamma_lookup;
+ PairingsBn254.Fr beta_plus_one;
+ PairingsBn254.Fr beta_gamma;
+ PairingsBn254.Fr v;
+ PairingsBn254.Fr u;
+ PairingsBn254.Fr z;
+ PairingsBn254.Fr z_omega;
+ PairingsBn254.Fr z_minus_last_omega;
+ PairingsBn254.Fr l_0_at_z;
+ PairingsBn254.Fr l_n_minus_one_at_z;
+ PairingsBn254.Fr t;
+ PairingsBn254.G1Point tp;
+ }
+
+ function evaluate_l0_at_point(uint256 domain_size, PairingsBn254.Fr memory at)
+ internal
+ view
+ returns (PairingsBn254.Fr memory num)
+ {
+ PairingsBn254.Fr memory one = PairingsBn254.new_fr(1);
+
+ PairingsBn254.Fr memory size_fe = PairingsBn254.new_fr(domain_size);
+ PairingsBn254.Fr memory den = at.copy();
+ den.sub_assign(one);
+ den.mul_assign(size_fe);
+
+ den = den.inverse();
+
+ num = at.pow(domain_size);
+ num.sub_assign(one);
+ num.mul_assign(den);
+ }
+
+ function evaluate_lagrange_poly_out_of_domain(
+ uint256 poly_num,
+ uint256 domain_size,
+ PairingsBn254.Fr memory omega,
+ PairingsBn254.Fr memory at
+ ) internal view returns (PairingsBn254.Fr memory res) {
+ // (omega^i / N) / (X - omega^i) * (X^N - 1)
+ require(poly_num < domain_size);
+ PairingsBn254.Fr memory one = PairingsBn254.new_fr(1);
+ PairingsBn254.Fr memory omega_power = omega.pow(poly_num);
+ res = at.pow(domain_size);
+ res.sub_assign(one);
+ require(res.value != 0); // Vanishing polynomial can not be zero at point `at`
+ res.mul_assign(omega_power);
+
+ PairingsBn254.Fr memory den = PairingsBn254.copy(at);
+ den.sub_assign(omega_power);
+ den.mul_assign(PairingsBn254.new_fr(domain_size));
+
+ den = den.inverse();
+
+ res.mul_assign(den);
+ }
+
+ function evaluate_vanishing(uint256 domain_size, PairingsBn254.Fr memory at)
+ internal
+ view
+ returns (PairingsBn254.Fr memory res)
+ {
+ res = at.pow(domain_size);
+ res.sub_assign(PairingsBn254.new_fr(1));
+ }
+
+ function initialize_transcript(Proof memory proof, VerificationKey memory vk)
+ internal
+ pure
+ returns (PartialVerifierState memory state)
+ {
+ TranscriptLib.Transcript memory transcript = TranscriptLib.new_transcript();
+
+ for (uint256 i = 0; i < vk.num_inputs; i = i.uncheckedInc()) {
+ transcript.update_with_u256(proof.input_values[i]);
+ }
+
+ for (uint256 i = 0; i < STATE_WIDTH; i = i.uncheckedInc()) {
+ transcript.update_with_g1(proof.state_polys_commitments[i]);
+ }
+
+ state.eta = transcript.get_challenge();
+ transcript.update_with_g1(proof.lookup_s_poly_commitment);
+
+ state.beta = transcript.get_challenge();
+ state.gamma = transcript.get_challenge();
+
+ transcript.update_with_g1(proof.copy_permutation_grand_product_commitment);
+ state.beta_lookup = transcript.get_challenge();
+ state.gamma_lookup = transcript.get_challenge();
+ transcript.update_with_g1(proof.lookup_grand_product_commitment);
+ state.alpha = transcript.get_challenge();
+
+ for (uint256 i = 0; i < proof.quotient_poly_parts_commitments.length; i = i.uncheckedInc()) {
+ transcript.update_with_g1(proof.quotient_poly_parts_commitments[i]);
+ }
+ state.z = transcript.get_challenge();
+
+ transcript.update_with_fr(proof.quotient_poly_opening_at_z);
+
+ for (uint256 i = 0; i < proof.state_polys_openings_at_z.length; i = i.uncheckedInc()) {
+ transcript.update_with_fr(proof.state_polys_openings_at_z[i]);
+ }
+
+ for (uint256 i = 0; i < proof.state_polys_openings_at_z_omega.length; i = i.uncheckedInc()) {
+ transcript.update_with_fr(proof.state_polys_openings_at_z_omega[i]);
+ }
+ for (uint256 i = 0; i < proof.gate_selectors_openings_at_z.length; i = i.uncheckedInc()) {
+ transcript.update_with_fr(proof.gate_selectors_openings_at_z[i]);
+ }
+ for (uint256 i = 0; i < proof.copy_permutation_polys_openings_at_z.length; i = i.uncheckedInc()) {
+ transcript.update_with_fr(proof.copy_permutation_polys_openings_at_z[i]);
+ }
+
+ state.z_omega = state.z.copy();
+ state.z_omega.mul_assign(vk.omega);
+
+ transcript.update_with_fr(proof.copy_permutation_grand_product_opening_at_z_omega);
+
+ transcript.update_with_fr(proof.lookup_t_poly_opening_at_z);
+ transcript.update_with_fr(proof.lookup_selector_poly_opening_at_z);
+ transcript.update_with_fr(proof.lookup_table_type_poly_opening_at_z);
+ transcript.update_with_fr(proof.lookup_s_poly_opening_at_z_omega);
+ transcript.update_with_fr(proof.lookup_grand_product_opening_at_z_omega);
+ transcript.update_with_fr(proof.lookup_t_poly_opening_at_z_omega);
+ transcript.update_with_fr(proof.linearization_poly_opening_at_z);
+
+ state.v = transcript.get_challenge();
+
+ transcript.update_with_g1(proof.opening_proof_at_z);
+ transcript.update_with_g1(proof.opening_proof_at_z_omega);
+
+ state.u = transcript.get_challenge();
+ }
+
+ // Compute some powers of the challenge alpha ([alpha^1, ..., alpha^8])
+ function compute_powers_of_alpha(PartialVerifierState memory state) public pure {
+ require(state.alpha.value != 0);
+ state.alpha_values[0] = PairingsBn254.new_fr(1);
+ state.alpha_values[1] = state.alpha.copy();
+ PairingsBn254.Fr memory current_alpha = state.alpha.copy();
+ for (uint256 i = 2; i < state.alpha_values.length; i = i.uncheckedInc()) {
+ current_alpha.mul_assign(state.alpha);
+ state.alpha_values[i] = current_alpha.copy();
+ }
+ }
+
+ function verify(Proof memory proof, VerificationKey memory vk) internal view returns (bool) {
+ // We initialize all challenges beforehand; alternatively, each challenge could be drawn in its own place
+ PartialVerifierState memory state = initialize_transcript(proof, vk);
+ if (verify_quotient_evaluation(vk, proof, state) == false) {
+ return false;
+ }
+ require(proof.state_polys_openings_at_z_omega.length == 1); // TODO
+
+ PairingsBn254.G1Point memory quotient_result = proof.quotient_poly_parts_commitments[0].copy_g1();
+ {
+ // block scope
+ PairingsBn254.Fr memory z_in_domain_size = state.z.pow(vk.domain_size);
+ PairingsBn254.Fr memory current_z = z_in_domain_size.copy();
+ PairingsBn254.G1Point memory tp;
+ // start from i = 1
+ for (uint256 i = 1; i < proof.quotient_poly_parts_commitments.length; i = i.uncheckedInc()) {
+ tp = proof.quotient_poly_parts_commitments[i].copy_g1();
+ tp.point_mul_assign(current_z);
+ quotient_result.point_add_assign(tp);
+
+ current_z.mul_assign(z_in_domain_size);
+ }
+ }
+
+ Queries memory queries = prepare_queries(vk, proof, state);
+ queries.commitments_at_z[0] = quotient_result;
+ queries.values_at_z[0] = proof.quotient_poly_opening_at_z;
+ queries.commitments_at_z[1] = aggregated_linearization_commitment(vk, proof, state);
+ queries.values_at_z[1] = proof.linearization_poly_opening_at_z;
+
+ require(queries.commitments_at_z.length == queries.values_at_z.length);
+
+ PairingsBn254.G1Point memory aggregated_commitment_at_z = queries.commitments_at_z[0];
+
+ PairingsBn254.Fr memory aggregated_opening_at_z = queries.values_at_z[0];
+ PairingsBn254.Fr memory aggregation_challenge = PairingsBn254.new_fr(1);
+ PairingsBn254.G1Point memory scaled;
+ for (uint256 i = 1; i < queries.commitments_at_z.length; i = i.uncheckedInc()) {
+ aggregation_challenge.mul_assign(state.v);
+ scaled = queries.commitments_at_z[i].point_mul(aggregation_challenge);
+ aggregated_commitment_at_z.point_add_assign(scaled);
+
+ state.t = queries.values_at_z[i];
+ state.t.mul_assign(aggregation_challenge);
+ aggregated_opening_at_z.add_assign(state.t);
+ }
+
+ aggregation_challenge.mul_assign(state.v);
+
+ PairingsBn254.G1Point memory aggregated_commitment_at_z_omega = queries.commitments_at_z_omega[0].point_mul(
+ aggregation_challenge
+ );
+ PairingsBn254.Fr memory aggregated_opening_at_z_omega = queries.values_at_z_omega[0];
+ aggregated_opening_at_z_omega.mul_assign(aggregation_challenge);
+ for (uint256 i = 1; i < queries.commitments_at_z_omega.length; i = i.uncheckedInc()) {
+ aggregation_challenge.mul_assign(state.v);
+
+ scaled = queries.commitments_at_z_omega[i].point_mul(aggregation_challenge);
+ aggregated_commitment_at_z_omega.point_add_assign(scaled);
+
+ state.t = queries.values_at_z_omega[i];
+ state.t.mul_assign(aggregation_challenge);
+ aggregated_opening_at_z_omega.add_assign(state.t);
+ }
+
+ return
+ final_pairing(
+ vk.g2_elements,
+ proof,
+ state,
+ aggregated_commitment_at_z,
+ aggregated_commitment_at_z_omega,
+ aggregated_opening_at_z,
+ aggregated_opening_at_z_omega
+ );
+ }
+
+ function verify_quotient_evaluation(
+ VerificationKey memory vk,
+ Proof memory proof,
+ PartialVerifierState memory state
+ ) internal view returns (bool) {
+ uint256[] memory lagrange_poly_numbers = new uint256[](vk.num_inputs);
+ for (uint256 i = 0; i < lagrange_poly_numbers.length; i = i.uncheckedInc()) {
+ lagrange_poly_numbers[i] = i;
+ }
+ // require(vk.num_inputs > 0); // TODO
+
+ PairingsBn254.Fr memory inputs_term = PairingsBn254.new_fr(0);
+ for (uint256 i = 0; i < vk.num_inputs; i = i.uncheckedInc()) {
+ // TODO: we may use batched Lagrange computation
+ state.t = evaluate_lagrange_poly_out_of_domain(i, vk.domain_size, vk.omega, state.z);
+ state.t.mul_assign(PairingsBn254.new_fr(proof.input_values[i]));
+ inputs_term.add_assign(state.t);
+ }
+ inputs_term.mul_assign(proof.gate_selectors_openings_at_z[0]);
+ PairingsBn254.Fr memory result = proof.linearization_poly_opening_at_z.copy();
+ result.add_assign(inputs_term);
+
+ // compute powers of alpha
+ compute_powers_of_alpha(state);
+ PairingsBn254.Fr memory factor = state.alpha_values[4].copy();
+ factor.mul_assign(proof.copy_permutation_grand_product_opening_at_z_omega);
+
+ // - alpha_0 * (a + perm(z) * beta + gamma)*()*(d + gamma) * z(z*omega)
+ require(proof.copy_permutation_polys_openings_at_z.length == STATE_WIDTH - 1);
+ PairingsBn254.Fr memory t; // TMP;
+ for (uint256 i = 0; i < proof.copy_permutation_polys_openings_at_z.length; i = i.uncheckedInc()) {
+ t = proof.copy_permutation_polys_openings_at_z[i].copy();
+ t.mul_assign(state.beta);
+ t.add_assign(proof.state_polys_openings_at_z[i]);
+ t.add_assign(state.gamma);
+
+ factor.mul_assign(t);
+ }
+
+ t = proof.state_polys_openings_at_z[3].copy();
+ t.add_assign(state.gamma);
+ factor.mul_assign(t);
+ result.sub_assign(factor);
+
+ // - L_0(z) * alpha_1
+ PairingsBn254.Fr memory l_0_at_z = evaluate_l0_at_point(vk.domain_size, state.z);
+ l_0_at_z.mul_assign(state.alpha_values[4 + 1]);
+ result.sub_assign(l_0_at_z);
+
+ PairingsBn254.Fr memory lookup_quotient_contrib = lookup_quotient_contribution(vk, proof, state);
+ result.add_assign(lookup_quotient_contrib);
+
+ PairingsBn254.Fr memory lhs = proof.quotient_poly_opening_at_z.copy();
+ lhs.mul_assign(evaluate_vanishing(vk.domain_size, state.z));
+ return lhs.value == result.value;
+ }
+
+ function lookup_quotient_contribution(
+ VerificationKey memory vk,
+ Proof memory proof,
+ PartialVerifierState memory state
+ ) internal view returns (PairingsBn254.Fr memory result) {
+ PairingsBn254.Fr memory t;
+
+ PairingsBn254.Fr memory one = PairingsBn254.new_fr(1);
+ state.beta_plus_one = state.beta_lookup.copy();
+ state.beta_plus_one.add_assign(one);
+ state.beta_gamma = state.beta_plus_one.copy();
+ state.beta_gamma.mul_assign(state.gamma_lookup);
+
+ // (s'*beta + gamma)*(zw')*alpha
+ t = proof.lookup_s_poly_opening_at_z_omega.copy();
+ t.mul_assign(state.beta_lookup);
+ t.add_assign(state.beta_gamma);
+ t.mul_assign(proof.lookup_grand_product_opening_at_z_omega);
+ t.mul_assign(state.alpha_values[6]);
+
+ // (z - omega^{n-1}) for this part
+ PairingsBn254.Fr memory last_omega = vk.omega.pow(vk.domain_size - 1);
+ state.z_minus_last_omega = state.z.copy();
+ state.z_minus_last_omega.sub_assign(last_omega);
+ t.mul_assign(state.z_minus_last_omega);
+ result.add_assign(t);
+
+ // - alpha_1 * L_{0}(z)
+ state.l_0_at_z = evaluate_lagrange_poly_out_of_domain(0, vk.domain_size, vk.omega, state.z);
+ t = state.l_0_at_z.copy();
+ t.mul_assign(state.alpha_values[6 + 1]);
+ result.sub_assign(t);
+
+ // - alpha_2 * beta_gamma_powered L_{n-1}(z)
+ PairingsBn254.Fr memory beta_gamma_powered = state.beta_gamma.pow(vk.domain_size - 1);
+ state.l_n_minus_one_at_z = evaluate_lagrange_poly_out_of_domain(
+ vk.domain_size - 1,
+ vk.domain_size,
+ vk.omega,
+ state.z
+ );
+ t = state.l_n_minus_one_at_z.copy();
+ t.mul_assign(beta_gamma_powered);
+ t.mul_assign(state.alpha_values[6 + 2]);
+
+ result.sub_assign(t);
+ }
+
+ function aggregated_linearization_commitment(
+ VerificationKey memory vk,
+ Proof memory proof,
+ PartialVerifierState memory state
+ ) internal view returns (PairingsBn254.G1Point memory result) {
+ // qMain*(Q_a * A + Q_b * B + Q_c * C + Q_d * D + Q_m * A*B + Q_const + Q_dNext * D_next)
+ result = PairingsBn254.new_g1(0, 0);
+ // Q_a * A
+ PairingsBn254.G1Point memory scaled = vk.gate_setup_commitments[0].point_mul(
+ proof.state_polys_openings_at_z[0]
+ );
+ result.point_add_assign(scaled);
+ // Q_b * B
+ scaled = vk.gate_setup_commitments[1].point_mul(proof.state_polys_openings_at_z[1]);
+ result.point_add_assign(scaled);
+ // Q_c * C
+ scaled = vk.gate_setup_commitments[2].point_mul(proof.state_polys_openings_at_z[2]);
+ result.point_add_assign(scaled);
+ // Q_d * D
+ scaled = vk.gate_setup_commitments[3].point_mul(proof.state_polys_openings_at_z[3]);
+ result.point_add_assign(scaled);
+ // Q_m* A*B or Q_ab*A*B
+ PairingsBn254.Fr memory t = proof.state_polys_openings_at_z[0].copy();
+ t.mul_assign(proof.state_polys_openings_at_z[1]);
+ scaled = vk.gate_setup_commitments[4].point_mul(t);
+ result.point_add_assign(scaled);
+ // Q_AC* A*C
+ t = proof.state_polys_openings_at_z[0].copy();
+ t.mul_assign(proof.state_polys_openings_at_z[2]);
+ scaled = vk.gate_setup_commitments[5].point_mul(t);
+ result.point_add_assign(scaled);
+ // Q_const
+ result.point_add_assign(vk.gate_setup_commitments[6]);
+ // Q_dNext * D_next
+ scaled = vk.gate_setup_commitments[7].point_mul(proof.state_polys_openings_at_z_omega[0]);
+ result.point_add_assign(scaled);
+ result.point_mul_assign(proof.gate_selectors_openings_at_z[0]);
+
+ PairingsBn254.G1Point
+ memory rescue_custom_gate_linearization_contrib = rescue_custom_gate_linearization_contribution(
+ vk,
+ proof,
+ state
+ );
+ result.point_add_assign(rescue_custom_gate_linearization_contrib);
+ require(vk.non_residues.length == STATE_WIDTH - 1);
+
+ PairingsBn254.Fr memory one = PairingsBn254.new_fr(1);
+ PairingsBn254.Fr memory factor = state.alpha_values[4].copy();
+ for (uint256 i = 0; i < proof.state_polys_openings_at_z.length; ) {
+ t = state.z.copy();
+ if (i == 0) {
+ t.mul_assign(one);
+ } else {
+ t.mul_assign(vk.non_residues[i - 1]); // TODO add one into non-residues during codegen?
+ }
+ t.mul_assign(state.beta);
+ t.add_assign(state.gamma);
+ t.add_assign(proof.state_polys_openings_at_z[i]);
+
+ factor.mul_assign(t);
+ unchecked {
+ ++i;
+ }
+ }
+
+ scaled = proof.copy_permutation_grand_product_commitment.point_mul(factor);
+ result.point_add_assign(scaled);
+
+ // - (a(z) + beta*perm_a + gamma)*()*()*z(z*omega) * beta * perm_d(X)
+ factor = state.alpha_values[4].copy();
+ factor.mul_assign(state.beta);
+ factor.mul_assign(proof.copy_permutation_grand_product_opening_at_z_omega);
+ for (uint256 i = 0; i < STATE_WIDTH - 1; i = i.uncheckedInc()) {
+ t = proof.copy_permutation_polys_openings_at_z[i].copy();
+ t.mul_assign(state.beta);
+ t.add_assign(state.gamma);
+ t.add_assign(proof.state_polys_openings_at_z[i]);
+
+ factor.mul_assign(t);
+ }
+ scaled = vk.permutation_commitments[3].point_mul(factor);
+ result.point_sub_assign(scaled);
+
+ // + L_0(z) * Z(x)
+ // TODO
+ state.l_0_at_z = evaluate_lagrange_poly_out_of_domain(0, vk.domain_size, vk.omega, state.z);
+ require(state.l_0_at_z.value != 0);
+ factor = state.l_0_at_z.copy();
+ factor.mul_assign(state.alpha_values[4 + 1]);
+ scaled = proof.copy_permutation_grand_product_commitment.point_mul(factor);
+ result.point_add_assign(scaled);
+
+ PairingsBn254.G1Point memory lookup_linearization_contrib = lookup_linearization_contribution(proof, state);
+ result.point_add_assign(lookup_linearization_contrib);
+ }
+
+ function rescue_custom_gate_linearization_contribution(
+ VerificationKey memory vk,
+ Proof memory proof,
+ PartialVerifierState memory state
+ ) public view returns (PairingsBn254.G1Point memory result) {
+ PairingsBn254.Fr memory t;
+ PairingsBn254.Fr memory intermediate_result;
+
+ // a^2 - b = 0
+ t = proof.state_polys_openings_at_z[0].copy();
+ t.mul_assign(t);
+ t.sub_assign(proof.state_polys_openings_at_z[1]);
+ // t.mul_assign(challenge1);
+ t.mul_assign(state.alpha_values[1]);
+ intermediate_result.add_assign(t);
+
+ // b^2 - c = 0
+ t = proof.state_polys_openings_at_z[1].copy();
+ t.mul_assign(t);
+ t.sub_assign(proof.state_polys_openings_at_z[2]);
+ t.mul_assign(state.alpha_values[1 + 1]);
+ intermediate_result.add_assign(t);
+
+ // c*a - d = 0;
+ t = proof.state_polys_openings_at_z[2].copy();
+ t.mul_assign(proof.state_polys_openings_at_z[0]);
+ t.sub_assign(proof.state_polys_openings_at_z[3]);
+ t.mul_assign(state.alpha_values[1 + 2]);
+ intermediate_result.add_assign(t);
+
+ result = vk.gate_selectors_commitments[1].point_mul(intermediate_result);
+ }
+
+ function lookup_linearization_contribution(Proof memory proof, PartialVerifierState memory state)
+ internal
+ view
+ returns (PairingsBn254.G1Point memory result)
+ {
+ PairingsBn254.Fr memory zero = PairingsBn254.new_fr(0);
+
+ PairingsBn254.Fr memory t;
+ PairingsBn254.Fr memory factor;
+ // s(x) from the Z(x*omega)*(\gamma*(1 + \beta) + s(x) + \beta * s(x*omega)))
+ factor = proof.lookup_grand_product_opening_at_z_omega.copy();
+ factor.mul_assign(state.alpha_values[6]);
+ factor.mul_assign(state.z_minus_last_omega);
+
+ PairingsBn254.G1Point memory scaled = proof.lookup_s_poly_commitment.point_mul(factor);
+ result.point_add_assign(scaled);
+
+ // Z(x) from - alpha_0 * Z(x) * (\beta + 1) * (\gamma + f(x)) * (\gamma(1 + \beta) + t(x) + \beta * t(x*omega))
+ // + alpha_1 * Z(x) * L_{0}(z) + alpha_2 * Z(x) * L_{n-1}(z)
+
+ // accumulate coefficient
+ factor = proof.lookup_t_poly_opening_at_z_omega.copy();
+ factor.mul_assign(state.beta_lookup);
+ factor.add_assign(proof.lookup_t_poly_opening_at_z);
+ factor.add_assign(state.beta_gamma);
+
+ // (\gamma + f(x))
+ PairingsBn254.Fr memory f_reconstructed;
+ PairingsBn254.Fr memory current = PairingsBn254.new_fr(1);
+ PairingsBn254.Fr memory tmp0;
+ for (uint256 i = 0; i < STATE_WIDTH - 1; i = i.uncheckedInc()) {
+ tmp0 = proof.state_polys_openings_at_z[i].copy();
+ tmp0.mul_assign(current);
+ f_reconstructed.add_assign(tmp0);
+
+ current.mul_assign(state.eta);
+ }
+
+ // add type of table
+ t = proof.lookup_table_type_poly_opening_at_z.copy();
+ t.mul_assign(current);
+ f_reconstructed.add_assign(t);
+
+ f_reconstructed.mul_assign(proof.lookup_selector_poly_opening_at_z);
+ f_reconstructed.add_assign(state.gamma_lookup);
+
+ // end of (\gamma + f(x)) part
+ factor.mul_assign(f_reconstructed);
+ factor.mul_assign(state.beta_plus_one);
+ t = zero.copy();
+ t.sub_assign(factor);
+ factor = t;
+ factor.mul_assign(state.alpha_values[6]);
+
+ // Multiply by (z - omega^{n-1})
+ factor.mul_assign(state.z_minus_last_omega);
+
+ // L_{0}(z) in front of Z(x)
+ t = state.l_0_at_z.copy();
+ t.mul_assign(state.alpha_values[6 + 1]);
+ factor.add_assign(t);
+
+ // L_{n-1}(z) in front of Z(x)
+ t = state.l_n_minus_one_at_z.copy();
+ t.mul_assign(state.alpha_values[6 + 2]);
+ factor.add_assign(t);
+
+ scaled = proof.lookup_grand_product_commitment.point_mul(factor);
+ result.point_add_assign(scaled);
+ }
+
+ struct Queries {
+ PairingsBn254.G1Point[13] commitments_at_z;
+ PairingsBn254.Fr[13] values_at_z;
+ PairingsBn254.G1Point[6] commitments_at_z_omega;
+ PairingsBn254.Fr[6] values_at_z_omega;
+ }
+
+ function prepare_queries(
+ VerificationKey memory vk,
+ Proof memory proof,
+ PartialVerifierState memory state
+ ) public view returns (Queries memory queries) {
+ // The first two items are set on the callee side, so start idx from 2
+ uint256 idx = 2;
+ for (uint256 i = 0; i < STATE_WIDTH; i = i.uncheckedInc()) {
+ queries.commitments_at_z[idx] = proof.state_polys_commitments[i];
+ queries.values_at_z[idx] = proof.state_polys_openings_at_z[i];
+ idx = idx.uncheckedInc();
+ }
+ require(proof.gate_selectors_openings_at_z.length == 1);
+ queries.commitments_at_z[idx] = vk.gate_selectors_commitments[0];
+ queries.values_at_z[idx] = proof.gate_selectors_openings_at_z[0];
+ idx = idx.uncheckedInc();
+ for (uint256 i = 0; i < STATE_WIDTH - 1; i = i.uncheckedInc()) {
+ queries.commitments_at_z[idx] = vk.permutation_commitments[i];
+ queries.values_at_z[idx] = proof.copy_permutation_polys_openings_at_z[i];
+ idx = idx.uncheckedInc();
+ }
+
+ queries.commitments_at_z_omega[0] = proof.copy_permutation_grand_product_commitment;
+ queries.commitments_at_z_omega[1] = proof.state_polys_commitments[STATE_WIDTH - 1];
+
+ queries.values_at_z_omega[0] = proof.copy_permutation_grand_product_opening_at_z_omega;
+ queries.values_at_z_omega[1] = proof.state_polys_openings_at_z_omega[0];
+
+ PairingsBn254.G1Point memory lookup_t_poly_commitment_aggregated = vk.lookup_tables_commitments[0];
+ PairingsBn254.Fr memory current_eta = state.eta.copy();
+ for (uint256 i = 1; i < vk.lookup_tables_commitments.length; i = i.uncheckedInc()) {
+ state.tp = vk.lookup_tables_commitments[i].point_mul(current_eta);
+ lookup_t_poly_commitment_aggregated.point_add_assign(state.tp);
+
+ current_eta.mul_assign(state.eta);
+ }
+ queries.commitments_at_z[idx] = lookup_t_poly_commitment_aggregated;
+ queries.values_at_z[idx] = proof.lookup_t_poly_opening_at_z;
+ idx = idx.uncheckedInc();
+ queries.commitments_at_z[idx] = vk.lookup_selector_commitment;
+ queries.values_at_z[idx] = proof.lookup_selector_poly_opening_at_z;
+ idx = idx.uncheckedInc();
+ queries.commitments_at_z[idx] = vk.lookup_table_type_commitment;
+ queries.values_at_z[idx] = proof.lookup_table_type_poly_opening_at_z;
+ queries.commitments_at_z_omega[2] = proof.lookup_s_poly_commitment;
+ queries.values_at_z_omega[2] = proof.lookup_s_poly_opening_at_z_omega;
+ queries.commitments_at_z_omega[3] = proof.lookup_grand_product_commitment;
+ queries.values_at_z_omega[3] = proof.lookup_grand_product_opening_at_z_omega;
+ queries.commitments_at_z_omega[4] = lookup_t_poly_commitment_aggregated;
+ queries.values_at_z_omega[4] = proof.lookup_t_poly_opening_at_z_omega;
+ }
+
+ function final_pairing(
+ // VerificationKey memory vk,
+ PairingsBn254.G2Point[NUM_G2_ELS] memory g2_elements,
+ Proof memory proof,
+ PartialVerifierState memory state,
+ PairingsBn254.G1Point memory aggregated_commitment_at_z,
+ PairingsBn254.G1Point memory aggregated_commitment_at_z_omega,
+ PairingsBn254.Fr memory aggregated_opening_at_z,
+ PairingsBn254.Fr memory aggregated_opening_at_z_omega
+ ) internal view returns (bool) {
+ // q(x) = f(x) - f(z) / (x - z)
+ // q(x) * (x-z) = f(x) - f(z)
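+ // Roughly, the batched check assembled below is:
+ // e(F - E + z*W + u*z_omega*W', [1]_2) * e(-(W + u*W'), [x]_2) == 1,
+ // where F is the aggregated commitment, E = (aggregated opening) * G1,
+ // and W, W' are the opening proofs at z and z*omega respectively.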
+
+ // f(x)
+ PairingsBn254.G1Point memory pair_with_generator = aggregated_commitment_at_z.copy_g1();
+ aggregated_commitment_at_z_omega.point_mul_assign(state.u);
+ pair_with_generator.point_add_assign(aggregated_commitment_at_z_omega);
+
+ // - f(z)*g
+ PairingsBn254.Fr memory aggregated_value = aggregated_opening_at_z_omega.copy();
+ aggregated_value.mul_assign(state.u);
+ aggregated_value.add_assign(aggregated_opening_at_z);
+ PairingsBn254.G1Point memory tp = PairingsBn254.P1().point_mul(aggregated_value);
+ pair_with_generator.point_sub_assign(tp);
+
+ // +z * q(x)
+ tp = proof.opening_proof_at_z.point_mul(state.z);
+ PairingsBn254.Fr memory t = state.z_omega.copy();
+ t.mul_assign(state.u);
+ PairingsBn254.G1Point memory t1 = proof.opening_proof_at_z_omega.point_mul(t);
+ tp.point_add_assign(t1);
+ pair_with_generator.point_add_assign(tp);
+
+ // rhs
+ PairingsBn254.G1Point memory pair_with_x = proof.opening_proof_at_z_omega.point_mul(state.u);
+ pair_with_x.point_add_assign(proof.opening_proof_at_z);
+ pair_with_x.negate();
+ // Pairing precompile expects points to be in a `i*x[1] + x[0]` form instead of `x[0] + i*x[1]`
+ // so we handle it at the code generation step
+ PairingsBn254.G2Point memory first_g2 = g2_elements[0];
+ PairingsBn254.G2Point memory second_g2 = g2_elements[1];
+ PairingsBn254.G2Point memory gen2 = PairingsBn254.P2();
+
+ return PairingsBn254.pairingProd2(pair_with_generator, first_g2, pair_with_x, second_g2);
+ }
+}
diff --git a/ethereum/contracts/zksync/Storage.sol b/ethereum/contracts/zksync/Storage.sol
new file mode 100644
index 000000000..d7eb684d2
--- /dev/null
+++ b/ethereum/contracts/zksync/Storage.sol
@@ -0,0 +1,129 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./Verifier.sol";
+import "../common/interfaces/IAllowList.sol";
+import "./libraries/PriorityQueue.sol";
+
+/// @notice Indicates whether an upgrade is initiated and if yes what type
+/// @param None Upgrade is NOT initiated
+/// @param Transparent Fully transparent upgrade is initiated, upgrade data is publicly known
+/// @param Shadow Shadow upgrade is initiated, upgrade data is hidden
+enum UpgradeState {
+ None,
+ Transparent,
+ Shadow
+}
+
+/// @dev Logically separated part of the storage structure, which is responsible for everything related to proxy upgrades and diamond cuts
+/// @param proposedUpgradeHash The hash of the current upgrade proposal, zero if there is no active proposal
+/// @param state Indicates whether an upgrade is initiated and if yes what type
+/// @param securityCouncil Address which has the permission to approve instant upgrades (expected to be a Gnosis multisig)
+/// @param approvedBySecurityCouncil Indicates whether the security council has approved the upgrade
+/// @param proposedUpgradeTimestamp The timestamp when the upgrade was proposed, zero if there are no active proposals
+/// @param currentProposalId The serial number of proposed upgrades, increments when proposing a new one
+struct UpgradeStorage {
+ bytes32 proposedUpgradeHash;
+ UpgradeState state;
+ address securityCouncil;
+ bool approvedBySecurityCouncil;
+ uint40 proposedUpgradeTimestamp;
+ uint40 currentProposalId;
+}
+
+/// @dev The log passed from L2
+/// @param l2ShardId The shard identifier, 0 - rollup, 1 - porter. All other values are not used but are reserved for the future
+/// @param isService A boolean flag that is part of the log along with `key`, `value`, and `sender` address.
+/// This field is required formally but does not have any special meaning.
+/// @param txNumberInBlock The L2 transaction number in a block, in which the log was sent
+/// @param sender The L2 address which sent the log
+/// @param key The 32 bytes of information that was sent in the log
+/// @param value The 32 bytes of information that was sent in the log
+// Both `key` and `value` are arbitrary 32-bytes selected by the log sender
+struct L2Log {
+ uint8 l2ShardId;
+ bool isService;
+ uint16 txNumberInBlock;
+ address sender;
+ bytes32 key;
+ bytes32 value;
+}
+
+/// @dev An arbitrary length message passed from L2
+/// @notice Under the hood it is `L2Log` sent from the special system L2 contract
+/// @param txNumberInBlock The L2 transaction number in a block, in which the message was sent
+/// @param sender The address of the L2 account from which the message was passed
+/// @param data An arbitrary length message
+struct L2Message {
+ uint16 txNumberInBlock;
+ address sender;
+ bytes data;
+}
+
+/// @notice Part of the configuration parameters of ZKP circuits
+struct VerifierParams {
+ bytes32 recursionNodeLevelVkHash;
+ bytes32 recursionLeafLevelVkHash;
+ bytes32 recursionCircuitsSetVksHash;
+}
+
+/// @dev Stores all storage variables for the zkSync facets
+/// NOTE: It is used in a proxy, so it is possible to add new variables to the end
+/// NOTE: but NOT to modify already existing variables or change their order
+/// NOTE: DiamondCutStorage is unused, but it must remain a member of AppStorage to avoid a storage collision
+/// NOTE: Instead, UpgradeStorage is used, which is appended to the end of the AppStorage struct
+struct AppStorage {
+ /// @dev Storage of variables needed for deprecated diamond cut facet
+ uint256[7] __DEPRECATED_diamondCutStorage;
+ /// @notice Address which will exercise governance over the network i.e. change validator set, conduct upgrades
+ address governor;
+ /// @notice Address that the governor proposed as one that will replace it
+ address pendingGovernor;
+ /// @notice List of permitted validators
+ mapping(address => bool) validators;
+ /// @dev Verifier contract. Used to verify aggregated proof for blocks
+ Verifier verifier;
+ /// @notice Total number of executed blocks i.e. blocks[totalBlocksExecuted] points at the latest executed block (block 0 is genesis)
+ uint256 totalBlocksExecuted;
+ /// @notice Total number of proved blocks i.e. blocks[totalBlocksProved] points at the latest proved block
+ uint256 totalBlocksVerified;
+ /// @notice Total number of committed blocks i.e. blocks[totalBlocksCommitted] points at the latest committed block
+ uint256 totalBlocksCommitted;
+ /// @dev Stored hashed StoredBlock for block number
+ mapping(uint256 => bytes32) storedBlockHashes;
+ /// @dev Stored root hashes of L2 -> L1 logs
+ mapping(uint256 => bytes32) l2LogsRootHashes;
+ /// @dev Container that stores transactions requested from L1
+ PriorityQueue.Queue priorityQueue;
+ /// @dev The smart contract that manages the list with permission to call contract functions
+ IAllowList allowList;
+ /// @notice Part of the configuration parameters of ZKP circuits. Used as an input for the verifier smart contract
+ VerifierParams verifierParams;
+ /// @notice Bytecode hash of bootloader program.
+ /// @dev Used as an input to zkp-circuit.
+ bytes32 l2BootloaderBytecodeHash;
+ /// @notice Bytecode hash of default account (bytecode for EOA).
+ /// @dev Used as an input to zkp-circuit.
+ bytes32 l2DefaultAccountBytecodeHash;
+ /// @dev Indicates that the porter may be touched on L2 transactions.
+ /// @dev Used as an input to zkp-circuit.
+ bool zkPorterIsAvailable;
+ /// @dev The maximum number of the L2 gas that a user can request for L1 -> L2 transactions
+ /// @dev This is the maximum number of L2 gas that is available for the "body" of the transaction, i.e.
+ /// without overhead for proving the block.
+ uint256 priorityTxMaxGasLimit;
+ /// @dev Storage of variables needed for upgrade facet
+ UpgradeStorage upgrades;
+ /// @dev A mapping L2 block number => message number => flag.
+ /// @dev The L2 -> L1 log is sent for every withdrawal, so this mapping is serving as
+ /// a flag to indicate that the message was already processed.
+ /// @dev Used to indicate that eth withdrawal was already processed
+ mapping(uint256 => mapping(uint256 => bool)) isEthWithdrawalFinalized;
+ /// @dev The most recent withdrawal time and amount reset
+ uint256 lastWithdrawalLimitReset;
+ /// @dev The accumulated withdrawn amount during the withdrawal limit window
+ uint256 withdrawnAmountInWindow;
+ /// @dev A mapping user address => the total deposited amount by the user
+ mapping(address => uint256) totalDepositedAmountPerUser;
+}
diff --git a/ethereum/contracts/zksync/Verifier.sol b/ethereum/contracts/zksync/Verifier.sol
new file mode 100644
index 000000000..8c41587c4
--- /dev/null
+++ b/ethereum/contracts/zksync/Verifier.sol
@@ -0,0 +1,237 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./Plonk4VerifierWithAccessToDNext.sol";
+import "../common/libraries/UncheckedMath.sol";
+
+contract Verifier is Plonk4VerifierWithAccessToDNext {
+ using UncheckedMath for uint256;
+
+ function get_verification_key() public pure returns (VerificationKey memory vk) {
+ vk.num_inputs = 1;
+ vk.domain_size = 67108864;
+ vk.omega = PairingsBn254.new_fr(0x1dba8b5bdd64ef6ce29a9039aca3c0e524395c43b9227b96c75090cc6cc7ec97);
+ // coefficients
+ vk.gate_setup_commitments[0] = PairingsBn254.new_g1(
+ 0x08fa9d6f0dd6ac1cbeb94ae20fe7a23df05cb1095df66fb561190e615a4037ef,
+ 0x196dcc8692fe322d21375920559944c12ba7b1ba8b732344cf4ba2e3aa0fc8b4
+ );
+ vk.gate_setup_commitments[1] = PairingsBn254.new_g1(
+ 0x0074aaf5d97bd57551311a8b3e4aa7840bc55896502020b2f43ad6a98d81a443,
+ 0x2d275a3ad153dc9d89ebb9c9b6a0afd2dde82470554e9738d905c328fbb4c8bc
+ );
+ vk.gate_setup_commitments[2] = PairingsBn254.new_g1(
+ 0x287f1975a9aeaef5d2bb0767b5ef538f76e82f7da01c0cb6db8c6f920818ec4f,
+ 0x2fff6f53594129f794a7731d963d27e72f385c5c6d8e08829e6f66a9d29a12ea
+ );
+ vk.gate_setup_commitments[3] = PairingsBn254.new_g1(
+ 0x038809fa3d4b7320d43e023454194f0a7878baa7e73a295d2d105260f1c34cbc,
+ 0x25418b1105cf45b2a3da6c349bab1d9caaf145eaf24d1e8fb92c11654c000781
+ );
+ vk.gate_setup_commitments[4] = PairingsBn254.new_g1(
+ 0x0561cafd527ac3f0bc550db77d87cd1c63938f7ec051e62ebf84a5bbe07f9840,
+ 0x28f87201b4cbe19f1517a1c29ca6d6cb074502ccfed4c31c8931c6992c3eea43
+ );
+ vk.gate_setup_commitments[5] = PairingsBn254.new_g1(
+ 0x27e0af572bac6e36d31c33808cb44c0ef8ceee5e2850e916fb01f3747db72491,
+ 0x1da20087ba61c59366b21e31e4ac6889d357cf11bf16b94d875f94f41525c427
+ );
+ vk.gate_setup_commitments[6] = PairingsBn254.new_g1(
+ 0x2c2bcafea8f93d07f96874f470985a8d272c09c8ed49373f36497ee80bd8da17,
+ 0x299276cf6dca1a7e3780f6276c5d067403f6e024e83e0cc1ab4c5f7252b7f653
+ );
+ vk.gate_setup_commitments[7] = PairingsBn254.new_g1(
+ 0x0ba9d4a53e050da25b8410045b634f1ca065ff74acd35bab1a72bf1f20047ef3,
+ 0x1f1eefc8b0507a08f852f554bd7abcbd506e52de390ca127477a678d212abfe5
+ );
+ // gate selectors
+ vk.gate_selectors_commitments[0] = PairingsBn254.new_g1(
+ 0x1c6b68d9920620012d85a4850dad9bd6d03ae8bbc7a08b827199e85dba1ef2b1,
+ 0x0f6380560d1b585628ed259289cec19d3a7c70c60e66bbfebfcb70c8c312d91e
+ );
+ vk.gate_selectors_commitments[1] = PairingsBn254.new_g1(
+ 0x0dfead780e5067181aae631ff734a33fca302773472997daca58ba49dbd20dcc,
+ 0x00f13fa6e356f525d2fd1c533acf2858c0d2b9f0a9b3180f94e1543929c75073
+ );
+ // permutation
+ vk.permutation_commitments[0] = PairingsBn254.new_g1(
+ 0x1df0747c787934650d99c5696f9273088ad07ec3e0825c9d39685a9b9978ebed,
+ 0x2ace2a277becbc69af4e89518eb50960a733d9d71354845ea43d2e65c8e0e4cb
+ );
+ vk.permutation_commitments[1] = PairingsBn254.new_g1(
+ 0x06598c8236a5f5045cd7444dc87f3e1f66f99bf01251e13be4dc0ab1f7f1af4b,
+ 0x14ca234fe9b3bb1e5517fc60d6b90f8ad44b0899a2d4f71a64c9640b3142ce8b
+ );
+ vk.permutation_commitments[2] = PairingsBn254.new_g1(
+ 0x1889e2c684caefde60471748f4259196ecf4209a735ccdf7b1816f05bafa50a,
+ 0x92d287a080bfe2fd40ad392ff290e462cd0e347b8fd9d05b90af234ce77a11b
+ );
+ vk.permutation_commitments[3] = PairingsBn254.new_g1(
+ 0x0dd98eeb5bc12c221da969398b67750a8774dbdd37a78da52367f9fc0e566d5c,
+ 0x06750ceb40c9fb87fc424df9599340938b7552b759914a90cb0e41d3915c945b
+ );
+ // lookup table commitments
+ vk.lookup_selector_commitment = PairingsBn254.new_g1(
+ 0x2f491c662ae53ceb358f57a868dc00b89befa853bd9a449127ea2d46820995bd,
+ 0x231fe6538634ff8b6fa21ca248fb15e7f43d82eb0bfa705490d24ddb3e3cad77
+ );
+ vk.lookup_tables_commitments[0] = PairingsBn254.new_g1(
+ 0x0ebe0de4a2f39df3b903da484c1641ffdffb77ff87ce4f9508c548659eb22d3c,
+ 0x12a3209440242d5662729558f1017ed9dcc08fe49a99554dd45f5f15da5e4e0b
+ );
+ vk.lookup_tables_commitments[1] = PairingsBn254.new_g1(
+ 0x1b7d54f8065ca63bed0bfbb9280a1011b886d07e0c0a26a66ecc96af68c53bf9,
+ 0x2c51121fff5b8f58c302f03c74e0cb176ae5a1d1730dec4696eb9cce3fe284ca
+ );
+ vk.lookup_tables_commitments[2] = PairingsBn254.new_g1(
+ 0x0138733c5faa9db6d4b8df9748081e38405999e511fb22d40f77cf3aef293c44,
+ 0x269bee1c1ac28053238f7fe789f1ea2e481742d6d16ae78ed81e87c254af0765
+ );
+ vk.lookup_tables_commitments[3] = PairingsBn254.new_g1(
+ 0x1b1be7279d59445065a95f01f16686adfa798ec4f1e6845ffcec9b837e88372e,
+ 0x057c90cb96d8259238ed86b05f629efd55f472a721efeeb56926e979433e6c0e
+ );
+ vk.lookup_table_type_commitment = PairingsBn254.new_g1(
+ 0x12cd873a6f18a4a590a846d9ebf61565197edf457efd26bc408eb61b72f37b59,
+ 0x19890cbdac892682e7a5910ca6c238c082130e1c71f33d0c9c901153377770d1
+ );
+ // non residues
+ vk.non_residues[0] = PairingsBn254.new_fr(0x0000000000000000000000000000000000000000000000000000000000000005);
+ vk.non_residues[1] = PairingsBn254.new_fr(0x0000000000000000000000000000000000000000000000000000000000000007);
+ vk.non_residues[2] = PairingsBn254.new_fr(0x000000000000000000000000000000000000000000000000000000000000000a);
+
+ // g2 elements
+ vk.g2_elements[0] = PairingsBn254.new_g2(
+ [
+ 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2,
+ 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed
+ ],
+ [
+ 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b,
+ 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa
+ ]
+ );
+ vk.g2_elements[1] = PairingsBn254.new_g2(
+ [
+ 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1,
+ 0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0
+ ],
+ [
+ 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4,
+ 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55
+ ]
+ );
+ }
+
+ function deserialize_proof(uint256[] calldata public_inputs, uint256[] calldata serialized_proof)
+ internal
+ pure
+ returns (Proof memory proof)
+ {
+ // require(serialized_proof.length == 44); TODO
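+ // Expected layout of `serialized_proof`, as read below: state poly commitments,
+ // copy-permutation grand product commitment, lookup S poly commitment, lookup grand product
+ // commitment and quotient poly part commitments (2 words per point), followed by all the
+ // field-element openings (1 word each) and, finally, the two opening proof points.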
+ proof.input_values = new uint256[](public_inputs.length);
+ for (uint256 i = 0; i < public_inputs.length; i = i.uncheckedInc()) {
+ proof.input_values[i] = public_inputs[i];
+ }
+
+ uint256 j;
+ for (uint256 i = 0; i < STATE_WIDTH; i = i.uncheckedInc()) {
+ proof.state_polys_commitments[i] = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+
+ j = j.uncheckedAdd(2);
+ }
+ proof.copy_permutation_grand_product_commitment = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ j = j.uncheckedAdd(2);
+
+ proof.lookup_s_poly_commitment = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ j = j.uncheckedAdd(2);
+
+ proof.lookup_grand_product_commitment = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ j = j.uncheckedAdd(2);
+ for (uint256 i = 0; i < proof.quotient_poly_parts_commitments.length; i = i.uncheckedInc()) {
+ proof.quotient_poly_parts_commitments[i] = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ j = j.uncheckedAdd(2);
+ }
+
+ for (uint256 i = 0; i < proof.state_polys_openings_at_z.length; i = i.uncheckedInc()) {
+ proof.state_polys_openings_at_z[i] = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ }
+
+ for (uint256 i = 0; i < proof.state_polys_openings_at_z_omega.length; i = i.uncheckedInc()) {
+ proof.state_polys_openings_at_z_omega[i] = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ }
+ for (uint256 i = 0; i < proof.gate_selectors_openings_at_z.length; i = i.uncheckedInc()) {
+ proof.gate_selectors_openings_at_z[i] = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ }
+ for (uint256 i = 0; i < proof.copy_permutation_polys_openings_at_z.length; i = i.uncheckedInc()) {
+ proof.copy_permutation_polys_openings_at_z[i] = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ }
+ proof.copy_permutation_grand_product_opening_at_z_omega = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ proof.lookup_s_poly_opening_at_z_omega = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.lookup_grand_product_opening_at_z_omega = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ proof.lookup_t_poly_opening_at_z = PairingsBn254.new_fr(serialized_proof[j]);
+
+ j = j.uncheckedInc();
+ proof.lookup_t_poly_opening_at_z_omega = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.lookup_selector_poly_opening_at_z = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.lookup_table_type_poly_opening_at_z = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.quotient_poly_opening_at_z = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.linearization_poly_opening_at_z = PairingsBn254.new_fr(serialized_proof[j]);
+ j = j.uncheckedInc();
+ proof.opening_proof_at_z = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ j = j.uncheckedAdd(2);
+ proof.opening_proof_at_z_omega = PairingsBn254.new_g1_checked(
+ serialized_proof[j],
+ serialized_proof[j.uncheckedInc()]
+ );
+ }
+
+ function verify_serialized_proof(uint256[] calldata public_inputs, uint256[] calldata serialized_proof)
+ public
+ view
+ returns (bool)
+ {
+ VerificationKey memory vk = get_verification_key();
+ require(vk.num_inputs == public_inputs.length);
+
+ Proof memory proof = deserialize_proof(public_inputs, serialized_proof);
+
+ return verify(proof, vk);
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/Base.sol b/ethereum/contracts/zksync/facets/Base.sol
new file mode 100644
index 000000000..e399deac6
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/Base.sol
@@ -0,0 +1,30 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../Storage.sol";
+import "../../common/ReentrancyGuard.sol";
+import "../../common/AllowListed.sol";
+
+/// @title Base contract containing functions accessible to the other facets.
+/// @author Matter Labs
+contract Base is ReentrancyGuard, AllowListed {
+ AppStorage internal s;
+
+ /// @notice Checks that the message sender is an active governor
+ modifier onlyGovernor() {
+ require(msg.sender == s.governor, "1g"); // only by governor
+ _;
+ }
+
+ /// @notice Checks if validator is active
+ modifier onlyValidator() {
+ require(s.validators[msg.sender], "1h"); // validator is not active
+ _;
+ }
+
+ modifier onlySecurityCouncil() {
+ require(msg.sender == s.upgrades.securityCouncil, "a9"); // not a security council
+ _;
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/DiamondCut.sol b/ethereum/contracts/zksync/facets/DiamondCut.sol
new file mode 100644
index 000000000..2cb769638
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/DiamondCut.sol
@@ -0,0 +1,196 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../common/libraries/UncheckedMath.sol";
+import "../interfaces/IDiamondCut.sol";
+import "../libraries/Diamond.sol";
+import "../Config.sol";
+import "./Base.sol";
+
+/// @title DiamondCutFacet - contract responsible for the management of upgrades.
+/// @author Matter Labs
+contract DiamondCutFacet is Base, IDiamondCut {
+ using UncheckedMath for uint256;
+
+ modifier upgradeProposed() {
+ require(s.upgrades.state != UpgradeState.None, "a3"); // Proposal doesn't exist
+ _;
+ }
+
+ modifier noUpgradeProposed() {
+ require(s.upgrades.state == UpgradeState.None, "a8"); // Proposal already exists
+ _;
+ }
+
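+ // Note on the upgrade lifecycle implemented below: the governor proposes a transparent or shadow
+ // upgrade, the security council may approve it for instant execution, and the governor then either
+ // executes or cancels it; freezing or unfreezing the diamond also resets any pending proposal.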
+ /*//////////////////////////////////////////////////////////////
+ UPGRADE PROPOSING
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Propose a fully transparent upgrade, providing upgrade data on-chain
+ /// @notice The governor will be able to execute the proposal either
+ /// - With a `UPGRADE_NOTICE_PERIOD` timelock on its own
+ /// - With security council approvals instantly
+ /// @dev Only the current governor can propose an upgrade
+ /// @param _diamondCut The diamond cut parameters that will be executed with the upgrade
+ function proposeTransparentUpgrade(Diamond.DiamondCutData calldata _diamondCut, uint40 _proposalId)
+ external
+ onlyGovernor
+ noUpgradeProposed
+ {
+ // Set the default value for the proposal salt; since the proposal is fully transparent, it doesn't change anything
+ bytes32 proposalSalt;
+ uint40 expectedProposalId = s.upgrades.currentProposalId + 1;
+ // Input proposal ID should be equal to the expected proposal id
+ require(_proposalId == expectedProposalId, "yb");
+ s.upgrades.proposedUpgradeHash = upgradeProposalHash(_diamondCut, expectedProposalId, proposalSalt);
+ s.upgrades.proposedUpgradeTimestamp = uint40(block.timestamp);
+ s.upgrades.currentProposalId = expectedProposalId;
+ s.upgrades.state = UpgradeState.Transparent;
+
+ emit ProposeTransparentUpgrade(_diamondCut, expectedProposalId, proposalSalt);
+ }
+
+ /// @notice Propose "shadow" upgrade, upgrade data is not publishing on-chain
+ /// @notice The governor will be able to execute the proposal only:
+ /// - With security council approvals instantly
+ /// @dev Only the current governor can propose an upgrade
+ /// @param _proposalHash Upgrade proposal hash (see `upgradeProposalHash` function)
+ /// @param _proposalId An expected value for the current proposal Id
+ function proposeShadowUpgrade(bytes32 _proposalHash, uint40 _proposalId) external onlyGovernor noUpgradeProposed {
+ require(_proposalHash != bytes32(0), "mi");
+
+ s.upgrades.proposedUpgradeHash = _proposalHash;
+ s.upgrades.proposedUpgradeTimestamp = uint40(block.timestamp); // Safe to cast
+ s.upgrades.state = UpgradeState.Shadow;
+
+ uint256 currentProposalId = s.upgrades.currentProposalId;
+ // Expected proposal ID should be one more than the current saved proposal ID value
+ require(_proposalId == currentProposalId.uncheckedInc(), "ya");
+ s.upgrades.currentProposalId = _proposalId;
+
+ emit ProposeShadowUpgrade(_proposalId, _proposalHash);
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ UPGRADE CANCELING
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Cancel the proposed upgrade
+ /// @dev Only the current governor can remove the proposal
+ /// @param _proposedUpgradeHash Expected upgrade hash value to be canceled
+ function cancelUpgradeProposal(bytes32 _proposedUpgradeHash) external onlyGovernor upgradeProposed {
+ bytes32 currentUpgradeHash = s.upgrades.proposedUpgradeHash;
+ // Soft check that the governor is not mistaken about canceling proposals
+ require(_proposedUpgradeHash == currentUpgradeHash, "rx");
+
+ _resetProposal();
+ emit CancelUpgradeProposal(s.upgrades.currentProposalId, currentUpgradeHash);
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ SECURITY COUNCIL
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Approves the instant upgrade by the security council
+ /// @param _upgradeProposalHash The upgrade proposal hash that security council members want to approve. Needed to prevent unintentional approvals, including reorg attacks
+ function securityCouncilUpgradeApprove(bytes32 _upgradeProposalHash) external onlySecurityCouncil upgradeProposed {
+ require(s.upgrades.proposedUpgradeHash == _upgradeProposalHash, "un");
+ s.upgrades.approvedBySecurityCouncil = true;
+
+ emit SecurityCouncilUpgradeApprove(s.upgrades.currentProposalId, _upgradeProposalHash);
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ UPGRADE EXECUTION
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Executes a proposed governor upgrade
+ /// @dev Only the current governor can execute the upgrade
+ /// @param _diamondCut The diamond cut parameters to be executed
+ /// @param _proposalSalt The committed 32 bytes salt for upgrade proposal data
+ function executeUpgrade(Diamond.DiamondCutData calldata _diamondCut, bytes32 _proposalSalt) external onlyGovernor {
+ Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage();
+
+ bool approvedBySecurityCouncil = s.upgrades.approvedBySecurityCouncil;
+ UpgradeState upgradeState = s.upgrades.state;
+ if (upgradeState == UpgradeState.Transparent) {
+ bool upgradeNoticePeriodPassed = block.timestamp >=
+ s.upgrades.proposedUpgradeTimestamp + UPGRADE_NOTICE_PERIOD;
+ require(upgradeNoticePeriodPassed || approvedBySecurityCouncil, "va");
+ require(_proposalSalt == bytes32(0), "po"); // The transparent upgrade may be initiated only with zero salt
+ } else if (upgradeState == UpgradeState.Shadow) {
+ require(approvedBySecurityCouncil, "av");
+ require(_proposalSalt != bytes32(0), "op"); // Shadow upgrade should be initialized with "random" salt
+ } else {
+ revert("ab"); // There is no active upgrade
+ }
+
+ require(approvedBySecurityCouncil || !diamondStorage.isFrozen, "f3");
+ // Should not be frozen or should have enough security council approvals
+
+ uint256 currentProposalId = s.upgrades.currentProposalId;
+ bytes32 executingProposalHash = upgradeProposalHash(_diamondCut, currentProposalId, _proposalSalt);
+ require(s.upgrades.proposedUpgradeHash == executingProposalHash, "a4"); // Proposal should be created
+ _resetProposal();
+
+ if (diamondStorage.isFrozen) {
+ diamondStorage.isFrozen = false;
+ emit Unfreeze();
+ }
+
+ Diamond.diamondCut(_diamondCut);
+ emit ExecuteUpgrade(currentProposalId, executingProposalHash, _proposalSalt);
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ CONTRACT FREEZING
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Instantly pause the functionality of all freezable facets & their selectors
+ function freezeDiamond() external onlyGovernor {
+ Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage();
+
+ require(!diamondStorage.isFrozen, "a9"); // diamond proxy is frozen already
+ _resetProposal();
+ diamondStorage.isFrozen = true;
+
+ emit Freeze();
+ }
+
+ /// @notice Unpause the functionality of all freezable facets & their selectors
+ function unfreezeDiamond() external onlyGovernor {
+ Diamond.DiamondStorage storage diamondStorage = Diamond.getDiamondStorage();
+
+ require(diamondStorage.isFrozen, "a7"); // diamond proxy is not frozen
+ _resetProposal();
+ diamondStorage.isFrozen = false;
+
+ emit Unfreeze();
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ GETTERS & HELPERS
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Generate the upgrade proposal hash
+ /// @param _diamondCut The diamond cut parameters that will be executed with the upgrade
+ /// @param _proposalId The current proposal ID, to set a unique upgrade hash depending on the upgrades order
+ /// @param _salt The arbitrary 32 bytes, primarily used in shadow upgrades to prevent guessing the upgrade proposal content by its hash
+ /// @return The upgrade proposal hash
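+ /// @dev The same hash can be reproduced off-chain as the keccak256 of the ABI-encoded (_diamondCut, _proposalId, _salt) tuple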
+ function upgradeProposalHash(
+ Diamond.DiamondCutData calldata _diamondCut,
+ uint256 _proposalId,
+ bytes32 _salt
+ ) public pure returns (bytes32) {
+ return keccak256(abi.encode(_diamondCut, _proposalId, _salt));
+ }
+
+ /// @dev Reset the proposed upgrade state to the default values
+ function _resetProposal() internal {
+ delete s.upgrades.state;
+ delete s.upgrades.proposedUpgradeHash;
+ delete s.upgrades.proposedUpgradeTimestamp;
+ delete s.upgrades.approvedBySecurityCouncil;
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/Executor.sol b/ethereum/contracts/zksync/facets/Executor.sol
new file mode 100644
index 000000000..6778c5280
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/Executor.sol
@@ -0,0 +1,397 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./Base.sol";
+import "../Config.sol";
+import "../interfaces/IExecutor.sol";
+import "../libraries/PairingsBn254.sol";
+import "../libraries/PriorityQueue.sol";
+import "../../common/libraries/UncheckedMath.sol";
+import "../../common/libraries/UnsafeBytes.sol";
+import "../../common/L2ContractHelper.sol";
+
+/// @title zkSync Executor contract capable of processing events emitted in the zkSync protocol.
+/// @author Matter Labs
+contract ExecutorFacet is Base, IExecutor {
+ using UncheckedMath for uint256;
+ using PriorityQueue for PriorityQueue.Queue;
+
+ /// @dev Process one block commit using the previous block StoredBlockInfo
+ /// @dev Returns the new block's StoredBlockInfo
+ /// @notice Does not change storage
+ function _commitOneBlock(StoredBlockInfo memory _previousBlock, CommitBlockInfo calldata _newBlock)
+ internal
+ view
+ returns (StoredBlockInfo memory)
+ {
+ require(_newBlock.blockNumber == _previousBlock.blockNumber + 1, "f"); // only commit next block
+
+ // Check that the block contains all meta information for the L2 logs.
+ // Get the chained hash of priority transaction hashes.
+ (
+ uint256 expectedNumberOfLayer1Txs,
+ bytes32 expectedPriorityOperationsHash,
+ bytes32 previousBlockHash,
+ uint256 l2BlockTimestamp
+ ) = _processL2Logs(_newBlock);
+
+ require(_previousBlock.blockHash == previousBlockHash, "l");
+ // Check that the priority operation hash in the L2 logs is as expected
+ require(expectedPriorityOperationsHash == _newBlock.priorityOperationsHash, "t");
+ // Check that the number of processed priority operations is as expected
+ require(expectedNumberOfLayer1Txs == _newBlock.numberOfLayer1Txs, "ta");
+ // Check that the timestamp that came from the Bootloader is expected
+ require(l2BlockTimestamp == _newBlock.timestamp, "tb");
+
+ // Preventing "stack too deep error"
+ {
+ // Check the timestamp of the new block
+ bool timestampNotTooSmall = block.timestamp - COMMIT_TIMESTAMP_NOT_OLDER <= l2BlockTimestamp;
+ bool timestampNotTooBig = l2BlockTimestamp <= block.timestamp + COMMIT_TIMESTAMP_APPROXIMATION_DELTA;
+ require(timestampNotTooSmall, "h"); // New block timestamp is too small
+ require(timestampNotTooBig, "h1"); // New block timestamp is too big
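+ // i.e. the L2 block timestamp must lie within
+ // [block.timestamp - COMMIT_TIMESTAMP_NOT_OLDER, block.timestamp + COMMIT_TIMESTAMP_APPROXIMATION_DELTA]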
+
+ // Check the index of repeated storage writes
+ uint256 newStorageChangesIndexes = uint256(uint32(bytes4(_newBlock.initialStorageChanges[:4])));
+ require(
+ _previousBlock.indexRepeatedStorageChanges + newStorageChangesIndexes ==
+ _newBlock.indexRepeatedStorageChanges,
+ "yq"
+ );
+
+ // NOTE: We don't check that _newBlock.timestamp > _previousBlock.timestamp, it is checked inside the L2
+ }
+
+ // Create block commitment for the proof verification
+ bytes32 commitment = _createBlockCommitment(_newBlock);
+
+ return
+ StoredBlockInfo(
+ _newBlock.blockNumber,
+ _newBlock.newStateRoot,
+ _newBlock.indexRepeatedStorageChanges,
+ _newBlock.numberOfLayer1Txs,
+ _newBlock.priorityOperationsHash,
+ _newBlock.l2LogsTreeRoot,
+ _newBlock.timestamp,
+ commitment
+ );
+ }
+
+ /// @dev Check that the L2 logs are correct and that the block contains all meta information for them
+ function _processL2Logs(CommitBlockInfo calldata _newBlock)
+ internal
+ pure
+ returns (
+ uint256 numberOfLayer1Txs,
+ bytes32 chainedPriorityTxsHash,
+ bytes32 previousBlockHash,
+ uint256 blockTimestamp
+ )
+ {
+ // Copy L2 to L1 logs into memory.
+ bytes memory emittedL2Logs = _newBlock.l2Logs[4:];
+ bytes[] calldata l2Messages = _newBlock.l2ArbitraryLengthMessages;
+ uint256 currentMessage;
+ // Auxiliary variable needed to enforce that `previousBlockHash` and `blockTimestamp` were read exactly once
+ bool isSystemContextLogProcessed;
+ bytes[] calldata factoryDeps = _newBlock.factoryDeps;
+ uint256 currentBytecode;
+
+ chainedPriorityTxsHash = EMPTY_STRING_KECCAK;
+
+ // linear traversal of the logs
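+ // Each serialized log follows the `L2Log` layout: l2ShardId (1 byte) | isService (1 byte) |
+ // txNumberInBlock (2 bytes) | sender (20 bytes) | key (32 bytes) | value (32 bytes),
+ // so the offsets 4, 24 and 56 used below point at the sender, key and value fields respectively.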
+ for (uint256 i = 0; i < emittedL2Logs.length; i = i.uncheckedAdd(L2_TO_L1_LOG_SERIALIZE_SIZE)) {
+ (address logSender, ) = UnsafeBytes.readAddress(emittedL2Logs, i + 4);
+
+ // show preimage for hashed message stored in log
+ if (logSender == L2_TO_L1_MESSENGER) {
+ (bytes32 hashedMessage, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 56);
+ require(keccak256(l2Messages[currentMessage]) == hashedMessage, "k2");
+
+ currentMessage = currentMessage.uncheckedInc();
+ } else if (logSender == L2_BOOTLOADER_ADDRESS) {
+ (bytes32 canonicalTxHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 24);
+ chainedPriorityTxsHash = keccak256(abi.encode(chainedPriorityTxsHash, canonicalTxHash));
+
+ // Overflow is not realistic
+ numberOfLayer1Txs = numberOfLayer1Txs.uncheckedInc();
+ } else if (logSender == L2_SYSTEM_CONTEXT_ADDRESS) {
+ // Make sure that the system context log wasn't processed yet, to
+ // avoid accidental double reading of `blockTimestamp` and `previousBlockHash`
+ require(!isSystemContextLogProcessed, "fx");
+ (blockTimestamp, ) = UnsafeBytes.readUint256(emittedL2Logs, i + 24);
+ (previousBlockHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 56);
+ // Mark system context log as processed
+ isSystemContextLogProcessed = true;
+ } else if (logSender == L2_KNOWN_CODE_STORAGE_ADDRESS) {
+ (bytes32 bytecodeHash, ) = UnsafeBytes.readBytes32(emittedL2Logs, i + 24);
+ require(bytecodeHash == L2ContractHelper.hashL2Bytecode(factoryDeps[currentBytecode]), "k3");
+
+ currentBytecode = currentBytecode.uncheckedInc();
+ }
+ }
+ // To check that only relevant preimages have been included in the calldata
+ require(currentBytecode == factoryDeps.length, "ym");
+ require(currentMessage == l2Messages.length, "pl");
+ // Check that `blockTimestamp` and `previousBlockHash` were read from the L2 logs
+ require(isSystemContextLogProcessed, "by");
+ }
+
+ /// @notice Commit block
+ /// @notice 1. Checks timestamp.
+ /// @notice 2. Process L2 logs.
+ /// @notice 3. Store block commitments.
+ function commitBlocks(StoredBlockInfo memory _lastCommittedBlockData, CommitBlockInfo[] calldata _newBlocksData)
+ external
+ override
+ nonReentrant
+ onlyValidator
+ {
+ // Check that we commit blocks after last committed block
+ require(s.storedBlockHashes[s.totalBlocksCommitted] == _hashStoredBlockInfo(_lastCommittedBlockData), "i"); // incorrect previous block data
+
+ uint256 blocksLength = _newBlocksData.length;
+ for (uint256 i = 0; i < blocksLength; i = i.uncheckedInc()) {
+ _lastCommittedBlockData = _commitOneBlock(_lastCommittedBlockData, _newBlocksData[i]);
+ s.storedBlockHashes[_lastCommittedBlockData.blockNumber] = _hashStoredBlockInfo(_lastCommittedBlockData);
+
+ emit BlockCommit(
+ _lastCommittedBlockData.blockNumber,
+ _lastCommittedBlockData.blockHash,
+ _lastCommittedBlockData.commitment
+ );
+ }
+
+ s.totalBlocksCommitted = s.totalBlocksCommitted + blocksLength;
+ }
+
+ /// @dev Pops the priority operations from the priority queue and returns a rolling hash of operations
+ function _collectOperationsFromPriorityQueue(uint256 _nPriorityOps) internal returns (bytes32 concatHash) {
+ concatHash = EMPTY_STRING_KECCAK;
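+ // The rolling hash is chained the same way as `chainedPriorityTxsHash` in `_processL2Logs`,
+ // so it can later be compared against the block's `priorityOperationsHash`.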
+
+ for (uint256 i = 0; i < _nPriorityOps; i = i.uncheckedInc()) {
+ PriorityOperation memory priorityOp = s.priorityQueue.popFront();
+ concatHash = keccak256(abi.encode(concatHash, priorityOp.canonicalTxHash));
+ }
+ }
+
+ /// @dev Executes one block
+ /// @dev 1. Processes all pending operations (Complete priority requests)
+ /// @dev 2. Finalizes block on Ethereum
+ /// @dev _executedBlockIdx is an index in the array of the blocks that we want to execute together
+ function _executeOneBlock(StoredBlockInfo memory _storedBlock, uint256 _executedBlockIdx) internal {
+ uint256 currentBlockNumber = _storedBlock.blockNumber;
+ require(currentBlockNumber == s.totalBlocksExecuted + _executedBlockIdx + 1, "k"); // Execute blocks in order
+ require(
+ _hashStoredBlockInfo(_storedBlock) == s.storedBlockHashes[currentBlockNumber],
+ "exe10" // executing block should be committed
+ );
+
+ bytes32 priorityOperationsHash = _collectOperationsFromPriorityQueue(_storedBlock.numberOfLayer1Txs);
+ require(priorityOperationsHash == _storedBlock.priorityOperationsHash, "x"); // priority operations hash does not match the expected one
+
+ // Save root hash of L2 -> L1 logs tree
+ s.l2LogsRootHashes[currentBlockNumber] = _storedBlock.l2LogsTreeRoot;
+ }
+
+ /// @notice Execute blocks, complete priority operations and process withdrawals.
+ /// @notice 1. Processes all pending operations (Complete priority requests)
+ /// @notice 2. Finalizes block on Ethereum
+ function executeBlocks(StoredBlockInfo[] calldata _blocksData) external nonReentrant onlyValidator {
+ uint256 nBlocks = _blocksData.length;
+ for (uint256 i = 0; i < nBlocks; i = i.uncheckedInc()) {
+ _executeOneBlock(_blocksData[i], i);
+ emit BlockExecution(_blocksData[i].blockNumber, _blocksData[i].blockHash, _blocksData[i].commitment);
+ }
+
+ s.totalBlocksExecuted = s.totalBlocksExecuted + nBlocks;
+ require(s.totalBlocksExecuted <= s.totalBlocksVerified, "n"); // Can't execute more blocks than have been proven so far
+ }
+
+ /// @notice Blocks commitment verification.
+ /// @notice Only verifies block commitments without any other processing
+ function proveBlocks(
+ StoredBlockInfo calldata _prevBlock,
+ StoredBlockInfo[] calldata _committedBlocks,
+ ProofInput calldata _proof
+ ) external nonReentrant onlyValidator {
+ // Save the variables into the stack to save gas on reading them later
+ uint256 currentTotalBlocksVerified = s.totalBlocksVerified;
+ uint256 committedBlocksLength = _committedBlocks.length;
+
+ // Save the variable from the storage to memory to save gas
+ VerifierParams memory verifierParams = s.verifierParams;
+
+ // Initialize the array that will be used as the public input to the ZKP
+ uint256[] memory proofPublicInput = new uint256[](committedBlocksLength);
+
+ // Check that the block passed by the validator is indeed the first unverified block
+ require(_hashStoredBlockInfo(_prevBlock) == s.storedBlockHashes[currentTotalBlocksVerified], "t1");
+
+ bytes32 prevBlockCommitment = _prevBlock.commitment;
+ for (uint256 i = 0; i < committedBlocksLength; i = i.uncheckedInc()) {
+ currentTotalBlocksVerified = currentTotalBlocksVerified.uncheckedInc();
+ require(_hashStoredBlockInfo(_committedBlocks[i]) == s.storedBlockHashes[currentTotalBlocksVerified], "o1");
+
+ bytes32 currentBlockCommitment = _committedBlocks[i].commitment;
+ proofPublicInput[i] = _getBlockProofPublicInput(
+ prevBlockCommitment,
+ currentBlockCommitment,
+ _proof,
+ verifierParams
+ );
+
+ prevBlockCommitment = currentBlockCommitment;
+ }
+ require(currentTotalBlocksVerified <= s.totalBlocksCommitted, "q");
+
+ // #if DUMMY_VERIFIER
+
+ // Additional level of protection for the mainnet
+ assert(block.chainid != 1);
+ // We allow skipping the zkp verification for the test(net) environment
+ // If the proof is not empty, verify it, otherwise, skip the verification
+ if (_proof.serializedProof.length > 0) {
+ // TODO: We keep the code duplication here to NOT to invalidate the audit, refactor it before the next audit. (SMA-1631)
+ bool successVerifyProof = s.verifier.verify_serialized_proof(proofPublicInput, _proof.serializedProof);
+ require(successVerifyProof, "p"); // Proof verification fail
+
+ // Verify the recursive part that was given to us through the public input
+ bool successProofAggregation = _verifyRecursivePartOfProof(_proof.recursiveAggregationInput);
+ require(successProofAggregation, "hh"); // Proof aggregation must be valid
+ }
+ // #else
+ bool successVerifyProof = s.verifier.verify_serialized_proof(proofPublicInput, _proof.serializedProof);
+ require(successVerifyProof, "p"); // Proof verification fail
+
+ // Verify the recursive part that was given to us through the public input
+ bool successProofAggregation = _verifyRecursivePartOfProof(_proof.recursiveAggregationInput);
+ require(successProofAggregation, "hh"); // Proof aggregation must be valid
+ // #endif
+
+ emit BlocksVerification(s.totalBlocksVerified, currentTotalBlocksVerified);
+ s.totalBlocksVerified = currentTotalBlocksVerified;
+ }
+
+ /// @dev Gets zk proof public input
+ function _getBlockProofPublicInput(
+ bytes32 _prevBlockCommitment,
+ bytes32 _currentBlockCommitment,
+ ProofInput calldata _proof,
+ VerifierParams memory _verifierParams
+ ) internal pure returns (uint256) {
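+ // The keccak digest is truncated with INPUT_MASK, presumably so that the resulting public input
+ // fits into the scalar field expected by the verifier.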
+ return
+ uint256(
+ keccak256(
+ abi.encodePacked(
+ _prevBlockCommitment,
+ _currentBlockCommitment,
+ _verifierParams.recursionNodeLevelVkHash,
+ _verifierParams.recursionLeafLevelVkHash,
+ _verifierParams.recursionCircuitsSetVksHash,
+ _proof.recursiveAggregationInput
+ )
+ )
+ ) & INPUT_MASK;
+ }
+
+ /// @dev Verify a part of the zkp, that is responsible for the aggregation
+ function _verifyRecursivePartOfProof(uint256[] calldata _recursiveAggregationInput) internal view returns (bool) {
+ require(_recursiveAggregationInput.length == 4, "vr");
+
+ PairingsBn254.G1Point memory pairWithGen = PairingsBn254.new_g1_checked(
+ _recursiveAggregationInput[0],
+ _recursiveAggregationInput[1]
+ );
+ PairingsBn254.G1Point memory pairWithX = PairingsBn254.new_g1_checked(
+ _recursiveAggregationInput[2],
+ _recursiveAggregationInput[3]
+ );
+
+ PairingsBn254.G2Point memory g2Gen = PairingsBn254.new_g2(
+ [
+ 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2,
+ 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed
+ ],
+ [
+ 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b,
+ 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa
+ ]
+ );
+ PairingsBn254.G2Point memory g2X = PairingsBn254.new_g2(
+ [
+ 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1,
+ 0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0
+ ],
+ [
+ 0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4,
+ 0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55
+ ]
+ );
+
+ return PairingsBn254.pairingProd2(pairWithGen, g2Gen, pairWithX, g2X);
+ }
+
+ /// @notice Reverts unexecuted blocks
+ /// @param _newLastBlock The block number after which blocks should be reverted
+ /// NOTE: Doesn't delete the stored data about blocks, but only decreases the
+ /// counters that track the number of committed and verified blocks
+ function revertBlocks(uint256 _newLastBlock) external nonReentrant onlyValidator {
+ require(s.totalBlocksCommitted > _newLastBlock, "v1"); // The last committed block must be greater than the new last block
+ uint256 newTotalBlocksCommitted = _maxU256(_newLastBlock, s.totalBlocksExecuted);
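+ // Clamped to `totalBlocksExecuted` so that already executed blocks can never be reverted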
+
+ if (newTotalBlocksCommitted < s.totalBlocksVerified) {
+ s.totalBlocksVerified = newTotalBlocksCommitted;
+ }
+ s.totalBlocksCommitted = newTotalBlocksCommitted;
+
+ emit BlocksRevert(s.totalBlocksCommitted, s.totalBlocksVerified, s.totalBlocksExecuted);
+ }
+
+ /// @notice Returns larger of two values
+ function _maxU256(uint256 a, uint256 b) internal pure returns (uint256) {
+ return a < b ? b : a;
+ }
+
+ /// @dev Creates block commitment from its data
+ function _createBlockCommitment(CommitBlockInfo calldata _newBlockData) internal view returns (bytes32) {
+ bytes32 passThroughDataHash = keccak256(_blockPassThroughData(_newBlockData));
+ bytes32 metadataHash = keccak256(_blockMetaParameters());
+ bytes32 auxiliaryOutputHash = keccak256(_blockAuxiliaryOutput(_newBlockData));
+
+ return keccak256(abi.encode(passThroughDataHash, metadataHash, auxiliaryOutputHash));
+ }
+
+ function _blockPassThroughData(CommitBlockInfo calldata _block) internal pure returns (bytes memory) {
+ return
+ abi.encodePacked(
+ _block.indexRepeatedStorageChanges,
+ _block.newStateRoot,
+ uint64(0), // index repeated storage changes in zkPorter
+ bytes32(0) // zkPorter block hash
+ );
+ }
+
+ function _blockMetaParameters() internal view returns (bytes memory) {
+ return abi.encodePacked(s.zkPorterIsAvailable, s.l2BootloaderBytecodeHash, s.l2DefaultAccountBytecodeHash);
+ }
+
+ function _blockAuxiliaryOutput(CommitBlockInfo calldata _block) internal pure returns (bytes memory) {
+ require(_block.initialStorageChanges.length <= MAX_INITIAL_STORAGE_CHANGES_COMMITMENT_BYTES, "pf");
+ require(_block.repeatedStorageChanges.length <= MAX_REPEATED_STORAGE_CHANGES_COMMITMENT_BYTES, "py");
+ require(_block.l2Logs.length <= MAX_L2_TO_L1_LOGS_COMMITMENT_BYTES, "pu");
+
+ bytes32 initialStorageChangesHash = keccak256(_block.initialStorageChanges);
+ bytes32 repeatedStorageChangesHash = keccak256(_block.repeatedStorageChanges);
+ bytes32 l2ToL1LogsHash = keccak256(_block.l2Logs);
+
+ return abi.encode(_block.l2LogsTreeRoot, l2ToL1LogsHash, initialStorageChangesHash, repeatedStorageChangesHash);
+ }
+
+ /// @notice Returns the keccak hash of the ABI-encoded StoredBlockInfo
+ function _hashStoredBlockInfo(StoredBlockInfo memory _storedBlockInfo) internal pure returns (bytes32) {
+ return keccak256(abi.encode(_storedBlockInfo));
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/Getters.sol b/ethereum/contracts/zksync/facets/Getters.sol
new file mode 100644
index 000000000..b8b1ca940
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/Getters.sol
@@ -0,0 +1,209 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./Base.sol";
+import "../libraries/Diamond.sol";
+import "../libraries/PriorityQueue.sol";
+import "../../common/libraries/UncheckedMath.sol";
+import "../interfaces/IGetters.sol";
+
+/// @title Getters Contract implements functions for getting contract state from outside the blockchain.
+/// @author Matter Labs
+contract GettersFacet is Base, IGetters {
+ using UncheckedMath for uint256;
+ using PriorityQueue for PriorityQueue.Queue;
+
+ /*//////////////////////////////////////////////////////////////
+ CUSTOM GETTERS
+ //////////////////////////////////////////////////////////////*/
+
+ /// @return The address of the verifier smart contract
+ function getVerifier() external view returns (address) {
+ return address(s.verifier);
+ }
+
+ /// @return The address of the current governor
+ function getGovernor() external view returns (address) {
+ return s.governor;
+ }
+
+ /// @return The address of the pending governor
+ function getPendingGovernor() external view returns (address) {
+ return s.pendingGovernor;
+ }
+
+ /// @return The total number of blocks that were committed
+ function getTotalBlocksCommitted() external view returns (uint256) {
+ return s.totalBlocksCommitted;
+ }
+
+ /// @return The total number of blocks that were committed & verified
+ function getTotalBlocksVerified() external view returns (uint256) {
+ return s.totalBlocksVerified;
+ }
+
+ /// @return The total number of blocks that were committed & verified & executed
+ function getTotalBlocksExecuted() external view returns (uint256) {
+ return s.totalBlocksExecuted;
+ }
+
+ /// @return The total number of priority operations that were added to the priority queue, including all processed ones
+ function getTotalPriorityTxs() external view returns (uint256) {
+ return s.priorityQueue.getTotalPriorityTxs();
+ }
+
+ /// @notice Returns zero if and only if no operations were processed from the queue
+ /// @notice Reverts if there are no unprocessed priority transactions
+ /// @return Index of the oldest priority operation that wasn't processed yet
+ function getFirstUnprocessedPriorityTx() external view returns (uint256) {
+ return s.priorityQueue.getFirstUnprocessedPriorityTx();
+ }
+
+ /// @return The number of priority operations currently in the queue
+ function getPriorityQueueSize() external view returns (uint256) {
+ return s.priorityQueue.getSize();
+ }
+
+ /// @return The first unprocessed priority operation from the queue
+ function priorityQueueFrontOperation() external view returns (PriorityOperation memory) {
+ return s.priorityQueue.front();
+ }
+
+ /// @return Whether the address has a validator access
+ function isValidator(address _address) external view returns (bool) {
+ return s.validators[_address];
+ }
+
+ /// @return Merkle root of the tree with L2 logs for the selected block
+ function l2LogsRootHash(uint256 _blockNumber) external view returns (bytes32) {
+ return s.l2LogsRootHashes[_blockNumber];
+ }
+
+ /// @notice For unfinalized (non-executed) blocks, the returned hash may change
+ /// @dev Returns zero for non-committed blocks
+ /// @return The hash of the committed L2 block.
+ function storedBlockHash(uint256 _blockNumber) external view returns (bytes32) {
+ return s.storedBlockHashes[_blockNumber];
+ }
+
+ /// @return Bytecode hash of bootloader program.
+ function getL2BootloaderBytecodeHash() external view returns (bytes32) {
+ return s.l2BootloaderBytecodeHash;
+ }
+
+ /// @return Bytecode hash of default account (bytecode for EOA).
+ function getL2DefaultAccountBytecodeHash() external view returns (bytes32) {
+ return s.l2DefaultAccountBytecodeHash;
+ }
+
+ /// @return Verifier parameters.
+ function getVerifierParams() external view returns (VerifierParams memory) {
+ return s.verifierParams;
+ }
+
+ /// @return The address of the security council multisig
+ function getSecurityCouncil() external view returns (address) {
+ return s.upgrades.securityCouncil;
+ }
+
+ /// @return Current upgrade proposal state
+ function getUpgradeProposalState() external view returns (UpgradeState) {
+ return s.upgrades.state;
+ }
+
+ /// @return The upgrade proposal hash if there is an active one and zero otherwise
+ function getProposedUpgradeHash() external view returns (bytes32) {
+ return s.upgrades.proposedUpgradeHash;
+ }
+
+ /// @return The timestamp when the upgrade was proposed, zero if there are no active proposals
+ function getProposedUpgradeTimestamp() external view returns (uint256) {
+ return s.upgrades.proposedUpgradeTimestamp;
+ }
+
+ /// @return The serial number of a proposed upgrade, increments when proposing a new one
+ function getCurrentProposalId() external view returns (uint256) {
+ return s.upgrades.currentProposalId;
+ }
+
+ /// @return Whether the upgrade proposal has been approved by the security council
+ function isApprovedBySecurityCouncil() external view returns (bool) {
+ return s.upgrades.approvedBySecurityCouncil;
+ }
+
+ /// @return Whether the diamond is frozen or not
+ function isDiamondStorageFrozen() external view returns (bool) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+ return ds.isFrozen;
+ }
+
+ /// @return isFreezable Whether the facet can be frozen by the governor or always accessible
+ function isFacetFreezable(address _facet) external view returns (bool isFreezable) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+
+ // There is no direct way to get whether the facet address is freezable,
+ // so we get it from one of the selectors that are associated with the facet.
+ uint256 selectorsArrayLen = ds.facetToSelectors[_facet].selectors.length;
+ if (selectorsArrayLen != 0) {
+ bytes4 selector0 = ds.facetToSelectors[_facet].selectors[0];
+ isFreezable = ds.selectorToFacet[selector0].isFreezable;
+ }
+ }
+
+ /// @return The maximum number of L2 gas that a user can request for L1 -> L2 transactions
+ function getpriorityTxMaxGasLimit() external view returns (uint256) {
+ return s.priorityTxMaxGasLimit;
+ }
+
+ /// @return Whether the selector can be frozen by the governor or always accessible
+ function isFunctionFreezable(bytes4 _selector) external view returns (bool) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+ require(ds.selectorToFacet[_selector].facetAddress != address(0), "g2");
+ return ds.selectorToFacet[_selector].isFreezable;
+ }
+
+ /// @return Whether a withdrawal has been finalized.
+ /// @param _l2BlockNumber The L2 block number within which the withdrawal happened.
+ /// @param _l2MessageIndex The index of the L2->L1 message denoting the withdrawal.
+ function isEthWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool) {
+ return s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex];
+ }
+
+ /*//////////////////////////////////////////////////////////////
+ DIAMOND LOUPE
+ //////////////////////////////////////////////////////////////*/
+
+ /// @return result All facet addresses and their function selectors
+ function facets() external view returns (Facet[] memory result) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+
+ uint256 facetsLen = ds.facets.length;
+ result = new Facet[](facetsLen);
+
+ for (uint256 i = 0; i < facetsLen; i = i.uncheckedInc()) {
+ address facetAddr = ds.facets[i];
+ Diamond.FacetToSelectors memory facetToSelectors = ds.facetToSelectors[facetAddr];
+
+ result[i] = Facet(facetAddr, facetToSelectors.selectors);
+ }
+ }
+
+ /// @return NON-sorted array with function selectors supported by a specific facet
+ function facetFunctionSelectors(address _facet) external view returns (bytes4[] memory) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+ return ds.facetToSelectors[_facet].selectors;
+ }
+
+ /// @return NON-sorted array of facet addresses supported on diamond
+ function facetAddresses() external view returns (address[] memory) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+ return ds.facets;
+ }
+
+ /// @return Facet address associated with a selector. Zero if the selector is not added to the diamond
+ function facetAddress(bytes4 _selector) external view returns (address) {
+ Diamond.DiamondStorage storage ds = Diamond.getDiamondStorage();
+ return ds.selectorToFacet[_selector].facetAddress;
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/Governance.sol b/ethereum/contracts/zksync/facets/Governance.sol
new file mode 100644
index 000000000..a954fa828
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/Governance.sol
@@ -0,0 +1,120 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../interfaces/IGovernance.sol";
+import "../../common/L2ContractHelper.sol";
+import "./Base.sol";
+
+/// @title Governance Contract controls access rights for contract management.
+/// @author Matter Labs
+contract GovernanceFacet is Base, IGovernance {
+ /// @notice Starts the transfer of governor rights. Only the current governor can propose a new pending one.
+ /// @notice New governor can accept governor rights by calling `acceptGovernor` function.
+ /// @param _newPendingGovernor Address of the new governor
+ function setPendingGovernor(address _newPendingGovernor) external onlyGovernor {
+ // Save previous value into the stack to put it into the event later
+ address oldPendingGovernor = s.pendingGovernor;
+
+ if (oldPendingGovernor != _newPendingGovernor) {
+ // Change pending governor
+ s.pendingGovernor = _newPendingGovernor;
+
+ emit NewPendingGovernor(oldPendingGovernor, _newPendingGovernor);
+ }
+ }
+
+ /// @notice Accepts the transfer of governor rights. Only the pending governor can accept the role.
+ function acceptGovernor() external {
+ address pendingGovernor = s.pendingGovernor;
+ require(msg.sender == pendingGovernor, "n4"); // Only the address proposed by the current governor can claim the governor rights
+
+ if (pendingGovernor != s.governor) {
+ address previousGovernor = s.governor;
+ s.governor = pendingGovernor;
+ delete s.pendingGovernor;
+
+ emit NewPendingGovernor(pendingGovernor, address(0));
+ emit NewGovernor(previousGovernor, pendingGovernor);
+ }
+ }
+
+ /// @notice Change validator status (active or not active)
+ /// @param _validator Validator address
+ /// @param _active Active flag
+ function setValidator(address _validator, bool _active) external onlyGovernor {
+ if (s.validators[_validator] != _active) {
+ s.validators[_validator] = _active;
+ emit ValidatorStatusUpdate(_validator, _active);
+ }
+ }
+
+ /// @notice Change the bootloader bytecode hash that is used on L2
+ /// @param _l2BootloaderBytecodeHash The hash of bootloader L2 bytecode
+ function setL2BootloaderBytecodeHash(bytes32 _l2BootloaderBytecodeHash) external onlyGovernor {
+ L2ContractHelper.validateBytecodeHash(_l2BootloaderBytecodeHash);
+
+ // Save previous value into the stack to put it into the event later
+ bytes32 previousBootloaderBytecodeHash = s.l2BootloaderBytecodeHash;
+
+ if (previousBootloaderBytecodeHash != _l2BootloaderBytecodeHash) {
+ // Change the bootloader bytecode hash
+ s.l2BootloaderBytecodeHash = _l2BootloaderBytecodeHash;
+ emit NewL2BootloaderBytecodeHash(previousBootloaderBytecodeHash, _l2BootloaderBytecodeHash);
+ }
+ }
+
+ /// @notice Change the default account bytecode hash that is used on L2
+ /// @param _l2DefaultAccountBytecodeHash The hash of default account L2 bytecode
+ function setL2DefaultAccountBytecodeHash(bytes32 _l2DefaultAccountBytecodeHash) external onlyGovernor {
+ L2ContractHelper.validateBytecodeHash(_l2DefaultAccountBytecodeHash);
+
+ // Save previous value into the stack to put it into the event later
+ bytes32 previousDefaultAccountBytecodeHash = s.l2DefaultAccountBytecodeHash;
+
+ if (previousDefaultAccountBytecodeHash != _l2DefaultAccountBytecodeHash) {
+ // Change the default account bytecode hash
+ s.l2DefaultAccountBytecodeHash = _l2DefaultAccountBytecodeHash;
+ emit NewL2DefaultAccountBytecodeHash(previousDefaultAccountBytecodeHash, _l2DefaultAccountBytecodeHash);
+ }
+ }
+
+ /// @notice Change zk porter availability
+ /// @param _zkPorterIsAvailable The availability of zk porter shard
+ function setPorterAvailability(bool _zkPorterIsAvailable) external onlyGovernor {
+ if (s.zkPorterIsAvailable != _zkPorterIsAvailable) {
+ // Change the porter availability
+ s.zkPorterIsAvailable = _zkPorterIsAvailable;
+ emit IsPorterAvailableStatusUpdate(_zkPorterIsAvailable);
+ }
+ }
+
+ /// @notice Change the address of the verifier smart contract
+ /// @param _newVerifier Verifier smart contract address
+ function setVerifier(Verifier _newVerifier) external onlyGovernor {
+ Verifier oldVerifier = s.verifier;
+ if (oldVerifier != _newVerifier) {
+ s.verifier = _newVerifier;
+ emit NewVerifier(address(oldVerifier), address(_newVerifier));
+ }
+ }
+
+ /// @notice Change the verifier parameters
+ /// @param _newVerifierParams New parameters for the verifier
+ function setVerifierParams(VerifierParams calldata _newVerifierParams) external onlyGovernor {
+ VerifierParams memory oldVerifierParams = s.verifierParams;
+
+ s.verifierParams = _newVerifierParams;
+ emit NewVerifierParams(oldVerifierParams, _newVerifierParams);
+ }
+
+ /// @notice Change the max L2 gas limit for L1 -> L2 transactions
+ /// @param _newPriorityTxMaxGasLimit The maximum number of L2 gas that a user can request for L1 -> L2 transactions
+ function setPriorityTxMaxGasLimit(uint256 _newPriorityTxMaxGasLimit) external onlyGovernor {
+ uint256 oldPriorityTxMaxGasLimit = s.priorityTxMaxGasLimit;
+ if (oldPriorityTxMaxGasLimit != _newPriorityTxMaxGasLimit) {
+ s.priorityTxMaxGasLimit = _newPriorityTxMaxGasLimit;
+ emit NewPriorityTxMaxGasLimit(oldPriorityTxMaxGasLimit, _newPriorityTxMaxGasLimit);
+ }
+ }
+}
diff --git a/ethereum/contracts/zksync/facets/Mailbox.sol b/ethereum/contracts/zksync/facets/Mailbox.sol
new file mode 100644
index 000000000..c59155535
--- /dev/null
+++ b/ethereum/contracts/zksync/facets/Mailbox.sol
@@ -0,0 +1,537 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/utils/math/Math.sol";
+
+import "../interfaces/IMailbox.sol";
+import "../libraries/Merkle.sol";
+import "../libraries/PriorityQueue.sol";
+import "../Storage.sol";
+import "../Config.sol";
+import "../../common/libraries/UncheckedMath.sol";
+import "../../common/libraries/UnsafeBytes.sol";
+import "../../common/L2ContractHelper.sol";
+import "../../vendor/AddressAliasHelper.sol";
+import "./Base.sol";
+
+/// @title zkSync Mailbox contract providing interfaces for L1 <-> L2 interaction.
+/// @author Matter Labs
+contract MailboxFacet is Base, IMailbox {
+ using UncheckedMath for uint256;
+ using PriorityQueue for PriorityQueue.Queue;
+
+ /// @notice Prove that a specific arbitrary-length message was sent in a specific L2 block number
+ /// @param _blockNumber The executed L2 block number in which the message appeared
+ /// @param _index The position in the L2 logs Merkle tree of the l2Log that was sent with the message
+ /// @param _message Information about the sent message: sender address, the message itself, tx index in the L2 block where the message was sent
+ /// @param _proof Merkle proof for inclusion of L2 log that was sent with the message
+ /// @return Whether the proof is valid
+ function proveL2MessageInclusion(
+ uint256 _blockNumber,
+ uint256 _index,
+ L2Message memory _message,
+ bytes32[] calldata _proof
+ ) public view returns (bool) {
+ return _proveL2LogInclusion(_blockNumber, _index, _L2MessageToLog(_message), _proof);
+ }
+
+ /// @notice Prove that a specific L2 log was sent in a specific L2 block
+ /// @param _blockNumber The executed L2 block number in which the log appeared
+ /// @param _index The position of the l2log in the L2 logs Merkle tree
+ /// @param _log Information about the sent log
+ /// @param _proof Merkle proof for inclusion of the L2 log
+ /// @return Whether the proof is correct and L2 log is included in block
+ function proveL2LogInclusion(
+ uint256 _blockNumber,
+ uint256 _index,
+ L2Log memory _log,
+ bytes32[] calldata _proof
+ ) external view returns (bool) {
+ return _proveL2LogInclusion(_blockNumber, _index, _log, _proof);
+ }
+
+ /// @notice Prove that the L1 -> L2 transaction was processed with the specified status.
+ /// @param _l2TxHash The L2 canonical transaction hash
+ /// @param _l2BlockNumber The L2 block number where the transaction was processed
+ /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message
+ /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent
+ /// @param _merkleProof The Merkle proof of the processing L1 -> L2 transaction
+ /// @param _status The execution status of the L1 -> L2 transaction (1 - success & 0 - fail)
+ /// @return Whether the proof is correct and the transaction was actually executed with provided status
+ /// NOTE: It may return `false` for incorrect proof, but it doesn't mean that the L1 -> L2 transaction has an opposite status!
+ function proveL1ToL2TransactionStatus(
+ bytes32 _l2TxHash,
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes32[] calldata _merkleProof,
+ TxStatus _status
+ ) public view override returns (bool) {
+ // Bootloader sends an L2 -> L1 log only after processing the L1 -> L2 transaction.
+ // Thus, we can verify that the L1 -> L2 transaction was included in the L2 block with specified status.
+ //
+ // The semantics of such an L2 -> L1 log is always:
+ // - sender = BOOTLOADER_ADDRESS
+ // - key = hash(L1ToL2Transaction)
+ // - value = status of the processing transaction (1 - success & 0 - fail)
+ // - isService = true (just a conventional value)
+ // - l2ShardId = 0 (means that L1 -> L2 transaction was processed in a rollup shard, other shards are not available yet anyway)
+ // - txNumberInBlock = the number of the transaction in the block
+ L2Log memory l2Log = L2Log({
+ l2ShardId: 0,
+ isService: true,
+ txNumberInBlock: _l2TxNumberInBlock,
+ sender: BOOTLOADER_ADDRESS,
+ key: _l2TxHash,
+ value: bytes32(uint256(_status))
+ });
+ return _proveL2LogInclusion(_l2BlockNumber, _l2MessageIndex, l2Log, _merkleProof);
+ }
+
+ /// @notice Transfer ether from the contract to the receiver
+ /// @dev Reverts only if the transfer call failed
+ function _withdrawFunds(address _to, uint256 _amount) internal {
+ bool callSuccess;
+ // Low-level assembly call, to avoid any memory copying (save gas)
+ assembly {
+ callSuccess := call(gas(), _to, _amount, 0, 0, 0, 0)
+ }
+ require(callSuccess, "pz");
+ }
+
+ /// @dev Prove that a specific L2 log was sent in a specific L2 block number
+ function _proveL2LogInclusion(
+ uint256 _blockNumber,
+ uint256 _index,
+ L2Log memory _log,
+ bytes32[] calldata _proof
+ ) internal view returns (bool) {
+ require(_blockNumber <= s.totalBlocksExecuted, "xx");
+
+ bytes32 hashedLog = keccak256(
+ abi.encodePacked(_log.l2ShardId, _log.isService, _log.txNumberInBlock, _log.sender, _log.key, _log.value)
+ );
+ // Check that hashed log is not the default one,
+ // otherwise it means that the value is out of range of sent L2 -> L1 logs
+ require(hashedLog != L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, "tw");
+ // Check that the proof length is exactly the same as the tree height, to prevent
+ // any shorter/longer path attacks on the Merkle path validation
+ require(_proof.length == L2_TO_L1_LOG_MERKLE_TREE_HEIGHT, "rz");
+
+ bytes32 calculatedRootHash = Merkle.calculateRoot(_proof, _index, hashedLog);
+ bytes32 actualRootHash = s.l2LogsRootHashes[_blockNumber];
+
+ return actualRootHash == calculatedRootHash;
+ }
+
+ /// @dev Convert arbitrary-length message to the raw l2 log
+ function _L2MessageToLog(L2Message memory _message) internal pure returns (L2Log memory) {
+ return
+ L2Log({
+ l2ShardId: 0,
+ isService: true,
+ txNumberInBlock: _message.txNumberInBlock,
+ sender: L2_TO_L1_MESSENGER,
+ key: bytes32(uint256(uint160(_message.sender))),
+ value: keccak256(_message.data)
+ });
+ }
+
+ /// @notice Estimates the cost in Ether of requesting execution of an L2 transaction from L1
+ /// @return The estimated amount of Ether (in wei) to be paid as the base cost of the transaction
+ function l2TransactionBaseCost(
+ uint256, // _gasPrice
+ uint256, // _l2GasLimit
+ uint256 // _l2GasPerPubdataByteLimit
+ ) public pure returns (uint256) {
+ // TODO: for now, all the L1 -> L2 transactions are free.
+ // The commented-out code below the `return` is the correct estimation of the
+ // base cost for the transaction.
+ return 0;
+
+ // uint256 l2GasPrice = _deriveL2GasPrice(
+ // _gasPrice,
+ // _l2GasPerPubdataByteLimit
+ // );
+ // return l2GasPrice * _l2GasLimit;
+ }
+
+ /// @notice Derives the price for L2 gas in ETH to be paid.
+ /// @param _l1GasPrice The gas price on L1.
+ /// @param _gasPricePerPubdata The price for each pubdata byte in L2 gas
+ function _deriveL2GasPrice(uint256 _l1GasPrice, uint256 _gasPricePerPubdata) internal pure returns (uint256) {
+ uint256 pubdataPriceETH = L1_GAS_PER_PUBDATA_BYTE * _l1GasPrice;
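+ // Round up so that the L2 gas price fully covers the L1 cost of publishing a pubdata byte:
+ // minL2GasPriceETH = ceil(pubdataPriceETH / _gasPricePerPubdata)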
+ uint256 minL2GasPriceETH = (pubdataPriceETH + _gasPricePerPubdata - 1) / _gasPricePerPubdata;
+
+ return Math.max(FAIR_L2_GAS_PRICE, minL2GasPriceETH);
+ }
+
+ /// @notice Finalize the withdrawal and release funds
+ /// @param _l2BlockNumber The L2 block number where the withdrawal was processed
+ /// @param _l2MessageIndex The position in the L2 logs Merkle tree of the l2Log that was sent with the message
+ /// @param _l2TxNumberInBlock The L2 transaction number in a block, in which the log was sent
+ /// @param _message The L2 withdraw data, stored in an L2 -> L1 message
+ /// @param _merkleProof The Merkle proof of the inclusion L2 -> L1 message about withdrawal initialization
+ function finalizeEthWithdrawal(
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes calldata _message,
+ bytes32[] calldata _merkleProof
+ ) external override nonReentrant {
+ require(!s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex], "jj");
+
+ L2Message memory l2ToL1Message = L2Message({
+ txNumberInBlock: _l2TxNumberInBlock,
+ sender: L2_ETH_TOKEN_ADDRESS,
+ data: _message
+ });
+
+ (address _l1WithdrawReceiver, uint256 _amount) = _parseL2WithdrawalMessage(_message);
+
+ _verifyWithdrawalLimit(_amount);
+
+ bool proofValid = proveL2MessageInclusion(_l2BlockNumber, _l2MessageIndex, l2ToL1Message, _merkleProof);
+ require(proofValid, "pi"); // Failed to verify that withdrawal was actually initialized on L2
+
+ s.isEthWithdrawalFinalized[_l2BlockNumber][_l2MessageIndex] = true;
+ _withdrawFunds(_l1WithdrawReceiver, _amount);
+
+ emit EthWithdrawalFinalized(_l1WithdrawReceiver, _amount);
+ }
+
+ function _verifyWithdrawalLimit(uint256 _amount) internal {
+ IAllowList.Withdrawal memory limitData = IAllowList(s.allowList).getTokenWithdrawalLimitData(address(0)); // address(0) denotes the ETH
+ if (!limitData.withdrawalLimitation) return; // no withdrawal limitation is placed for ETH
+ if (block.timestamp > s.lastWithdrawalLimitReset + 1 days) {
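+ // More than a day has passed since the last reset, so a new withdrawal window is started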
+ // The _amount should be <= withdrawalFactor percent of the balance
+ require(_amount <= (limitData.withdrawalFactor * address(this).balance) / 100, "w3");
+ s.withdrawnAmountInWindow = _amount; // resetting the withdrawn amount
+ s.lastWithdrawalLimitReset = block.timestamp;
+ } else {
+ // The _amount plus the amount already withdrawn in this window should be <= withdrawalFactor percent of the balance
+ require(
+ _amount + s.withdrawnAmountInWindow <= (limitData.withdrawalFactor * address(this).balance) / 100,
+ "w4"
+ );
+ s.withdrawnAmountInWindow += _amount; // accumulate the withdrawn amount for ETH
+ }
+ }
+
+ /// @notice Request execution of L2 transaction from L1.
+ /// @param _contractL2 The L2 receiver address
+ /// @param _l2Value `msg.value` of L2 transaction
+ /// @param _calldata The input of the L2 transaction
+ /// @param _l2GasLimit Maximum amount of L2 gas that transaction can consume during execution on L2
+ /// @param _l2GasPerPubdataByteLimit The maximum price in L2 gas per pubdata byte that the operator may charge the user
+ /// @param _factoryDeps An array of L2 bytecodes that will be marked as known on L2
+ /// @param _refundRecipient The address on L2 that will receive the refund for the transaction. If the transaction fails,
+ /// it will also be the address to receive `_l2Value`.
+ /// @return canonicalTxHash The hash of the requested L2 transaction. This hash can be used to follow the transaction status
+ function requestL2Transaction(
+ address _contractL2,
+ uint256 _l2Value,
+ bytes calldata _calldata,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit,
+ bytes[] calldata _factoryDeps,
+ address _refundRecipient
+ ) external payable nonReentrant senderCanCallFunction(s.allowList) returns (bytes32 canonicalTxHash) {
+ // Change the sender address if it is a smart contract to prevent address collision between L1 and L2.
+ // Please note, currently zkSync address derivation is different from the Ethereum one, but it may change in the future.
+ address sender = msg.sender;
+ if (sender != tx.origin) {
+ sender = AddressAliasHelper.applyL1ToL2Alias(msg.sender);
+ }
+
+ // The L1 -> L2 transaction may fail and the funds will be sent to the `_refundRecipient`,
+ // so we use `msg.value` instead of `_l2Value` as the bridged amount.
+ _verifyDepositLimit(msg.sender, msg.value);
+ canonicalTxHash = _requestL2Transaction(
+ sender,
+ _contractL2,
+ _l2Value,
+ _calldata,
+ _l2GasLimit,
+ _l2GasPerPubdataByteLimit,
+ _factoryDeps,
+ false,
+ _refundRecipient
+ );
+ }
+
+ function _verifyDepositLimit(address _depositor, uint256 _amount) internal {
+ IAllowList.Deposit memory limitData = IAllowList(s.allowList).getTokenDepositLimitData(address(0)); // address(0) denotes the ETH
+ if (!limitData.depositLimitation) return; // no deposit limitation is placed for ETH
+
+ require(s.totalDepositedAmountPerUser[_depositor] + _amount <= limitData.depositCap, "d2");
+ s.totalDepositedAmountPerUser[_depositor] += _amount;
+ }
+
+ function _requestL2Transaction(
+ address _sender,
+ address _contractAddressL2,
+ uint256 _l2Value,
+ bytes calldata _calldata,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit,
+ bytes[] calldata _factoryDeps,
+ bool _isFree,
+ address _refundRecipient
+ ) internal returns (bytes32 canonicalTxHash) {
+ require(_factoryDeps.length <= MAX_NEW_FACTORY_DEPS, "uj");
+ uint64 expirationTimestamp = uint64(block.timestamp + PRIORITY_EXPIRATION); // Safe to cast
+ uint256 txId = s.priorityQueue.getTotalPriorityTxs();
+
+ // Checking that the user provided enough ether to pay for the transaction.
+ // Using a new scope to prevent "stack too deep" error
+ {
+ uint256 baseCost = _isFree ? 0 : l2TransactionBaseCost(tx.gasprice, _l2GasLimit, _l2GasPerPubdataByteLimit);
+ require(msg.value >= baseCost + _l2Value);
+ }
+
+ // Here we manually assign fields for the struct to prevent "stack too deep" error
+ WritePriorityOpParams memory params;
+ params.sender = _sender;
+ params.txId = txId;
+ params.l2Value = _l2Value;
+ params.contractAddressL2 = _contractAddressL2;
+ params.expirationTimestamp = expirationTimestamp;
+ params.l2GasLimit = _l2GasLimit;
+ params.l2GasPricePerPubdata = _l2GasPerPubdataByteLimit;
+ params.valueToMint = msg.value;
+ params.refundRecipient = _refundRecipient == address(0) ? _sender : _refundRecipient;
+
+ canonicalTxHash = _writePriorityOp(params, _calldata, _factoryDeps);
+ }
+
+ function _serializeL2Transaction(
+ WritePriorityOpParams memory _priorityOpParams,
+ bytes calldata _calldata,
+ bytes[] calldata _factoryDeps
+ ) internal pure returns (L2CanonicalTransaction memory transaction) {
+ // Saving these two parameters in local variables prevents
+ // the "stack too deep" error
+ uint256 toMint = _priorityOpParams.valueToMint;
+ address refundRecipient = _priorityOpParams.refundRecipient;
+ transaction = serializeL2Transaction(
+ _priorityOpParams.txId,
+ _priorityOpParams.l2Value,
+ _priorityOpParams.sender,
+ _priorityOpParams.contractAddressL2,
+ _calldata,
+ _priorityOpParams.l2GasLimit,
+ _priorityOpParams.l2GasPricePerPubdata,
+ _factoryDeps,
+ toMint,
+ refundRecipient
+ );
+ }
+
+ /// @notice Stores a transaction record in storage & emits an event about it
+ function _writePriorityOp(
+ WritePriorityOpParams memory _priorityOpParams,
+ bytes calldata _calldata,
+ bytes[] calldata _factoryDeps
+ ) internal returns (bytes32 canonicalTxHash) {
+ L2CanonicalTransaction memory transaction = _serializeL2Transaction(_priorityOpParams, _calldata, _factoryDeps);
+
+ bytes memory transactionEncoding = abi.encode(transaction);
+
+ uint256 l2GasForTxBody = _getTransactionBodyGasLimit(
+ _priorityOpParams.l2GasLimit,
+ _priorityOpParams.l2GasPricePerPubdata,
+ transactionEncoding.length
+ );
+
+ // Ensuring that the transaction is provable
+ require(l2GasForTxBody <= s.priorityTxMaxGasLimit, "ui");
+ // Ensuring that the transaction can not output more pubdata than is processable
+ require(l2GasForTxBody / _priorityOpParams.l2GasPricePerPubdata <= PRIORITY_TX_MAX_PUBDATA, "uk");
+
+ // Ensuring that the transaction covers the minimal costs for its processing:
+ // hashing its content, publishing the factory dependencies, etc.
+ require(
+ _getMinimalPriorityTransactionGasLimit(
+ transactionEncoding.length,
+ _factoryDeps.length,
+ _priorityOpParams.l2GasPricePerPubdata
+ ) <= _priorityOpParams.l2GasLimit,
+ "um"
+ );
+
+ canonicalTxHash = keccak256(transactionEncoding);
+
+ s.priorityQueue.pushBack(
+ PriorityOperation({
+ canonicalTxHash: canonicalTxHash,
+ expirationTimestamp: _priorityOpParams.expirationTimestamp,
+ layer2Tip: uint192(0) // TODO: Restore after the fee modeling is stable. (SMA-1230)
+ })
+ );
+
+ // Data that is needed for the operator to simulate priority queue offchain
+ emit NewPriorityRequest(
+ _priorityOpParams.txId,
+ canonicalTxHash,
+ _priorityOpParams.expirationTimestamp,
+ transaction,
+ _factoryDeps
+ );
+ }
+
+ function _getMinimalPriorityTransactionGasLimit(
+ uint256 _encodingLength,
+ uint256 _numberOfFactoryDependencies,
+ uint256 _l2GasPricePerPubdata
+ ) internal pure returns (uint256) {
+ uint256 costForComputation;
+ {
+ // Adding the intrinsic cost for the transaction, i.e. auxiliary costs which cannot be easily accounted for
+ costForComputation = L1_TX_INTRINSIC_L2_GAS;
+
+ // Taking into account the hashing costs that depend on the length of the transaction
+ // Note that L1_TX_DELTA_544_ENCODING_BYTES is the delta in price for each 544 bytes of
+ // the transaction's encoding. 544 is the LCM of 136 and 32 (the input length of each keccak round
+ // and the size of each new encoding word).
+ costForComputation += Math.ceilDiv(_encodingLength * L1_TX_DELTA_544_ENCODING_BYTES, 544);
+
+ // Taking into account the additional costs of providing new factory dependencies
+ costForComputation += _numberOfFactoryDependencies * L1_TX_DELTA_FACTORY_DEPS_L2_GAS;
+
+ // There is a minimal amount of computational L2 gas that the transaction should cover
+ costForComputation = Math.max(costForComputation, L1_TX_MIN_L2_GAS_BASE);
+ }
+
+ uint256 costForPubdata = 0;
+ {
+ // Adding the intrinsic cost for the transaction, i.e. auxiliary costs which cannot be easily accounted for
+ costForPubdata = L1_TX_INTRINSIC_PUBDATA * _l2GasPricePerPubdata;
+
+ // Taking into account the additional costs of providing new factory dependencies
+ costForPubdata += _numberOfFactoryDependencies * L1_TX_DELTA_FACTORY_DEPS_PUBDATA * _l2GasPricePerPubdata;
+ }
+
+ return costForComputation + costForPubdata;
+ }
+
+ /// @dev Accepts the parameters of the L2 transaction and converts them to the canonical form.
+ /// @param _txId Priority operation ID, used as a unique identifier so that transactions always have a different hash
+ /// @param _l2Value `msg.value` of L2 transaction. Please note, this ether is not transferred with requesting priority op,
+ /// but will be taken from the balance in L2 during the execution
+ /// @param _sender The L2 address of the account that initiates the transaction
+ /// @param _contractAddressL2 The L2 receiver address
+ /// @param _calldata The input of the L2 transaction
+ /// @param _l2GasLimit Maximum amount of L2 gas that transaction can consume during execution on L2
+ /// @param _l2GasPerPubdataByteLimit The maximum price in L2 gas per pubdata byte that the user can be charged by the operator in this transaction
+ /// @param _factoryDeps An array of L2 bytecodes that will be marked as known on L2
+ /// @param _toMint The amount of ether to be minted with this transaction
+ /// @param _refundRecipient The address on L2 that will receive the refund for the transaction. If the transaction fails,
+ /// it will also be the address to receive `_l2Value`.
+ /// @return The canonical form of the l2 transaction parameters
+ function serializeL2Transaction(
+ uint256 _txId,
+ uint256 _l2Value,
+ address _sender,
+ address _contractAddressL2,
+ bytes calldata _calldata,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit,
+ bytes[] calldata _factoryDeps,
+ uint256 _toMint,
+ address _refundRecipient
+ ) public pure returns (L2CanonicalTransaction memory) {
+ return
+ L2CanonicalTransaction({
+ txType: PRIORITY_OPERATION_L2_TX_TYPE,
+ from: uint256(uint160(_sender)),
+ to: uint256(uint160(_contractAddressL2)),
+ gasLimit: _l2GasLimit,
+ gasPerPubdataByteLimit: _l2GasPerPubdataByteLimit,
+ maxFeePerGas: uint256(0),
+ maxPriorityFeePerGas: uint256(0),
+ paymaster: uint256(0),
+ // Note that the priority operation ID is used as the "nonce" for L1 -> L2 transactions
+ nonce: uint256(_txId),
+ value: _l2Value,
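+ // reserved[0] holds the amount of ether to mint on L2, reserved[1] holds the refund recipient address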
+ reserved: [_toMint, uint256(uint160(_refundRecipient)), 0, 0],
+ data: _calldata,
+ signature: new bytes(0),
+ factoryDeps: _hashFactoryDeps(_factoryDeps),
+ paymasterInput: new bytes(0),
+ reservedDynamic: new bytes(0)
+ });
+ }
+
+ /// @notice Hashes the L2 bytecodes and returns them in the format in which they are processed by the bootloader
+ function _hashFactoryDeps(bytes[] calldata _factoryDeps)
+ internal
+ pure
+ returns (uint256[] memory hashedFactoryDeps)
+ {
+ uint256 factoryDepsLen = _factoryDeps.length;
+ hashedFactoryDeps = new uint256[](factoryDepsLen);
+ for (uint256 i = 0; i < factoryDepsLen; i = i.uncheckedInc()) {
+ bytes32 hashedBytecode = L2ContractHelper.hashL2Bytecode(_factoryDeps[i]);
+
+ // Store the resulting hash sequentially in bytes.
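+ // `hashedFactoryDeps` is a dynamic memory array: its data starts 32 bytes after the length word,
+ // so element `i` is written at offset (i + 1) * 32 from the array pointer.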
+ assembly {
+ mstore(add(hashedFactoryDeps, mul(add(i, 1), 32)), hashedBytecode)
+ }
+ }
+ }
+
+ /// @notice Based on the total L2 gas limit and several other parameters of the transaction
+ /// returns the part of the L2 gas that will be spent on the block's overhead.
+ /// @dev The details of how this function works can be checked in the documentation
+ /// of the fee model of zkSync. The appropriate comments are also present
+ /// in the Rust implementation description of function `get_maximal_allowed_overhead`.
+ /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the block
+ /// and the L2 gas needed to process the transaction itself (i.e. the actual gasLimit that will be used for the transaction).
+ function _getOverheadForTransaction(
+ uint256 _totalGasLimit,
+ uint256, // _gasPricePerPubdata
+ uint256 // _encodingLength
+ ) internal pure returns (uint256 blockOverheadForTransaction) {
+ // TODO: (SMA-1715) make users pay for overhead
+ return 0;
+ }
+
+ /// @notice Based on the full L2 gas limit (that includes the block overhead) and other
+ /// properties of the transaction, returns the l2GasLimit for the body of the transaction (the actual execution).
+ /// @param _totalGasLimit The L2 gas limit that includes both the overhead for processing the block
+ /// and the L2 gas needed to process the transaction itself (i.e. the actual l2GasLimit that will be used for the transaction).
+ /// @param _gasPricePerPubdata The L2 gas price for each byte of pubdata.
+ /// @param _encodingLength The length of the ABI-encoding of the transaction.
+ function _getTransactionBodyGasLimit(
+ uint256 _totalGasLimit,
+ uint256 _gasPricePerPubdata,
+ uint256 _encodingLength
+ ) internal pure returns (uint256 txBodyGasLimit) {
+ uint256 overhead = _getOverheadForTransaction(_totalGasLimit, _gasPricePerPubdata, _encodingLength);
+
+ unchecked {
+ // The implementation of the `_getOverheadForTransaction` function
+ // enforces the fact that _totalGasLimit >= overhead.
+ txBodyGasLimit = _totalGasLimit - overhead;
+ }
+ }
+
+ /// @dev Decode the withdraw message that came from L2
+ function _parseL2WithdrawalMessage(bytes memory _message)
+ internal
+ pure
+ returns (address l1Receiver, uint256 amount)
+ {
+ // Check that the message length is correct.
+ // It should be equal to the length of the function signature + address + uint256 = 4 + 20 + 32 = 56 (bytes).
+ require(_message.length == 56);
+
+ (uint32 functionSignature, uint256 offset) = UnsafeBytes.readUint32(_message, 0);
+ require(bytes4(functionSignature) == this.finalizeEthWithdrawal.selector);
+
+ (l1Receiver, offset) = UnsafeBytes.readAddress(_message, offset);
+ (amount, offset) = UnsafeBytes.readUint256(_message, offset);
+ }
+}
diff --git a/ethereum/contracts/zksync/interfaces/IDiamondCut.sol b/ethereum/contracts/zksync/interfaces/IDiamondCut.sol
new file mode 100644
index 000000000..7a614bc36
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IDiamondCut.sol
@@ -0,0 +1,45 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../libraries/Diamond.sol";
+
+interface IDiamondCut {
+ function proposeTransparentUpgrade(Diamond.DiamondCutData calldata _diamondCut, uint40 _proposalId) external;
+
+ function proposeShadowUpgrade(bytes32 _proposalHash, uint40 _proposalId) external;
+
+ function cancelUpgradeProposal(bytes32 _proposedUpgradeHash) external;
+
+ function securityCouncilUpgradeApprove(bytes32 _upgradeProposalHash) external;
+
+ function executeUpgrade(Diamond.DiamondCutData calldata _diamondCut, bytes32 _proposalSalt) external;
+
+ function freezeDiamond() external;
+
+ function unfreezeDiamond() external;
+
+ function upgradeProposalHash(
+ Diamond.DiamondCutData calldata _diamondCut,
+ uint256 _proposalId,
+ bytes32 _salt
+ ) external pure returns (bytes32);
+
+ event ProposeTransparentUpgrade(
+ Diamond.DiamondCutData diamondCut,
+ uint256 indexed proposalId,
+ bytes32 proposalSalt
+ );
+
+ event ProposeShadowUpgrade(uint256 indexed proposalId, bytes32 indexed proposalHash);
+
+ event CancelUpgradeProposal(uint256 indexed proposalId, bytes32 indexed proposalHash);
+
+ event SecurityCouncilUpgradeApprove(uint256 indexed proposalId, bytes32 indexed proposalHash);
+
+ event ExecuteUpgrade(uint256 indexed proposalId, bytes32 indexed proposalHash, bytes32 proposalSalt);
+
+ event Freeze();
+
+ event Unfreeze();
+}
diff --git a/ethereum/contracts/zksync/interfaces/IExecutor.sol b/ethereum/contracts/zksync/interfaces/IExecutor.sol
new file mode 100644
index 000000000..f3dba2edd
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IExecutor.sol
@@ -0,0 +1,84 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+interface IExecutor {
+ /// @notice Rollup block stored data
+ /// @param blockNumber Rollup block number
+ /// @param blockHash Hash of L2 block
+ /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more
+ /// @param numberOfLayer1Txs Number of priority operations to be processed
+ /// @param priorityOperationsHash Hash of all priority operations from this block
+ /// @param l2LogsTreeRoot Root hash of tree that contains L2 -> L1 messages from this block
+ /// @param timestamp Rollup block timestamp; has the same format as the Ethereum block timestamp
+ /// @param commitment Verified input for the zkSync circuit
+ struct StoredBlockInfo {
+ uint64 blockNumber;
+ bytes32 blockHash;
+ uint64 indexRepeatedStorageChanges;
+ uint256 numberOfLayer1Txs;
+ bytes32 priorityOperationsHash;
+ bytes32 l2LogsTreeRoot;
+ uint256 timestamp;
+ bytes32 commitment;
+ }
+
+ /// @notice Data needed to commit new block
+ /// @param blockNumber Number of the committed block
+ /// @param timestamp Unix timestamp denoting the start of the block execution
+ /// @param indexRepeatedStorageChanges The serial number of the shortcut index that's used as a unique identifier for storage keys that were used twice or more
+ /// @param newStateRoot The state root of the full state tree
+ /// @param numberOfLayer1Txs Number of priority operations to be processed
+ /// @param l2LogsTreeRoot The root hash of the tree that contains all L2 -> L1 logs in the block
+ /// @param priorityOperationsHash Hash of all priority operations from this block
+ /// @param initialStorageChanges Storage write accesses as a concatenation of key-value pairs
+ /// @param repeatedStorageChanges Storage write accesses as a concatenation of index-value pairs
+ /// @param l2Logs Concatenation of all L2 -> L1 logs in the block
+ /// @param l2ArbitraryLengthMessages Array of hash preimages that were sent as the value of L2 logs by a special system L2 contract
+ /// @param factoryDeps Array of L2 bytecodes that were marked as known on L2
+ struct CommitBlockInfo {
+ uint64 blockNumber;
+ uint64 timestamp;
+ uint64 indexRepeatedStorageChanges;
+ bytes32 newStateRoot;
+ uint256 numberOfLayer1Txs;
+ bytes32 l2LogsTreeRoot;
+ bytes32 priorityOperationsHash;
+ bytes initialStorageChanges;
+ bytes repeatedStorageChanges;
+ bytes l2Logs;
+ bytes[] l2ArbitraryLengthMessages;
+ bytes[] factoryDeps;
+ }
+
+ /// @notice Recursive proof input data (individual commitments are constructed onchain)
+ struct ProofInput {
+ uint256[] recursiveAggregationInput;
+ uint256[] serializedProof;
+ }
+
+ function commitBlocks(StoredBlockInfo calldata _lastCommittedBlockData, CommitBlockInfo[] calldata _newBlocksData)
+ external;
+
+ function proveBlocks(
+ StoredBlockInfo calldata _prevBlock,
+ StoredBlockInfo[] calldata _committedBlocks,
+ ProofInput calldata _proof
+ ) external;
+
+ function executeBlocks(StoredBlockInfo[] calldata _blocksData) external;
+
+ function revertBlocks(uint256 _newLastBlock) external;
+
+ /// @notice Event emitted when a block is committed
+ event BlockCommit(uint256 indexed blockNumber, bytes32 indexed blockHash, bytes32 indexed commitment);
+
+ /// @notice Event emitted when blocks are verified
+ event BlocksVerification(uint256 indexed previousLastVerifiedBlock, uint256 indexed currentLastVerifiedBlock);
+
+ /// @notice Event emitted when a block is executed
+ event BlockExecution(uint256 indexed blockNumber, bytes32 indexed blockHash, bytes32 indexed commitment);
+
+ /// @notice Event emitted when blocks are reverted
+ event BlocksRevert(uint256 totalBlocksCommitted, uint256 totalBlocksVerified, uint256 totalBlocksExecuted);
+}
diff --git a/ethereum/contracts/zksync/interfaces/IGetters.sol b/ethereum/contracts/zksync/interfaces/IGetters.sol
new file mode 100644
index 000000000..0658f5944
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IGetters.sol
@@ -0,0 +1,87 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../Storage.sol";
+import "../libraries/PriorityQueue.sol";
+import {VerifierParams} from "../Storage.sol";
+
+interface IGetters {
+ /*//////////////////////////////////////////////////////////////
+ CUSTOM GETTERS
+ //////////////////////////////////////////////////////////////*/
+
+ function getVerifier() external view returns (address);
+
+ function getGovernor() external view returns (address);
+
+ function getPendingGovernor() external view returns (address);
+
+ function getTotalBlocksCommitted() external view returns (uint256);
+
+ function getTotalBlocksVerified() external view returns (uint256);
+
+ function getTotalBlocksExecuted() external view returns (uint256);
+
+ function getTotalPriorityTxs() external view returns (uint256);
+
+ function getFirstUnprocessedPriorityTx() external view returns (uint256);
+
+ function getPriorityQueueSize() external view returns (uint256);
+
+ function priorityQueueFrontOperation() external view returns (PriorityOperation memory);
+
+ function isValidator(address _address) external view returns (bool);
+
+ function l2LogsRootHash(uint256 _blockNumber) external view returns (bytes32 hash);
+
+ function storedBlockHash(uint256 _blockNumber) external view returns (bytes32);
+
+ function getL2BootloaderBytecodeHash() external view returns (bytes32);
+
+ function getL2DefaultAccountBytecodeHash() external view returns (bytes32);
+
+ function getVerifierParams() external view returns (VerifierParams memory);
+
+ function isDiamondStorageFrozen() external view returns (bool);
+
+ function getSecurityCouncil() external view returns (address);
+
+ function getUpgradeProposalState() external view returns (UpgradeState);
+
+ function getProposedUpgradeHash() external view returns (bytes32);
+
+ function getProposedUpgradeTimestamp() external view returns (uint256);
+
+ function getCurrentProposalId() external view returns (uint256);
+
+ function isApprovedBySecurityCouncil() external view returns (bool);
+
+ function getpriorityTxMaxGasLimit() external view returns (uint256);
+
+ function isEthWithdrawalFinalized(uint256 _l2BlockNumber, uint256 _l2MessageIndex) external view returns (bool);
+
+ /*//////////////////////////////////////////////////////////////
+ DIAMOND LOUPE
+ //////////////////////////////////////////////////////////////*/
+
+ /// @notice Facet structure compatible with the EIP-2535 diamond loupe
+ /// @param addr The address of the facet contract
+ /// @param selectors The NON-sorted array with selectors associated with facet
+ struct Facet {
+ address addr;
+ bytes4[] selectors;
+ }
+
+ function facets() external view returns (Facet[] memory);
+
+ function facetFunctionSelectors(address _facet) external view returns (bytes4[] memory);
+
+ function facetAddresses() external view returns (address[] memory facets);
+
+ function facetAddress(bytes4 _selector) external view returns (address facet);
+
+ function isFunctionFreezable(bytes4 _selector) external view returns (bool);
+
+ function isFacetFreezable(address _facet) external view returns (bool isFreezable);
+}
diff --git a/ethereum/contracts/zksync/interfaces/IGovernance.sol b/ethereum/contracts/zksync/interfaces/IGovernance.sol
new file mode 100644
index 000000000..5b9cbe374
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IGovernance.sol
@@ -0,0 +1,54 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../Verifier.sol";
+import "../Storage.sol";
+
+interface IGovernance {
+ function setPendingGovernor(address _newPendingGovernor) external;
+
+ function acceptGovernor() external;
+
+ function setValidator(address _validator, bool _active) external;
+
+ function setL2BootloaderBytecodeHash(bytes32 _l2BootloaderBytecodeHash) external;
+
+ function setL2DefaultAccountBytecodeHash(bytes32 _l2DefaultAccountBytecodeHash) external;
+
+ function setPorterAvailability(bool _zkPorterIsAvailable) external;
+
+ function setVerifier(Verifier _newVerifier) external;
+
+ function setVerifierParams(VerifierParams calldata _newVerifierParams) external;
+
+ function setPriorityTxMaxGasLimit(uint256 _newPriorityTxMaxGasLimit) external;
+
+ /// @notice Changes to the bytecode that is used in L2 as a bootloader (start program)
+ event NewL2BootloaderBytecodeHash(bytes32 indexed previousBytecodeHash, bytes32 indexed newBytecodeHash);
+
+ /// @notice Changes to the bytecode that is used in L2 as a default account
+ event NewL2DefaultAccountBytecodeHash(bytes32 indexed previousBytecodeHash, bytes32 indexed newBytecodeHash);
+
+ /// @notice Porter availability status changes
+ event IsPorterAvailableStatusUpdate(bool isPorterAvailable);
+
+ /// @notice Validator's status changed
+ event ValidatorStatusUpdate(address indexed validatorAddress, bool isActive);
+
+ /// @notice pendingGovernor is changed
+ /// @dev Also emitted when the new governor is accepted; in this case, `newPendingGovernor` is the zero address
+ event NewPendingGovernor(address indexed oldPendingGovernor, address indexed newPendingGovernor);
+
+ /// @notice Governor changed
+ event NewGovernor(address indexed oldGovernor, address indexed newGovernor);
+
+ /// @notice Verifier address changed
+ event NewVerifier(address indexed oldVerifier, address indexed newVerifier);
+
+ /// @notice Verifier parameters changed
+ event NewVerifierParams(VerifierParams oldVerifierParams, VerifierParams newVerifierParams);
+
+ /// @notice Priority transaction max L2 gas limit changed
+ event NewPriorityTxMaxGasLimit(uint256 oldPriorityTxMaxGasLimit, uint256 newPriorityTxMaxGasLimit);
+}
diff --git a/ethereum/contracts/zksync/interfaces/IMailbox.sol b/ethereum/contracts/zksync/interfaces/IMailbox.sol
new file mode 100644
index 000000000..1ec59078e
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IMailbox.sol
@@ -0,0 +1,165 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import {L2Log, L2Message} from "../Storage.sol";
+
+/// @dev The enum that represents the transaction execution status
+/// @param Failure The transaction execution failed
+/// @param Success The transaction execution succeeded
+enum TxStatus {
+ Failure,
+ Success
+}
+
+interface IMailbox {
+ /// @dev Structure that includes all fields of the L2 transaction
+ /// @dev The hash of this structure is the "canonical L2 transaction hash" and can be used as a unique identifier of a tx
+ /// @param txType The tx type number, depending on which the L2 transaction can be interpreted differently
+ /// @param from The sender's address. `uint256` type for possible address format changes and maintaining backward compatibility
+ /// @param to The recipient's address. `uint256` type for possible address format changes and maintaining backward compatibility
+ /// @param gasLimit The L2 gas limit for the L2 transaction. Analog to the `gasLimit` on L1 transactions
+ /// @param gasPerPubdataByteLimit The maximum amount of L2 gas that one byte of pubdata may cost (every piece of data that will be stored on L1 as calldata)
+ /// @param maxFeePerGas The absolute maximum the sender is willing to pay per unit of L2 gas to get the transaction included in a block. Analog to the EIP-1559 `maxFeePerGas` on L1 transactions
+ /// @param maxPriorityFeePerGas The additional fee that is paid directly to the validator to incentivize them to include the transaction in a block. Analog to the EIP-1559 `maxPriorityFeePerGas` on L1 transactions
+ /// @param paymaster The address of the EIP-4337 paymaster, that will pay fees for the transaction. `uint256` type for possible address format changes and maintaining backward compatibility
+ /// @param nonce The nonce of the transaction. For L1->L2 transactions it is the priority operation Id.
+ /// @param value The value to pass with the transaction
+ /// @param reserved The fixed-length fields for usage in a future extension of transaction formats
+ /// @param data The calldata that is transmitted for the transaction call
+ /// @param signature An abstract set of bytes that are used for transaction authorization
+ /// @param factoryDeps The set of L2 bytecode hashes whose preimages were shown on L1
+ /// @param paymasterInput The arbitrary-length data that is used as a calldata to the paymaster pre-call
+ /// @param reservedDynamic The arbitrary-length field for usage in a future extension of transaction formats
+ struct L2CanonicalTransaction {
+ uint256 txType;
+ uint256 from;
+ uint256 to;
+ uint256 gasLimit;
+ uint256 gasPerPubdataByteLimit;
+ uint256 maxFeePerGas;
+ uint256 maxPriorityFeePerGas;
+ uint256 paymaster;
+ uint256 nonce;
+ uint256 value;
+ // In the future, we might want to add some
+ // new fields to the struct. The `txData` struct
+ // is passed to the account, and any change to its structure
+ // would mean a breaking change for these accounts. To prevent this,
+ // we should keep some fields as "reserved".
+ // It is also recommended that their length is fixed, since
+ // it would allow easier proof integration (in case we will need
+ // some special circuit for preprocessing transactions).
+ uint256[4] reserved;
+ bytes data;
+ bytes signature;
+ uint256[] factoryDeps;
+ bytes paymasterInput;
+ // Reserved dynamic type for future use-cases. Using it should be avoided,
+ // but it is kept here in case we want to enable some additional functionality.
+ bytes reservedDynamic;
+ }
+
+ /// @dev Internal structure that contains the parameters for the writePriorityOp
+ /// internal function.
+ /// @param sender The sender's address.
+ /// @param txId The id of the priority transaction.
+ /// @param l2Value The msg.value of the L2 transaction.
+ /// @param contractAddressL2 The address of the contract on L2 to call.
+ /// @param expirationTimestamp The timestamp by which the priority operation must be processed by the operator.
+ /// @param l2GasLimit The limit of the L2 gas for the L2 transaction
+ /// @param l2GasPricePerPubdata The price for a single pubdata byte in L2 gas.
+ /// @param valueToMint The amount of ether that should be minted on L2 as the result of this transaction.
+ /// @param refundRecipient The recipient of the refund for the transaction on L2. If the transaction fails, then
+ /// this address will receive the `l2Value`.
+ struct WritePriorityOpParams {
+ address sender;
+ uint256 txId;
+ uint256 l2Value;
+ address contractAddressL2;
+ uint64 expirationTimestamp;
+ uint256 l2GasLimit;
+ uint256 l2GasPricePerPubdata;
+ uint256 valueToMint;
+ address refundRecipient;
+ }
+
+ function proveL2MessageInclusion(
+ uint256 _blockNumber,
+ uint256 _index,
+ L2Message calldata _message,
+ bytes32[] calldata _proof
+ ) external view returns (bool);
+
+ function proveL2LogInclusion(
+ uint256 _blockNumber,
+ uint256 _index,
+ L2Log memory _log,
+ bytes32[] calldata _proof
+ ) external view returns (bool);
+
+ function proveL1ToL2TransactionStatus(
+ bytes32 _l2TxHash,
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes32[] calldata _merkleProof,
+ TxStatus _status
+ ) external view returns (bool);
+
+ function serializeL2Transaction(
+ uint256 _txId,
+ uint256 _l2Value,
+ address _sender,
+ address _contractAddressL2,
+ bytes calldata _calldata,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit,
+ bytes[] calldata _factoryDeps,
+ uint256 _toMint,
+ address _refundRecipient
+ ) external pure returns (L2CanonicalTransaction memory);
+
+ function finalizeEthWithdrawal(
+ uint256 _l2BlockNumber,
+ uint256 _l2MessageIndex,
+ uint16 _l2TxNumberInBlock,
+ bytes calldata _message,
+ bytes32[] calldata _merkleProof
+ ) external;
+
+ function requestL2Transaction(
+ address _contractL2,
+ uint256 _l2Value,
+ bytes calldata _calldata,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit,
+ bytes[] calldata _factoryDeps,
+ address _refundRecipient
+ ) external payable returns (bytes32 canonicalTxHash);
+
+ function l2TransactionBaseCost(
+ uint256 _gasPrice,
+ uint256 _l2GasLimit,
+ uint256 _l2GasPerPubdataByteLimit
+ ) external view returns (uint256);
+
+ /// @notice New priority request event. Emitted when a request is placed into the priority queue
+ /// @param txId Serial number of the priority operation
+ /// @param txHash keccak256 hash of encoded transaction representation
+ /// @param expirationTimestamp Timestamp up to which priority request should be processed
+ /// @param transaction The whole transaction structure that is requested to be executed on L2
+ /// @param factoryDeps An array of bytecodes that were shown in the L1 public data. Will be marked as known bytecodes in L2
+ event NewPriorityRequest(
+ uint256 txId,
+ bytes32 txHash,
+ uint64 expirationTimestamp,
+ L2CanonicalTransaction transaction,
+ bytes[] factoryDeps
+ );
+
+ /// @notice Emitted when the withdrawal is finalized on L1 and funds are released.
+ /// @param to The address to which the funds were sent
+ /// @param amount The amount of funds that were sent
+ event EthWithdrawalFinalized(address indexed to, uint256 amount);
+}
diff --git a/ethereum/contracts/zksync/interfaces/IMockExecutor.sol b/ethereum/contracts/zksync/interfaces/IMockExecutor.sol
new file mode 100644
index 000000000..62037bc88
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IMockExecutor.sol
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./IExecutor.sol";
+
+interface IMockExecutor {
+ function fakeProveBlocks(
+ IExecutor.StoredBlockInfo calldata _prevBlock,
+ IExecutor.StoredBlockInfo[] calldata _committedBlocks,
+ IExecutor.ProofInput calldata _proof
+ ) external;
+}
diff --git a/ethereum/contracts/zksync/interfaces/IZkSync.sol b/ethereum/contracts/zksync/interfaces/IZkSync.sol
new file mode 100644
index 000000000..d47a3eed7
--- /dev/null
+++ b/ethereum/contracts/zksync/interfaces/IZkSync.sol
@@ -0,0 +1,11 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./IMailbox.sol";
+import "./IGovernance.sol";
+import "./IExecutor.sol";
+import "./IDiamondCut.sol";
+import "./IGetters.sol";
+
+interface IZkSync is IMailbox, IGovernance, IExecutor, IDiamondCut, IGetters {}
diff --git a/ethereum/contracts/zksync/libraries/Diamond.sol b/ethereum/contracts/zksync/libraries/Diamond.sol
new file mode 100644
index 000000000..debd45461
--- /dev/null
+++ b/ethereum/contracts/zksync/libraries/Diamond.sol
@@ -0,0 +1,296 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "@openzeppelin/contracts/utils/math/SafeCast.sol";
+import "../../common/libraries/UncheckedMath.sol";
+
+/// @author Matter Labs
+/// @notice The helper library for managing the EIP-2535 diamond proxy.
+library Diamond {
+ using UncheckedMath for uint256;
+ using SafeCast for uint256;
+
+ /// @dev Magic value that should be returned by diamond cut initializer contracts.
+ /// @dev Used to distinguish calls to contracts that are meant to be used as diamond initializers from calls to other contracts.
+ bytes32 constant DIAMOND_INIT_SUCCESS_RETURN_VALUE =
+ 0x33774e659306e47509050e97cb651e731180a42d458212294d30751925c551a2; // keccak256("diamond.zksync.init") - 1
+
+ /// @dev Storage position of `DiamondStorage` structure.
+ bytes32 constant DIAMOND_STORAGE_POSITION = 0xc8fcad8db84d3cc18b4c41d551ea0ee66dd599cde068d998e57d5e09332c131b; // keccak256("diamond.standard.diamond.storage") - 1;
+
+ event DiamondCut(FacetCut[] facetCuts, address initAddress, bytes initCalldata);
+
+ /// @dev Utility struct that contains associated facet & meta information of selector
+ /// @param facetAddress address of the facet which is connected with selector
+ /// @param selectorPosition The index in the `FacetToSelectors.selectors` array where the selector is stored
+ /// @param isFreezable denotes whether the selector can be frozen.
+ struct SelectorToFacet {
+ address facetAddress;
+ uint16 selectorPosition;
+ bool isFreezable;
+ }
+
+ /// @dev Utility struct that contains associated selectors & meta information of facet
+ /// @param selectors list of all selectors that belong to the facet
+ /// @param facetPosition The index in the `DiamondStorage.facets` array where the facet is stored
+ struct FacetToSelectors {
+ bytes4[] selectors;
+ uint16 facetPosition;
+ }
+
+ /// @notice The structure that holds all diamond proxy associated parameters
+ /// @dev According to the EIP-2535 should be stored on a special storage key - `DIAMOND_STORAGE_POSITION`
+ /// @param selectorToFacet A mapping from the selector to the facet address and its meta information
+ /// @param facetToSelectors A mapping from facet address to its selector with meta information
+ /// @param facets The array of all unique facet addresses that belong to the diamond proxy
+ /// @param isFrozen Denotes whether the diamond proxy is frozen and all freezable facets are not accessible
+ struct DiamondStorage {
+ mapping(bytes4 => SelectorToFacet) selectorToFacet;
+ mapping(address => FacetToSelectors) facetToSelectors;
+ address[] facets;
+ bool isFrozen;
+ }
+
+ /// @dev Parameters for diamond changes that touch one of the facets
+ /// @param facet The address of facet that's affected by the cut
+ /// @param action The action that is made on the facet
+ /// @param isFreezable Denotes whether the facet & all its selectors can be frozen
+ /// @param selectors An array of unique selectors that belong to the facet address
+ struct FacetCut {
+ address facet;
+ Action action;
+ bool isFreezable;
+ bytes4[] selectors;
+ }
+
+ /// @dev Structure of the diamond proxy changes
+ /// @param facetCuts The set of changes (adding/removing/replacement) of implementation contracts
+ /// @param initAddress The address that's delegate called after setting up new facet changes
+ /// @param initCalldata Calldata for the delegate call to `initAddress`
+ struct DiamondCutData {
+ FacetCut[] facetCuts;
+ address initAddress;
+ bytes initCalldata;
+ }
+
+ /// @dev Type of change over diamond: add/replace/remove facets
+ enum Action {
+ Add,
+ Replace,
+ Remove
+ }
+
+ /// @return diamondStorage The pointer to the storage slot where all diamond proxy specific parameters are stored
+ function getDiamondStorage() internal pure returns (DiamondStorage storage diamondStorage) {
+ bytes32 position = DIAMOND_STORAGE_POSITION;
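+ // Assembly is needed to bind the returned storage pointer to an arbitrary slot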
+ assembly {
+ diamondStorage.slot := position
+ }
+ }
+
+ /// @dev Add/replace/remove any number of selectors and optionally execute a function with delegatecall
+ /// @param _diamondCut Diamond's facet changes and the parameters to optional initialization delegatecall
+ function diamondCut(DiamondCutData memory _diamondCut) internal {
+ FacetCut[] memory facetCuts = _diamondCut.facetCuts;
+ address initAddress = _diamondCut.initAddress;
+ bytes memory initCalldata = _diamondCut.initCalldata;
+ uint256 facetCutsLength = facetCuts.length;
+ for (uint256 i = 0; i < facetCutsLength; i = i.uncheckedInc()) {
+ Action action = facetCuts[i].action;
+ address facet = facetCuts[i].facet;
+ bool isFacetFreezable = facetCuts[i].isFreezable;
+ bytes4[] memory selectors = facetCuts[i].selectors;
+
+ require(selectors.length > 0, "B"); // no functions for diamond cut
+
+ if (action == Action.Add) {
+ _addFunctions(facet, selectors, isFacetFreezable);
+ } else if (action == Action.Replace) {
+ _replaceFunctions(facet, selectors, isFacetFreezable);
+ } else if (action == Action.Remove) {
+ _removeFunctions(facet, selectors);
+ } else {
+ revert("C"); // undefined diamond cut action
+ }
+ }
+
+ _initializeDiamondCut(initAddress, initCalldata);
+ emit DiamondCut(facetCuts, initAddress, initCalldata);
+ }
+
+ /// @dev Add new functions to the diamond proxy
+ /// NOTE: expects but does NOT enforce that `_selectors` is a NON-EMPTY array
+ function _addFunctions(
+ address _facet,
+ bytes4[] memory _selectors,
+ bool _isFacetFreezable
+ ) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ require(_facet != address(0), "G"); // facet with zero address cannot be added
+
+ // Add facet to the list of facets if the facet address is a new one
+ _saveFacetIfNew(_facet);
+
+ uint256 selectorsLength = _selectors.length;
+ for (uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) {
+ bytes4 selector = _selectors[i];
+ SelectorToFacet memory oldFacet = ds.selectorToFacet[selector];
+ require(oldFacet.facetAddress == address(0), "J"); // facet for this selector already exists
+
+ _addOneFunction(_facet, selector, _isFacetFreezable);
+ }
+ }
+
+ /// @dev Change the facets associated with already known function selectors
+ /// NOTE: expects but does NOT enforce that `_selectors` is a NON-EMPTY array
+ function _replaceFunctions(
+ address _facet,
+ bytes4[] memory _selectors,
+ bool _isFacetFreezable
+ ) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ require(_facet != address(0), "K"); // cannot replace facet with zero address
+
+ uint256 selectorsLength = _selectors.length;
+ for (uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) {
+ bytes4 selector = _selectors[i];
+ SelectorToFacet memory oldFacet = ds.selectorToFacet[selector];
+ require(oldFacet.facetAddress != address(0), "L"); // cannot replace a selector that is not registered to any facet yet
+
+ _removeOneFunction(oldFacet.facetAddress, selector);
+ // Add facet to the list of facets if the facet address is a new one
+ _saveFacetIfNew(_facet);
+ _addOneFunction(_facet, selector, _isFacetFreezable);
+ }
+ }
+
+ /// @dev Remove the association between function selectors and their facets
+ /// NOTE: expects but does NOT enforce that `_selectors` is a NON-EMPTY array
+ function _removeFunctions(address _facet, bytes4[] memory _selectors) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ require(_facet == address(0), "a1"); // facet address must be zero
+
+ uint256 selectorsLength = _selectors.length;
+ for (uint256 i = 0; i < selectorsLength; i = i.uncheckedInc()) {
+ bytes4 selector = _selectors[i];
+ SelectorToFacet memory oldFacet = ds.selectorToFacet[selector];
+ require(oldFacet.facetAddress != address(0), "a2"); // Can't delete a non-existent facet
+
+ _removeOneFunction(oldFacet.facetAddress, selector);
+ }
+ }
+
+ /// @dev Add address to the list of known facets if it is not on the list yet
+ /// NOTE: should be called ONLY before adding a new selector associated with the address
+ function _saveFacetIfNew(address _facet) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ uint256 selectorsLength = ds.facetToSelectors[_facet].selectors.length;
+ // If there are no selectors associated with the facet then save the facet as a new one
+ if (selectorsLength == 0) {
+ ds.facetToSelectors[_facet].facetPosition = ds.facets.length.toUint16();
+ ds.facets.push(_facet);
+ }
+ }
+
+ /// @dev Add one function to the already known facet
+ /// NOTE: It is expected but NOT enforced that:
+ /// - `_facet` is NON-ZERO address
+ /// - `_facet` is already stored address in `DiamondStorage.facets`
+ /// - `_selector` is NOT associated with another facet
+ function _addOneFunction(
+ address _facet,
+ bytes4 _selector,
+ bool _isSelectorFreezable
+ ) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ uint16 selectorPosition = (ds.facetToSelectors[_facet].selectors.length).toUint16();
+
+ // If selectorPosition is nonzero, the facet already has selectors registered,
+ // so the freezability of the new selector must match the freezability of the first selector,
+ // ensuring that all the selectors in a facet have the same freezability
+ if (selectorPosition != 0) {
+ bytes4 selector0 = ds.facetToSelectors[_facet].selectors[0];
+ require(_isSelectorFreezable == ds.selectorToFacet[selector0].isFreezable, "J1");
+ }
+
+ ds.selectorToFacet[_selector] = SelectorToFacet({
+ facetAddress: _facet,
+ selectorPosition: selectorPosition,
+ isFreezable: _isSelectorFreezable
+ });
+ ds.facetToSelectors[_facet].selectors.push(_selector);
+ }
+
+ /// @dev Remove one function associated with the facet
+ /// NOTE: It is expected but NOT enforced that `_facet` is NON-ZERO address
+ function _removeOneFunction(address _facet, bytes4 _selector) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ // Get the index of the selector in `FacetToSelectors.selectors` and the index of the array's last element
+ uint256 selectorPosition = ds.selectorToFacet[_selector].selectorPosition;
+ uint256 lastSelectorPosition = ds.facetToSelectors[_facet].selectors.length - 1;
+
+ // If the selector is not at the end of the array then move the last element to the selector position
+ if (selectorPosition != lastSelectorPosition) {
+ bytes4 lastSelector = ds.facetToSelectors[_facet].selectors[lastSelectorPosition];
+
+ ds.facetToSelectors[_facet].selectors[selectorPosition] = lastSelector;
+ ds.selectorToFacet[lastSelector].selectorPosition = selectorPosition.toUint16();
+ }
+
+ // Remove last element from the selectors array
+ ds.facetToSelectors[_facet].selectors.pop();
+
+ // Finally, clean up the association with facet
+ delete ds.selectorToFacet[_selector];
+
+ // If there are no selectors for facet then remove the facet from the list of known facets
+ if (lastSelectorPosition == 0) {
+ _removeFacet(_facet);
+ }
+ }
+
+ /// @dev Remove the facet from the list of known facets
+ /// NOTE: It is expected but NOT enforced that there are no selectors associated with `_facet`
+ function _removeFacet(address _facet) private {
+ DiamondStorage storage ds = getDiamondStorage();
+
+ // Get index of `DiamondStorage.facets` of the facet and last element of array
+ uint256 facetPosition = ds.facetToSelectors[_facet].facetPosition;
+ uint256 lastFacetPosition = ds.facets.length - 1;
+
+ // If the facet is not at the end of the array then move the last element to the facet position
+ if (facetPosition != lastFacetPosition) {
+ address lastFacet = ds.facets[lastFacetPosition];
+
+ ds.facets[facetPosition] = lastFacet;
+ ds.facetToSelectors[lastFacet].facetPosition = facetPosition.toUint16();
+ }
+
+ // Remove last element from the facets array
+ ds.facets.pop();
+ }
+
+ /// @dev Delegates call to the initialization address with provided calldata
+ /// @dev Used as a final step of diamond cut to execute the logic of the initialization for changed facets
+ function _initializeDiamondCut(address _init, bytes memory _calldata) private {
+ if (_init == address(0)) {
+ require(_calldata.length == 0, "H"); // Non-empty calldata for zero address
+ } else {
+ // Do not check whether `_init` is a contract since later we check that it returns data.
+ (bool success, bytes memory data) = _init.delegatecall(_calldata);
+ require(success, "I"); // delegatecall failed
+
+ // Check that the called contract returns the magic value to make sure that
+ // the contract logic is intended to be used as a diamond cut initializer.
+ require(data.length == 32, "lp");
+ require(abi.decode(data, (bytes32)) == DIAMOND_INIT_SUCCESS_RETURN_VALUE, "lp1");
+ }
+ }
+}
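
The `_removeOneFunction` and `_removeFacet` functions above share the same swap-and-pop idiom: the entry being deleted is overwritten by the last element of the array, that element's stored position is updated, and the array is shortened by one. A minimal TypeScript sketch of the same bookkeeping follows; the names (`SelectorIndex`, `removeSelector`) are illustrative and do not exist in this repository.

```typescript
// Minimal sketch of the swap-and-pop bookkeeping used by the Diamond library.
// Names and types are illustrative only.
type Selector = string;

interface SelectorIndex {
    selectors: Selector[];             // like FacetToSelectors.selectors
    positionOf: Map<Selector, number>; // like SelectorToFacet.selectorPosition
}

function removeSelector(index: SelectorIndex, selector: Selector): void {
    const position = index.positionOf.get(selector);
    if (position === undefined) throw new Error('unknown selector');

    const lastPosition = index.selectors.length - 1;
    if (position !== lastPosition) {
        // Move the last selector into the freed slot and update its stored position
        const lastSelector = index.selectors[lastPosition];
        index.selectors[position] = lastSelector;
        index.positionOf.set(lastSelector, position);
    }

    // Shrink the array and drop the mapping entry, mirroring `pop()` + `delete`
    index.selectors.pop();
    index.positionOf.delete(selector);
}
```
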
diff --git a/ethereum/contracts/zksync/libraries/Merkle.sol b/ethereum/contracts/zksync/libraries/Merkle.sol
new file mode 100644
index 000000000..196dfd16d
--- /dev/null
+++ b/ethereum/contracts/zksync/libraries/Merkle.sol
@@ -0,0 +1,46 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../../common/libraries/UncheckedMath.sol";
+
+/// @author Matter Labs
+library Merkle {
+ using UncheckedMath for uint256;
+
+ /// @dev Calculate Merkle root by the provided Merkle proof.
+ /// NOTE: When using this function, check that the _path length is equal to the tree height to prevent shorter/longer path attacks
+ /// @param _path Merkle path from the leaf to the root
+ /// @param _index Leaf index in the tree
+ /// @param _itemHash Hash of leaf content
+ /// @return The Merkle root
+ function calculateRoot(
+ bytes32[] calldata _path,
+ uint256 _index,
+ bytes32 _itemHash
+ ) internal pure returns (bytes32) {
+ uint256 pathLength = _path.length;
+ require(pathLength > 0, "xc");
+ require(pathLength < 256, "bt");
+ require(_index < (1 << pathLength), "pz");
+
+ bytes32 currentHash = _itemHash;
+ for (uint256 i; i < pathLength; i = i.uncheckedInc()) {
+ currentHash = (_index % 2 == 0)
+ ? _efficientHash(currentHash, _path[i])
+ : _efficientHash(_path[i], currentHash);
+ _index /= 2;
+ }
+
+ return currentHash;
+ }
+
+ /// @dev Keccak hash of the concatenation of two 32-byte words
+ function _efficientHash(bytes32 _lhs, bytes32 _rhs) private pure returns (bytes32 result) {
+ assembly {
+ mstore(0x00, _lhs)
+ mstore(0x20, _rhs)
+ result := keccak256(0x00, 0x40)
+ }
+ }
+}
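
For intuition, the loop in `calculateRoot` walks from the leaf to the root: at each level the current node is hashed with its sibling from `_path`, and the parity of `_index` decides whether the current node is the left or the right input. A rough off-chain equivalent in TypeScript using ethers, assuming the `keccakPair` helper name (not part of this repository):

```typescript
import { ethers } from 'ethers';

// Hash two 32-byte words, matching `_efficientHash(lhs, rhs)` above
function keccakPair(lhs: string, rhs: string): string {
    return ethers.utils.keccak256(ethers.utils.concat([lhs, rhs]));
}

// Recompute the Merkle root from a leaf hash, its index, and the sibling path
function calculateRoot(path: string[], index: number, itemHash: string): string {
    let currentHash = itemHash;
    for (const sibling of path) {
        // Even index: the current node is a left child; odd index: a right child
        currentHash = index % 2 === 0 ? keccakPair(currentHash, sibling) : keccakPair(sibling, currentHash);
        index = Math.floor(index / 2);
    }
    return currentHash;
}
```
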
diff --git a/ethereum/contracts/zksync/libraries/PairingsBn254.sol b/ethereum/contracts/zksync/libraries/PairingsBn254.sol
new file mode 100644
index 000000000..a2fb8bdc5
--- /dev/null
+++ b/ethereum/contracts/zksync/libraries/PairingsBn254.sol
@@ -0,0 +1,276 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+library PairingsBn254 {
+ uint256 constant q_mod = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
+ uint256 constant r_mod = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
+ uint256 constant bn254_b_coeff = 3;
+
+ struct G1Point {
+ uint256 X;
+ uint256 Y;
+ }
+
+ struct Fr {
+ uint256 value;
+ }
+
+ function new_fr(uint256 fr) internal pure returns (Fr memory) {
+ require(fr < r_mod);
+ return Fr({value: fr});
+ }
+
+ function copy(Fr memory self) internal pure returns (Fr memory n) {
+ n.value = self.value;
+ }
+
+ function assign(Fr memory self, Fr memory other) internal pure {
+ self.value = other.value;
+ }
+
+ function inverse(Fr memory fr) internal view returns (Fr memory) {
+ require(fr.value != 0);
+ return pow(fr, r_mod - 2);
+ }
+
+ function add_assign(Fr memory self, Fr memory other) internal pure {
+ self.value = addmod(self.value, other.value, r_mod);
+ }
+
+ function sub_assign(Fr memory self, Fr memory other) internal pure {
+ self.value = addmod(self.value, r_mod - other.value, r_mod);
+ }
+
+ function mul_assign(Fr memory self, Fr memory other) internal pure {
+ self.value = mulmod(self.value, other.value, r_mod);
+ }
+
+ function pow(Fr memory self, uint256 power) internal view returns (Fr memory) {
+ uint256[6] memory input = [32, 32, 32, self.value, power, r_mod];
+ uint256[1] memory result;
+ bool success;
+ assembly {
+ success := staticcall(gas(), 0x05, input, 0xc0, result, 0x20)
+ }
+ require(success);
+ return Fr({value: result[0]});
+ }
+
+ // Encoding of field elements is: X[0] * z + X[1]
+ struct G2Point {
+ uint256[2] X;
+ uint256[2] Y;
+ }
+
+ function P1() internal pure returns (G1Point memory) {
+ return G1Point(1, 2);
+ }
+
+ function new_g1(uint256 x, uint256 y) internal pure returns (G1Point memory) {
+ return G1Point(x, y);
+ }
+
+ function new_g1_checked(uint256 x, uint256 y) internal pure returns (G1Point memory) {
+ if (x == 0 && y == 0) {
+ // point of infinity is (0,0)
+ return G1Point(x, y);
+ }
+
+ // check encoding
+ require(x < q_mod, "x axis isn't valid");
+ require(y < q_mod, "y axis isn't valid");
+ // check on curve
+ uint256 lhs = mulmod(y, y, q_mod); // y^2
+
+ uint256 rhs = mulmod(x, x, q_mod); // x^2
+ rhs = mulmod(rhs, x, q_mod); // x^3
+ rhs = addmod(rhs, bn254_b_coeff, q_mod); // x^3 + b
+ require(lhs == rhs, "is not on curve");
+
+ return G1Point(x, y);
+ }
+
+ function new_g2(uint256[2] memory x, uint256[2] memory y) internal pure returns (G2Point memory) {
+ return G2Point(x, y);
+ }
+
+ function copy_g1(G1Point memory self) internal pure returns (G1Point memory result) {
+ result.X = self.X;
+ result.Y = self.Y;
+ }
+
+ function P2() internal pure returns (G2Point memory) {
+ // for some reason ethereum expects to have c1*v + c0 form
+
+ return
+ G2Point(
+ [
+ 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2,
+ 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed
+ ],
+ [
+ 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b,
+ 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa
+ ]
+ );
+ }
+
+ function negate(G1Point memory self) internal pure {
+ // The prime q in the base field F_q for G1
+ if (self.Y == 0) {
+ require(self.X == 0);
+ return;
+ }
+
+ self.Y = q_mod - self.Y;
+ }
+
+ function point_add(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
+ point_add_into_dest(p1, p2, r);
+ return r;
+ }
+
+ function point_add_assign(G1Point memory p1, G1Point memory p2) internal view {
+ point_add_into_dest(p1, p2, p1);
+ }
+
+ function point_add_into_dest(
+ G1Point memory p1,
+ G1Point memory p2,
+ G1Point memory dest
+ ) internal view {
+ if (p2.X == 0 && p2.Y == 0) {
+ // we add zero, nothing happens
+ dest.X = p1.X;
+ dest.Y = p1.Y;
+ return;
+ } else if (p1.X == 0 && p1.Y == 0) {
+ // we add into zero, and we add non-zero point
+ dest.X = p2.X;
+ dest.Y = p2.Y;
+ return;
+ } else {
+ uint256[4] memory input;
+
+ input[0] = p1.X;
+ input[1] = p1.Y;
+ input[2] = p2.X;
+ input[3] = p2.Y;
+
+ bool success;
+ assembly {
+ success := staticcall(gas(), 6, input, 0x80, dest, 0x40)
+ }
+ require(success);
+ }
+ }
+
+ function point_sub_assign(G1Point memory p1, G1Point memory p2) internal view {
+ point_sub_into_dest(p1, p2, p1);
+ }
+
+ function point_sub_into_dest(
+ G1Point memory p1,
+ G1Point memory p2,
+ G1Point memory dest
+ ) internal view {
+ if (p2.X == 0 && p2.Y == 0) {
+ // we subtracted zero, nothing happens
+ dest.X = p1.X;
+ dest.Y = p1.Y;
+ return;
+ } else if (p1.X == 0 && p1.Y == 0) {
+ // we subtract from zero, and we subtract non-zero point
+ dest.X = p2.X;
+ dest.Y = q_mod - p2.Y;
+ return;
+ } else {
+ uint256[4] memory input;
+
+ input[0] = p1.X;
+ input[1] = p1.Y;
+ input[2] = p2.X;
+ input[3] = q_mod - p2.Y;
+
+ bool success = false;
+ assembly {
+ success := staticcall(gas(), 6, input, 0x80, dest, 0x40)
+ }
+ require(success);
+ }
+ }
+
+ function point_mul(G1Point memory p, Fr memory s) internal view returns (G1Point memory r) {
+ // https://eips.ethereum.org/EIPS/eip-197
+ // Elliptic curve points are encoded as a Jacobian pair (X, Y) where the point at infinity is encoded as (0, 0)
+ // TODO
+ if (p.X == 0 && p.Y == 1) {
+ p.Y = 0;
+ }
+ point_mul_into_dest(p, s, r);
+ return r;
+ }
+
+ function point_mul_assign(G1Point memory p, Fr memory s) internal view {
+ point_mul_into_dest(p, s, p);
+ }
+
+ function point_mul_into_dest(
+ G1Point memory p,
+ Fr memory s,
+ G1Point memory dest
+ ) internal view {
+ uint256[3] memory input;
+ input[0] = p.X;
+ input[1] = p.Y;
+ input[2] = s.value;
+ bool success;
+ assembly {
+ success := staticcall(gas(), 7, input, 0x60, dest, 0x40)
+ }
+ require(success);
+ }
+
+ function pairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
+ require(p1.length == p2.length);
+ uint256 elements = p1.length;
+ uint256 inputSize = elements * 6;
+ uint256[] memory input = new uint256[](inputSize);
+ for (uint256 i = 0; i < elements; ) {
+ input[i * 6 + 0] = p1[i].X;
+ input[i * 6 + 1] = p1[i].Y;
+ input[i * 6 + 2] = p2[i].X[0];
+ input[i * 6 + 3] = p2[i].X[1];
+ input[i * 6 + 4] = p2[i].Y[0];
+ input[i * 6 + 5] = p2[i].Y[1];
+ unchecked {
+ ++i;
+ }
+ }
+ uint256[1] memory out;
+ bool success;
+ assembly {
+ success := staticcall(gas(), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
+ }
+ require(success);
+ return out[0] != 0;
+ }
+
+ /// Convenience method for a pairing check for two pairs.
+ function pairingProd2(
+ G1Point memory a1,
+ G2Point memory a2,
+ G1Point memory b1,
+ G2Point memory b2
+ ) internal view returns (bool) {
+ G1Point[] memory p1 = new G1Point[](2);
+ G2Point[] memory p2 = new G2Point[](2);
+ p1[0] = a1;
+ p1[1] = b1;
+ p2[0] = a2;
+ p2[1] = b2;
+ return pairing(p1, p2);
+ }
+}
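
The `pairing` function packs every (G1, G2) pair into six 32-byte words before calling the pairing-check precompile at address 0x08: the G1 point's X and Y, then the G2 point's X and Y components in the c1·v + c0 ordering noted in `P2()`. A hedged off-chain sketch of the same packing with ethers; the point interfaces are illustrative only:

```typescript
import { BigNumber, ethers } from 'ethers';

// Illustrative point shapes mirroring the on-chain G1Point and G2Point structs
interface G1 { X: BigNumber; Y: BigNumber; }
interface G2 { X: [BigNumber, BigNumber]; Y: [BigNumber, BigNumber]; }

// Pack pairs into the six-words-per-pair layout expected by the bn254 pairing precompile
function encodePairingInput(p1: G1[], p2: G2[]): string {
    if (p1.length !== p2.length) throw new Error('length mismatch');
    const words: BigNumber[] = [];
    for (let i = 0; i < p1.length; i++) {
        words.push(p1[i].X, p1[i].Y, p2[i].X[0], p2[i].X[1], p2[i].Y[0], p2[i].Y[1]);
    }
    // Each word is encoded as a full 32-byte big-endian value, like the uint256 array in `pairing`
    return ethers.utils.defaultAbiCoder.encode(words.map(() => 'uint256'), words);
}
```
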
diff --git a/ethereum/contracts/zksync/libraries/PriorityQueue.sol b/ethereum/contracts/zksync/libraries/PriorityQueue.sol
new file mode 100644
index 000000000..cf635aae6
--- /dev/null
+++ b/ethereum/contracts/zksync/libraries/PriorityQueue.sol
@@ -0,0 +1,81 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+/// @notice The structure that contains meta information of the L2 transaction that was requested from L1
+/// @dev The weird size of fields was selected specifically to minimize the structure storage size
+/// @param canonicalTxHash Hashed L2 transaction data that is needed to process it
+/// @param expirationTimestamp Expiration timestamp for this request (must be satisfied before)
+/// @param layer2Tip Additional payment to the validator as an incentive to perform the operation
+struct PriorityOperation {
+ bytes32 canonicalTxHash;
+ uint64 expirationTimestamp;
+ uint192 layer2Tip;
+}
+
+/// @author Matter Labs
+/// @dev The library provides the API to interact with the priority queue container
+/// @dev Order of processing operations from the queue - FIFO (First in - first out)
+library PriorityQueue {
+ using PriorityQueue for Queue;
+
+ /// @notice Container that stores priority operations
+ /// @param data The inner mapping that saves priority operation by its index
+ /// @param head The pointer to the first unprocessed priority operation, equal to the tail if the queue is empty
+ /// @param tail The pointer to the free slot
+ struct Queue {
+ mapping(uint256 => PriorityOperation) data;
+ uint256 tail;
+ uint256 head;
+ }
+
+ /// @notice Returns zero if and only if no operations were processed from the queue
+ /// @return Index of the oldest priority operation that wasn't processed yet
+ function getFirstUnprocessedPriorityTx(Queue storage _queue) internal view returns (uint256) {
+ return _queue.head;
+ }
+
+ /// @return The total number of priority operations that were added to the priority queue, including all processed ones
+ function getTotalPriorityTxs(Queue storage _queue) internal view returns (uint256) {
+ return _queue.tail;
+ }
+
+ /// @return The total number of unprocessed priority operations in a priority queue
+ function getSize(Queue storage _queue) internal view returns (uint256) {
+ return uint256(_queue.tail - _queue.head);
+ }
+
+ /// @return Whether the priority queue contains no operations
+ function isEmpty(Queue storage _queue) internal view returns (bool) {
+ return _queue.tail == _queue.head;
+ }
+
+ /// @notice Add the priority operation to the end of the priority queue
+ function pushBack(Queue storage _queue, PriorityOperation memory _operation) internal {
+ // Save value into the stack to avoid double reading from the storage
+ uint256 tail = _queue.tail;
+
+ _queue.data[tail] = _operation;
+ _queue.tail = tail + 1;
+ }
+
+ /// @return The first unprocessed priority operation from the queue
+ function front(Queue storage _queue) internal view returns (PriorityOperation memory) {
+ require(!_queue.isEmpty(), "D"); // priority queue is empty
+
+ return _queue.data[_queue.head];
+ }
+
+ /// @notice Remove the first unprocessed priority operation from the queue
+ /// @return priorityOperation that was popped from the priority queue
+ function popFront(Queue storage _queue) internal returns (PriorityOperation memory priorityOperation) {
+ require(!_queue.isEmpty(), "s"); // priority queue is empty
+
+ // Save value into the stack to avoid double reading from the storage
+ uint256 head = _queue.head;
+
+ priorityOperation = _queue.data[head];
+ delete _queue.data[head];
+ _queue.head = head + 1;
+ }
+}
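
The queue is a plain FIFO over a mapping: `tail` points at the next free slot, `head` at the oldest unprocessed entry, and the size is simply `tail - head`; both pointers only ever grow. A small in-memory TypeScript model of the same pointer bookkeeping (illustrative only):

```typescript
// Illustrative in-memory model of PriorityQueue's head/tail bookkeeping
class FifoQueue<T> {
    private data = new Map<number, T>();
    private head = 0; // index of the first unprocessed element
    private tail = 0; // index of the next free slot

    getSize(): number {
        return this.tail - this.head;
    }

    isEmpty(): boolean {
        return this.tail === this.head;
    }

    pushBack(item: T): void {
        this.data.set(this.tail, item);
        this.tail += 1;
    }

    popFront(): T {
        if (this.isEmpty()) throw new Error('queue is empty');
        const item = this.data.get(this.head)!;
        this.data.delete(this.head); // mirrors `delete _queue.data[head]`
        this.head += 1;
        return item;
    }
}
```
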
diff --git a/ethereum/contracts/zksync/libraries/TranscriptLib.sol b/ethereum/contracts/zksync/libraries/TranscriptLib.sol
new file mode 100644
index 000000000..4ba7b839a
--- /dev/null
+++ b/ethereum/contracts/zksync/libraries/TranscriptLib.sol
@@ -0,0 +1,47 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "./PairingsBn254.sol";
+
+library TranscriptLib {
+ // flip 0xe000000000000000000000000000000000000000000000000000000000000000;
+ uint256 constant FR_MASK = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff;
+
+ uint32 constant DST_0 = 0;
+ uint32 constant DST_1 = 1;
+ uint32 constant DST_CHALLENGE = 2;
+
+ struct Transcript {
+ bytes32 state_0;
+ bytes32 state_1;
+ uint32 challenge_counter;
+ }
+
+ function new_transcript() internal pure returns (Transcript memory t) {
+ t.state_0 = bytes32(0);
+ t.state_1 = bytes32(0);
+ t.challenge_counter = 0;
+ }
+
+ function update_with_u256(Transcript memory self, uint256 value) internal pure {
+ bytes32 old_state_0 = self.state_0;
+ self.state_0 = keccak256(abi.encodePacked(DST_0, old_state_0, self.state_1, value));
+ self.state_1 = keccak256(abi.encodePacked(DST_1, old_state_0, self.state_1, value));
+ }
+
+ function update_with_fr(Transcript memory self, PairingsBn254.Fr memory value) internal pure {
+ update_with_u256(self, value.value);
+ }
+
+ function update_with_g1(Transcript memory self, PairingsBn254.G1Point memory p) internal pure {
+ update_with_u256(self, p.X);
+ update_with_u256(self, p.Y);
+ }
+
+ function get_challenge(Transcript memory self) internal pure returns (PairingsBn254.Fr memory challenge) {
+ bytes32 query = keccak256(abi.encodePacked(DST_CHALLENGE, self.state_0, self.state_1, self.challenge_counter));
+ self.challenge_counter += 1;
+ challenge = PairingsBn254.Fr({value: uint256(query) & FR_MASK});
+ }
+}
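
Each absorbed value updates two running keccak states that are domain-separated by `DST_0` and `DST_1`; challenges are derived from both states plus a counter and masked with `FR_MASK` so they fit into the scalar field. An off-chain TypeScript mirror using ethers — a sketch that follows the `abi.encodePacked` layouts above, not code from this repository:

```typescript
import { BigNumber, BigNumberish, ethers } from 'ethers';

const FR_MASK = BigNumber.from('0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff');
const DST_0 = 0;
const DST_1 = 1;
const DST_CHALLENGE = 2;

// Off-chain mirror of the Transcript struct
interface Transcript {
    state0: string;
    state1: string;
    challengeCounter: number;
}

function newTranscript(): Transcript {
    return { state0: ethers.constants.HashZero, state1: ethers.constants.HashZero, challengeCounter: 0 };
}

// Matches update_with_u256: both states absorb the value under different domain tags
function updateWithU256(t: Transcript, value: BigNumberish): void {
    const oldState0 = t.state0;
    t.state0 = ethers.utils.solidityKeccak256(
        ['uint32', 'bytes32', 'bytes32', 'uint256'],
        [DST_0, oldState0, t.state1, value]
    );
    t.state1 = ethers.utils.solidityKeccak256(
        ['uint32', 'bytes32', 'bytes32', 'uint256'],
        [DST_1, oldState0, t.state1, value]
    );
}

// Matches get_challenge: hash both states with the counter, then mask into the field
function getChallenge(t: Transcript): BigNumber {
    const query = ethers.utils.solidityKeccak256(
        ['uint32', 'bytes32', 'bytes32', 'uint32'],
        [DST_CHALLENGE, t.state0, t.state1, t.challengeCounter]
    );
    t.challengeCounter += 1;
    return BigNumber.from(query).and(FR_MASK);
}
```
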
diff --git a/ethereum/contracts/zksync/upgrade-initializers/DIamondUpgradeInit2.sol b/ethereum/contracts/zksync/upgrade-initializers/DIamondUpgradeInit2.sol
new file mode 100644
index 000000000..1e732e9e3
--- /dev/null
+++ b/ethereum/contracts/zksync/upgrade-initializers/DIamondUpgradeInit2.sol
@@ -0,0 +1,53 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../Config.sol";
+import "../facets/Mailbox.sol";
+import "../libraries/Diamond.sol";
+import "../../common/L2ContractHelper.sol";
+
+interface IOldContractDeployer {
+ function forceDeployOnAddress(
+ bytes32 _bytecodeHash,
+ address _newAddress,
+ bytes calldata _input
+ ) external payable returns (address);
+}
+
+/// @author Matter Labs
+contract DiamondUpgradeInit2 is MailboxFacet {
+ function forceDeploy2(
+ bytes calldata _upgradeDeployerCalldata,
+ bytes calldata _upgradeSystemContractsCalldata,
+ bytes[] calldata _factoryDeps
+ ) external payable returns (bytes32) {
+ // 1. Update bytecode for the deployer smart contract
+ _requestL2Transaction(
+ FORCE_DEPLOYER,
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ _upgradeDeployerCalldata,
+ $(PRIORITY_TX_MAX_GAS_LIMIT),
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ true,
+ address(0)
+ );
+
+ // 2. Redeploy other contracts by one transaction
+ _requestL2Transaction(
+ FORCE_DEPLOYER,
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ _upgradeSystemContractsCalldata,
+ $(PRIORITY_TX_MAX_GAS_LIMIT),
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ true,
+ address(0)
+ );
+
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
diff --git a/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit1.sol b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit1.sol
new file mode 100644
index 000000000..37e308dcb
--- /dev/null
+++ b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit1.sol
@@ -0,0 +1,33 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../facets/Mailbox.sol";
+import "../libraries/Diamond.sol";
+import "../../common/L2ContractHelper.sol";
+import "../Config.sol";
+
+/// @author Matter Labs
+contract DiamondUpgradeInit1 is MailboxFacet {
+ /// @dev Requests a priority operation to the deployer system contract on behalf of the force deployer address
+ /// @return The message indicating the successful force deployment of the contract on L2
+ function forceDeployL2Contract(
+ bytes calldata _forceDeployCalldata,
+ bytes[] calldata _factoryDeps,
+ uint256 _l2GasLimit
+ ) external payable returns (bytes32) {
+ _requestL2Transaction(
+ FORCE_DEPLOYER,
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ _forceDeployCalldata,
+ _l2GasLimit,
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ true,
+ address(0)
+ );
+
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
diff --git a/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit3.sol b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit3.sol
new file mode 100644
index 000000000..57e1cbc13
--- /dev/null
+++ b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit3.sol
@@ -0,0 +1,57 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../libraries/Diamond.sol";
+import "../facets/Base.sol";
+
+interface IOldDiamondCut {
+ function proposeDiamondCut(Diamond.FacetCut[] calldata _facetCuts, address _initAddress) external;
+
+ function cancelDiamondCutProposal() external;
+
+ function executeDiamondCutProposal(Diamond.DiamondCutData calldata _diamondCut) external;
+
+ function emergencyFreezeDiamond() external;
+
+ function unfreezeDiamond() external;
+
+ function approveEmergencyDiamondCutAsSecurityCouncilMember(bytes32 _diamondCutHash) external;
+
+ // FIXME: token holders should have the ability to cancel the upgrade
+
+ event DiamondCutProposal(Diamond.FacetCut[] _facetCuts, address _initAddress);
+
+ event DiamondCutProposalCancelation(uint256 currentProposalId, bytes32 indexed proposedDiamondCutHash);
+
+ event DiamondCutProposalExecution(Diamond.DiamondCutData _diamondCut);
+
+ event EmergencyFreeze();
+
+ event Unfreeze(uint256 lastDiamondFreezeTimestamp);
+
+ event EmergencyDiamondCutApproved(
+ address indexed _address,
+ uint256 currentProposalId,
+ uint256 securityCouncilEmergencyApprovals,
+ bytes32 indexed proposedDiamondCutHash
+ );
+}
+
+/// @author Matter Labs
+contract DiamondUpgradeInit3 is Base {
+ function upgrade(
+ uint256 _priorityTxMaxGasLimit,
+ IAllowList _allowList,
+ Verifier _verifier
+ ) external payable returns (bytes32) {
+ // Zero out the deprecated storage slots
+ delete s.__DEPRECATED_diamondCutStorage;
+
+ s.priorityTxMaxGasLimit = _priorityTxMaxGasLimit;
+ s.allowList = _allowList;
+ s.verifier = _verifier;
+
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
diff --git a/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit4.sol b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit4.sol
new file mode 100644
index 000000000..76575d3bd
--- /dev/null
+++ b/ethereum/contracts/zksync/upgrade-initializers/DiamondUpgradeInit4.sol
@@ -0,0 +1,56 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity ^0.8.0;
+
+import "../Config.sol";
+import "../facets/Mailbox.sol";
+import "../libraries/Diamond.sol";
+import "../../common/L2ContractHelper.sol";
+
+interface IOldContractDeployer {
+ struct ForceDeployment {
+ bytes32 bytecodeHash;
+ address newAddress;
+ uint256 value;
+ bytes input;
+ }
+
+ function forceDeployOnAddresses(ForceDeployment[] calldata _deployParams) external;
+}
+
+/// @author Matter Labs
+contract DiamondUpgradeInit4 is MailboxFacet {
+ function forceDeploy2(
+ bytes calldata _upgradeDeployerCalldata,
+ bytes calldata _upgradeSystemContractsCalldata,
+ bytes[] calldata _factoryDeps
+ ) external payable returns (bytes32) {
+ // 1. Update bytecode for the deployer smart contract
+ _requestL2Transaction(
+ FORCE_DEPLOYER,
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ _upgradeDeployerCalldata,
+ $(PRIORITY_TX_MAX_GAS_LIMIT),
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ true,
+ address(0)
+ );
+
+ // 2. Redeploy other contracts by one transaction
+ _requestL2Transaction(
+ FORCE_DEPLOYER,
+ DEPLOYER_SYSTEM_CONTRACT_ADDRESS,
+ 0,
+ _upgradeSystemContractsCalldata,
+ $(PRIORITY_TX_MAX_GAS_LIMIT),
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ _factoryDeps,
+ true,
+ address(0)
+ );
+
+ return Diamond.DIAMOND_INIT_SUCCESS_RETURN_VALUE;
+ }
+}
diff --git a/ethereum/hardhat.config.ts b/ethereum/hardhat.config.ts
new file mode 100644
index 000000000..f3903f5d5
--- /dev/null
+++ b/ethereum/hardhat.config.ts
@@ -0,0 +1,102 @@
+import '@nomiclabs/hardhat-waffle';
+import '@nomiclabs/hardhat-solpp';
+import '@nomiclabs/hardhat-ethers';
+import '@nomiclabs/hardhat-etherscan';
+import 'hardhat-typechain';
+import 'hardhat-contract-sizer';
+import { getNumberFromEnv } from './scripts/utils';
+
+// If no network is specified, load the default config from the .env file
+if (!process.env.CHAIN_ETH_NETWORK) {
+ require('dotenv').config();
+}
+
+const systemParams = require('../SystemConfig.json');
+
+const PRIORITY_TX_MAX_GAS_LIMIT = getNumberFromEnv('CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT');
+
+const prodConfig = {
+ UPGRADE_NOTICE_PERIOD: 0,
+ // PRIORITY_EXPIRATION: 101,
+ // NOTE: Should be greater than 0, otherwise zero approvals will be enough to make an instant upgrade!
+ SECURITY_COUNCIL_APPROVALS_FOR_EMERGENCY_UPGRADE: 1,
+ PRIORITY_TX_MAX_GAS_LIMIT,
+ DUMMY_VERIFIER: false
+};
+const testnetConfig = {
+ UPGRADE_NOTICE_PERIOD: 0,
+ // PRIORITY_EXPIRATION: 101,
+ // NOTE: Should be greater than 0, otherwise zero approvals will be enough to make an instant upgrade!
+ SECURITY_COUNCIL_APPROVALS_FOR_EMERGENCY_UPGRADE: 1,
+ PRIORITY_TX_MAX_GAS_LIMIT,
+ DUMMY_VERIFIER: true
+};
+const testConfig = {
+ UPGRADE_NOTICE_PERIOD: 0,
+ PRIORITY_EXPIRATION: 101,
+ SECURITY_COUNCIL_APPROVALS_FOR_EMERGENCY_UPGRADE: 2,
+ PRIORITY_TX_MAX_GAS_LIMIT,
+ DUMMY_VERIFIER: true
+};
+const localConfig = {
+ ...prodConfig,
+ DUMMY_VERIFIER: true
+};
+
+const contractDefs = {
+ rinkeby: testnetConfig,
+ ropsten: testnetConfig,
+ goerli: testnetConfig,
+ mainnet: prodConfig,
+ test: testConfig,
+ localhost: localConfig
+};
+
+export default {
+ defaultNetwork: 'env',
+ solidity: {
+ version: '0.8.17',
+ settings: {
+ optimizer: {
+ enabled: true,
+ runs: 200
+ },
+ outputSelection: {
+ '*': {
+ '*': ['storageLayout']
+ }
+ }
+ }
+ },
+ contractSizer: {
+ runOnCompile: false
+ },
+ paths: {
+ sources: './contracts'
+ },
+ solpp: {
+ defs: (() => {
+ const defs = process.env.CONTRACT_TESTS ? contractDefs.test : contractDefs[process.env.CHAIN_ETH_NETWORK];
+
+ return {
+ ...systemParams,
+ ...defs
+ };
+ })()
+ },
+ networks: {
+ env: {
+ url: process.env.ETH_CLIENT_WEB3_URL?.split(',')[0]
+ },
+ hardhat: {
+ allowUnlimitedContractSize: false,
+ forking: {
+ url: 'https://eth-goerli.g.alchemy.com/v2/' + process.env.ALCHEMY_KEY,
+ enabled: process.env.TEST_CONTRACTS_FORK === '1'
+ }
+ }
+ },
+ etherscan: {
+ apiKey: process.env.MISC_ETHERSCAN_API_KEY
+ }
+};
diff --git a/ethereum/package.json b/ethereum/package.json
new file mode 100644
index 000000000..90b514e3a
--- /dev/null
+++ b/ethereum/package.json
@@ -0,0 +1,70 @@
+{
+ "name": "l1-zksync-contracts",
+ "version": "0.1.0",
+ "license": "MIT",
+ "devDependencies": {
+ "@nomiclabs/hardhat-ethers": "^2.0.0",
+ "@nomiclabs/hardhat-etherscan": "^2.1.0",
+ "@nomiclabs/hardhat-solpp": "^2.0.0",
+ "@nomiclabs/hardhat-waffle": "^2.0.0",
+ "@openzeppelin/contracts": "4.8.0",
+ "@openzeppelin/contracts-upgradeable": "4.8.0",
+ "@typechain/ethers-v5": "^2.0.0",
+ "@types/argparse": "^1.0.36",
+ "@types/chai": "^4.2.21",
+ "@types/chai-as-promised": "^7.1.4",
+ "@types/mocha": "^8.2.3",
+ "argparse": "^1.0.10",
+ "axios": "^0.21.1",
+ "chai": "^4.3.4",
+ "chai-as-promised": "^7.1.1",
+ "chalk": "^4.1.0",
+ "collections": "^5.1.12",
+ "commander": "^8.3.0",
+ "ethereum-waffle": "^3.0.0",
+ "ethereumjs-abi": "^0.6.8",
+ "ethers": "^5.7.0",
+ "ethjs": "^0.4.0",
+ "fs": "^0.0.1-security",
+ "handlebars": "^4.7.6",
+ "hardhat": "=2.12.4",
+ "hardhat-contract-sizer": "^2.0.2",
+ "hardhat-typechain": "^0.3.3",
+ "jsonwebtoken": "^8.5.1",
+ "merkletreejs": "^0.2.32",
+ "mocha": "^9.0.2",
+ "path": "^0.12.7",
+ "prettier": "^1.18.2",
+ "prettier-plugin-solidity": "=1.0.0-dev.22",
+ "querystring": "^0.2.0",
+ "solc": "0.8.17",
+ "ts-generator": "^0.1.1",
+ "ts-node": "^10.1.0",
+ "typechain": "^4.0.0",
+ "typescript": "^4.3.5"
+ },
+ "scripts": {
+ "build": "hardhat compile",
+ "clean": "hardhat clean",
+ "test": "CONTRACT_TESTS=1 yarn run hardhat test test/unit_tests/*.spec.ts --network hardhat",
+ "test:fork": "CONTRACT_TESTS=1 TEST_CONTRACTS_FORK=1 yarn run hardhat test test/unit_tests/*.fork.ts --network hardhat",
+ "deploy-no-build": "ts-node scripts/deploy.ts",
+ "allow-list-manager": "ts-node scripts/allow-list-manager.ts",
+ "deploy-erc20": "ts-node scripts/deploy-erc20.ts",
+ "token-info": "ts-node scripts/token-info.ts",
+ "deploy-testkit": "ts-node scripts/deploy-testkit.ts",
+ "diamond-upgrade": "ts-node scripts/diamond-upgrade.ts",
+ "verify": "hardhat run --network env scripts/verify.ts",
+ "deploy-testnet-erc20": "ts-node scripts/deploy-testnet-token.ts",
+ "read-variable": "ts-node scripts/read-variable.ts",
+ "initialize-bridges": "ts-node scripts/initialize-bridges.ts",
+ "initialize-allow-list": "ts-node scripts/initialize-l1-allow-list.ts",
+ "upgrade-1": "ts-node scripts/upgrades/upgrade-1.ts",
+ "upgrade-2": "ts-node scripts/upgrades/upgrade-2.ts",
+ "upgrade-3": "ts-node scripts/upgrades/upgrade-3.ts",
+ "upgrade-4": "ts-node scripts/upgrades/upgrade-4.ts"
+ },
+ "dependencies": {
+ "dotenv": "^16.0.3"
+ }
+}
diff --git a/ethereum/scripts/allow-list-manager.ts b/ethereum/scripts/allow-list-manager.ts
new file mode 100644
index 000000000..3b1e12325
--- /dev/null
+++ b/ethereum/scripts/allow-list-manager.ts
@@ -0,0 +1,125 @@
+import { Command } from 'commander';
+import { ethers } from 'hardhat';
+import { Deployer } from '../src.ts/deploy';
+import { web3Provider, print } from './utils';
+import { hexlify } from 'ethers/lib/utils';
+
+const provider = web3Provider();
+
+interface PermissionToCall {
+ caller: string;
+ target: string;
+ functionSig: string;
+ enable: boolean;
+}
+
+interface PublicAccess {
+ target: string;
+ enable: boolean;
+}
+
+// Get interface for the L1 allow list smart contract
+function getAllowListInterface() {
+ // Create the dummy wallet with provider to get contracts from `Deployer`
+ const dummyWallet = ethers.Wallet.createRandom().connect(provider);
+ const deployer = new Deployer({ deployWallet: dummyWallet });
+
+ return deployer.l1AllowList(dummyWallet).interface;
+}
+
+// Get the Solidity 4-byte function selector from the function signature
+// https://solidity-by-example.org/function-selector/
+function functionSelector(functionSignature: string) {
+ return hexlify(ethers.utils.solidityKeccak256(['string'], [functionSignature])).slice(0, 10);
+}
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('allow-list-manager');
+
+ const prepareCalldataProgram = program.command('prepare-calldata');
+
+ prepareCalldataProgram
+ .command('set-batch-permission-to-call <permission-to-call>')
+ .action(async (permissionToCall: string) => {
+ const allowList = getAllowListInterface();
+
+ const parameters: Array<PermissionToCall> = JSON.parse(permissionToCall);
+ // Extend parameters with the function selector, to check it manually
+ const extendedParameters = parameters.map((param) =>
+ Object.assign(param, { functionSel: functionSelector(param.functionSig) })
+ );
+ print('parameters', extendedParameters);
+
+ const callers = extendedParameters.map((permissionToCall) => permissionToCall.caller);
+ const targets = extendedParameters.map((permissionToCall) => permissionToCall.target);
+ const functionSelectors = extendedParameters.map((permissionToCall) => permissionToCall.functionSel);
+ const enables = extendedParameters.map((permissionToCall) => permissionToCall.enable);
+
+ const calldata = allowList.encodeFunctionData('setBatchPermissionToCall', [
+ callers,
+ targets,
+ functionSelectors,
+ enables
+ ]);
+ print('setBatchPermissionToCall', calldata);
+ });
+
+ prepareCalldataProgram
+ .command('set-permission-to-call')
+ .requiredOption('--caller <caller>')
+ .requiredOption('--target <target>')
+ .requiredOption('--function-sig <function-sig>')
+ .requiredOption('--enable <enable>')
+ .action(async (cmd) => {
+ const allowList = getAllowListInterface();
+ const caller = cmd.caller;
+ const target = cmd.target;
+ const functionSig = cmd.functionSig;
+ const functionSel = functionSelector(functionSig);
+ const enable = cmd.enable;
+
+ print('parameters', { caller, target, functionSig, functionSel, enable });
+
+ const calldata = allowList.encodeFunctionData('setPermissionToCall', [caller, target, functionSel, enable]);
+ print('setPermissionToCall', calldata);
+ });
+
+ prepareCalldataProgram
+ .command('set-public-access')
+ .requiredOption('--target <target>')
+ .requiredOption('--enable <enable>')
+ .action(async (cmd) => {
+ const allowList = getAllowListInterface();
+ const target = cmd.target;
+ const enable = cmd.enable;
+
+ print('parameters', { target, enable });
+
+ const calldata = allowList.encodeFunctionData('setPublicAccess', [target, enable]);
+ print('setPublicAccess', calldata);
+ });
+
+ prepareCalldataProgram.command('set-batch-public-access <public-access>').action(async (publicAccess: string) => {
+ const allowList = getAllowListInterface();
+
+ const parameters: Array<PublicAccess> = JSON.parse(publicAccess);
+ print('parameters', parameters);
+
+ const targets = parameters.map((publicAccess) => publicAccess.target);
+ const enables = parameters.map((publicAccess) => publicAccess.enable);
+
+ const calldata = allowList.encodeFunctionData('setBatchPublicAccess', [targets, enables]);
+ print('setBatchPublicAccess', calldata);
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err);
+ process.exit(1);
+ });
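
The `functionSelector` helper in this script is simply the first 4 bytes of the keccak-256 hash of the canonical function signature. A quick usage sketch (the ERC-20 `transfer` signature is used only as a familiar example):

```typescript
import { ethers } from 'ethers';
import { hexlify } from 'ethers/lib/utils';

// Same computation as `functionSelector` in the script above
function functionSelector(functionSignature: string): string {
    return hexlify(ethers.utils.solidityKeccak256(['string'], [functionSignature])).slice(0, 10);
}

// Prints '0xa9059cbb', the well-known ERC-20 transfer selector
console.log(functionSelector('transfer(address,uint256)'));
```
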
diff --git a/ethereum/scripts/deploy-erc20.ts b/ethereum/scripts/deploy-erc20.ts
new file mode 100644
index 000000000..5023d7afd
--- /dev/null
+++ b/ethereum/scripts/deploy-erc20.ts
@@ -0,0 +1,97 @@
+import * as hardhat from 'hardhat';
+import '@nomiclabs/hardhat-ethers';
+import { Command } from 'commander';
+import { Wallet } from 'ethers';
+import { parseEther } from 'ethers/lib/utils';
+import { web3Provider } from './utils';
+import * as fs from 'fs';
+import * as path from 'path';
+
+const DEFAULT_ERC20 = 'TestnetERC20Token';
+
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+const provider = web3Provider();
+const wallet = Wallet.fromMnemonic(ethTestConfig.mnemonic, "m/44'/60'/0'/0/1").connect(provider);
+
+type Token = {
+ address: string | null;
+ name: string;
+ symbol: string;
+ decimals: number;
+};
+
+type TokenDescription = Token & {
+ implementation?: string;
+};
+
+async function deployToken(token: TokenDescription): Promise<Token> {
+ token.implementation = token.implementation || DEFAULT_ERC20;
+ const tokenFactory = await hardhat.ethers.getContractFactory(token.implementation, wallet);
+ const erc20 = await tokenFactory.deploy(token.name, token.symbol, token.decimals, { gasLimit: 5000000 });
+ await erc20.deployTransaction.wait();
+
+ await erc20.mint(wallet.address, parseEther('3000000000'));
+ for (let i = 0; i < 10; ++i) {
+ const testWallet = Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, "m/44'/60'/0'/0/" + i).connect(
+ provider
+ );
+ await erc20.mint(testWallet.address, parseEther('3000000000'));
+ }
+ token.address = erc20.address;
+
+ // Remove the unneeded field
+ if (token.implementation) {
+ delete token.implementation;
+ }
+
+ return token;
+}
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('deploy-erc20').description('deploy testnet erc20 token');
+
+ program
+ .command('add')
+ .option('-n, --token-name <tokenName>')
+ .option('-s, --symbol <symbol>')
+ .option('-d, --decimals <decimals>')
+ .option('-i --implementation <implementation>')
+ .description('Adds a new token with the given fields')
+ .action(async (cmd) => {
+ const token: TokenDescription = {
+ address: null,
+ name: cmd.tokenName,
+ symbol: cmd.symbol,
+ decimals: cmd.decimals,
+ implementation: cmd.implementation
+ };
+ console.log(JSON.stringify(await deployToken(token), null, 2));
+ });
+
+ program
+ .command('add-multi <tokens_json>')
+ .description('Adds multiple tokens given in JSON format')
+ .action(async (tokens_json: string) => {
+ const tokens: Array<TokenDescription> = JSON.parse(tokens_json);
+ const result = [];
+
+ for (const token of tokens) {
+ result.push(await deployToken(token));
+ }
+
+ console.log(JSON.stringify(result, null, 2));
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err.message || err);
+ process.exit(1);
+ });
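
For reference, the `add-multi` command expects a single JSON argument: an array shaped like the script's `TokenDescription` type, with `address` left null (it is filled in after deployment) and `implementation` optional. A hypothetical input, with made-up token names:

```typescript
// Hypothetical input for `add-multi`; the token names and symbols are invented
const exampleTokens = [
    { address: null, name: 'Test DAI', symbol: 'DAI', decimals: 18 },
    { address: null, name: 'Test USDC', symbol: 'USDC', decimals: 6, implementation: 'TestnetERC20Token' }
];

// The script receives it as one JSON string argument
console.log(JSON.stringify(exampleTokens));
```
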
diff --git a/ethereum/scripts/deploy-testkit.ts b/ethereum/scripts/deploy-testkit.ts
new file mode 100644
index 000000000..0e02a1c00
--- /dev/null
+++ b/ethereum/scripts/deploy-testkit.ts
@@ -0,0 +1,71 @@
+import * as hardhat from 'hardhat';
+import '@nomiclabs/hardhat-ethers';
+import { Command } from 'commander';
+import { ethers, Wallet } from 'ethers';
+import { Deployer } from '../src.ts/deploy';
+
+import * as fs from 'fs';
+import * as path from 'path';
+import { web3Provider } from './utils';
+
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('deploy').description('deploy testkit contracts');
+
+ program
+ .requiredOption('--genesis-root <genesis-root>')
+ .requiredOption('--genesis-rollup-leaf-index <genesis-rollup-leaf-index>')
+ .action(async (cmd) => {
+ process.env.CONTRACTS_GENESIS_ROOT = cmd.genesisRoot;
+ process.env.CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX = cmd.genesisRollupLeafIndex;
+
+ if (process.env.CHAIN_ETH_NETWORK !== 'test') {
+ console.error('This deploy script is only for localhost-test network');
+ process.exit(1);
+ }
+
+ const provider = web3Provider();
+ provider.pollingInterval = 10;
+
+ const deployWallet = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic, "m/44'/60'/0'/0/0").connect(
+ provider
+ );
+
+ const deployer = new Deployer({ deployWallet, verbose: true });
+ await deployer.deployAll();
+
+ const zkSyncContract = deployer.zkSyncContract(deployWallet);
+ await (await zkSyncContract.setValidator(deployWallet.address, true)).wait();
+
+ const tokenFactory = await hardhat.ethers.getContractFactory('TestnetERC20Token', deployWallet);
+ const erc20 = await tokenFactory.deploy('Matter Labs Trial Token', 'MLTT', 18, { gasLimit: 5000000 });
+
+ console.log(`CONTRACTS_TEST_ERC20=${erc20.address}`);
+
+ const failOnReceiveFactory = await hardhat.ethers.getContractFactory('FailOnReceive', deployWallet);
+ const failOnReceive = await failOnReceiveFactory.deploy({
+ gasLimit: 5000000
+ });
+ console.log(`CONTRACTS_FAIL_ON_RECEIVE=${failOnReceive.address}`);
+
+ for (let i = 0; i < 10; ++i) {
+ const testWallet = Wallet.fromMnemonic(ethTestConfig.test_mnemonic, "m/44'/60'/0'/0/" + i).connect(
+ provider
+ );
+ await (await erc20.mint(testWallet.address, '0x4B3B4CA85A86C47A098A224000000000')).wait();
+ }
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err.message || err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/deploy-testnet-token.ts b/ethereum/scripts/deploy-testnet-token.ts
new file mode 100644
index 000000000..ee847741f
--- /dev/null
+++ b/ethereum/scripts/deploy-testnet-token.ts
@@ -0,0 +1,92 @@
+import * as hardhat from 'hardhat';
+import '@nomiclabs/hardhat-ethers';
+import { Wallet } from 'ethers';
+import * as fs from 'fs';
+import * as path from 'path';
+import { ArgumentParser } from 'argparse';
+import { web3Provider } from './utils';
+
+const mainnetTokens = require(`${process.env.ZKSYNC_HOME}/etc/tokens/mainnet`);
+
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+async function main() {
+ const parser = new ArgumentParser({
+ version: '0.1.0',
+ addHelp: true,
+ description: 'Deploy contracts and publish them on Etherscan'
+ });
+ parser.addArgument('--publish', {
+ required: false,
+ action: 'storeTrue',
+ help: 'Only publish code for deployed tokens'
+ });
+ parser.addArgument('--deployerPrivateKey', { required: false, help: 'Wallet used to deploy contracts' });
+ const args = parser.parseArgs(process.argv.slice(2));
+
+ const provider = web3Provider();
+ const wallet = args.deployerPrivateKey
+ ? new Wallet(args.deployerPrivateKey, provider)
+ : Wallet.fromMnemonic(ethTestConfig.mnemonic, "m/44'/60'/0'/0/1").connect(provider);
+
+ if (process.env.CHAIN_ETH_NETWORK === 'mainnet') {
+ throw new Error('Test ERC20 tokens should not be deployed to mainnet');
+ }
+
+ if (args.publish) {
+ // console.log('Publishing source code');
+ // let verifiedOnce = false;
+ // const networkTokens = require(`${process.env.ZKSYNC_HOME}/etc/tokens/${process.env.ETH_NETWORK}`);
+ // for (const token of networkTokens) {
+ // if (verifiedOnce) {
+ // break;
+ // }
+ // try {
+ // console.log(`Publishing code for : ${token.symbol}, ${token.address}`);
+ // const constructorArgs = [
+ // `${token.name} (${process.env.CHAIN_ETH_NETWORK})`,
+ // token.symbol,
+ // token.decimals
+ // ];
+ // const rawArgs = encodeConstructorArgs(contractCode, constructorArgs);
+ // await publishSourceCodeToEtherscan(token.address, 'TestnetERC20Token', rawArgs, 'contracts/test');
+ // verifiedOnce = true;
+ // } catch (e) {
+ // console.log('Error failed to verified code:', e);
+ // }
+ // }
+ // return;
+ }
+
+ const result = [];
+
+ for (const token of mainnetTokens) {
+ const constructorArgs = [
+ `${token.name} (${process.env.CHAIN_ETH_NETWORK})`,
+ token.symbol,
+ token.decimals,
+ { gasLimit: 800000 }
+ ];
+
+ console.log(`Deploying testnet ERC20: ${constructorArgs.toString()}`);
+ const tokenFactory = await hardhat.ethers.getContractFactory('TestnetERC20Token', wallet);
+ const erc20 = await tokenFactory.deploy(...constructorArgs);
+
+ const testnetToken = token;
+ testnetToken.address = erc20.address;
+ result.push(testnetToken);
+ }
+
+ fs.writeFileSync(
+ `${process.env.ZKSYNC_HOME}/etc/tokens/${process.env.CHAIN_ETH_NETWORK}.json`,
+ JSON.stringify(result, null, 2)
+ );
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err.message || err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/deploy-withdrawal-helpers.ts b/ethereum/scripts/deploy-withdrawal-helpers.ts
new file mode 100644
index 000000000..e3336dc7e
--- /dev/null
+++ b/ethereum/scripts/deploy-withdrawal-helpers.ts
@@ -0,0 +1,55 @@
+// This script deploys the contracts required both for production and
+// for testing of the contracts required for the `withdrawal-helpers` library
+
+import * as hardhat from 'hardhat';
+import '@nomiclabs/hardhat-ethers';
+import { ethers } from 'ethers';
+import * as fs from 'fs';
+import * as path from 'path';
+import { web3Provider } from './utils';
+
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+async function main() {
+ try {
+ if (!['test', 'localhost'].includes(process.env.CHAIN_ETH_NETWORK)) {
+ console.error('This deploy script is only for localhost-test network');
+ process.exit(1);
+ }
+
+ const provider = web3Provider();
+ provider.pollingInterval = 10;
+
+ const deployWallet = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic, "m/44'/60'/0'/0/0").connect(
+ provider
+ );
+ const multicallFactory = await hardhat.ethers.getContractFactory('Multicall', deployWallet);
+ const multicallContract = await multicallFactory.deploy({
+ gasLimit: 5000000
+ });
+
+ const revertReceiveFactory = await hardhat.ethers.getContractFactory('RevertReceiveAccount', deployWallet);
+ const revertReceiveAccount = await revertReceiveFactory.deploy({
+ gasLimit: 5000000
+ });
+
+ const outConfig = {
+ multicall_address: multicallContract.address,
+ revert_receive_address: revertReceiveAccount.address
+ };
+ const outConfigPath = path.join(process.env.ZKSYNC_HOME, 'etc/test_config/volatile/withdrawal-helpers.json');
+ fs.writeFileSync(outConfigPath, JSON.stringify(outConfig), { encoding: 'utf-8' });
+ process.exit(0);
+ } catch (err) {
+ console.log(`Error: ${err}`);
+ process.exit(1);
+ }
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err.message || err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/deploy.ts b/ethereum/scripts/deploy.ts
new file mode 100644
index 000000000..fe5ea3386
--- /dev/null
+++ b/ethereum/scripts/deploy.ts
@@ -0,0 +1,80 @@
+import { Command } from 'commander';
+import { Wallet, ethers } from 'ethers';
+import { Deployer } from '../src.ts/deploy';
+import { formatUnits, parseUnits } from 'ethers/lib/utils';
+import * as fs from 'fs';
+import * as path from 'path';
+import { web3Provider } from './utils';
+
+const provider = web3Provider();
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('deploy').description('deploy L1 contracts');
+
+ program
+ .option('--private-key <private-key>')
+ .option('--gas-price <gas-price>')
+ .option('--nonce <nonce>')
+ .option('--governor-address <governor-address>')
+ .option('--create2-salt <create2-salt>')
+ .option('--diamond-upgrade-init <version>')
+ .action(async (cmd) => {
+ const deployWallet = cmd.privateKey
+ ? new Wallet(cmd.privateKey, provider)
+ : Wallet.fromMnemonic(
+ process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic,
+ "m/44'/60'/0'/0/1"
+ ).connect(provider);
+ console.log(`Using deployer wallet: ${deployWallet.address}`);
+
+ const governorAddress = cmd.governorAddress ? cmd.governorAddress : deployWallet.address;
+ console.log(`Using governor address: ${governorAddress}`);
+
+ const gasPrice = cmd.gasPrice ? parseUnits(cmd.gasPrice, 'gwei') : await provider.getGasPrice();
+ console.log(`Using gas price: ${formatUnits(gasPrice, 'gwei')} gwei`);
+
+ let nonce = cmd.nonce ? parseInt(cmd.nonce) : await deployWallet.getTransactionCount();
+ console.log(`Using nonce: ${nonce}`);
+
+ const create2Salt = cmd.create2Salt ? cmd.create2Salt : ethers.utils.hexlify(ethers.utils.randomBytes(32));
+
+ const deployer = new Deployer({
+ deployWallet,
+ governorAddress,
+ verbose: true
+ });
+
+ // The Create2 factory is already deployed on the public networks, so only deploy it on a local node
+ if (process.env.CHAIN_ETH_NETWORK === 'localhost') {
+ await deployer.deployCreate2Factory({ gasPrice, nonce });
+ nonce++;
+ }
+
+ // Deploy diamond upgrade init contract if needed
+ const diamondUpgradeContractVersion = cmd.diamondUpgradeInit || 1;
+ if (diamondUpgradeContractVersion) {
+ await deployer.deployDiamondUpgradeInit(create2Salt, diamondUpgradeContractVersion, {
+ gasPrice,
+ nonce
+ });
+ nonce++;
+ }
+
+ await deployer.deployAllowList(create2Salt, { gasPrice, nonce });
+ await deployer.deployZkSyncContract(create2Salt, gasPrice, nonce + 1);
+ await deployer.deployBridgeContracts(create2Salt, gasPrice); // Do not pass nonce, since it was incremented after deploying the zkSync contracts
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/diamond-upgrade.ts b/ethereum/scripts/diamond-upgrade.ts
new file mode 100644
index 000000000..2f7b5301b
--- /dev/null
+++ b/ethereum/scripts/diamond-upgrade.ts
@@ -0,0 +1,113 @@
+import * as hardhat from 'hardhat';
+import { Command } from 'commander';
+import { diamondCut } from '../src.ts/diamondCut';
+import { ethers } from 'hardhat';
+import { Deployer } from '../src.ts/deploy';
+import { print, web3Provider } from './utils';
+import { FacetCut, getAllSelectors } from '../src.ts/diamondCut';
+
+const provider = web3Provider();
+const ZERO_ADDRESS = ethers.constants.AddressZero;
+
+function getZkSyncContract() {
+ // Create the dummy wallet with provider to get contracts from `Deployer`
+ const dummyWallet = ethers.Wallet.createRandom().connect(provider);
+ const deployer = new Deployer({ deployWallet: dummyWallet });
+
+ return deployer.zkSyncContract(dummyWallet);
+}
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('upgrade-diamond');
+
+ program.command('get-contract-selectors <contract-name>').action(async (contractName: string) => {
+ const contract = await hardhat.ethers.getContractAt(contractName, ZERO_ADDRESS);
+ const selectors = getAllSelectors(contract.interface);
+
+ print('Contract selectors', selectors);
+ });
+
+ program.command('diamond-loupe-view').action(async () => {
+ const facets = await getZkSyncContract().facets();
+
+ print('Facets', facets);
+ });
+
+ program
+ .command('legacy-prepare-upgrade-calldata <facetCuts>')
+ .option('--init-address <init-address>')
+ .option('--init-data <init-data>')
+ .action(async (facetCutsData: string, cmd) => {
+ const diamondCutFacet = await hardhat.ethers.getContractAt('IOldDiamondCut', ZERO_ADDRESS);
+
+ // Encode data for the upgrade call
+ const facetCuts: Array<FacetCut> = JSON.parse(facetCutsData);
+
+ const initAddress = cmd.initAddress ? cmd.initAddress : ZERO_ADDRESS;
+ const initData = cmd.initData ? cmd.initData : '0x';
+
+ const upgradeParam = diamondCut(facetCuts, initAddress, initData);
+ print('DiamondCutData', upgradeParam);
+
+ // Get transaction data of the `proposeDiamondCut`
+ const proposeUpgrade = await diamondCutFacet.interface.encodeFunctionData('proposeDiamondCut', [
+ upgradeParam.facetCuts,
+ upgradeParam.initAddress
+ ]);
+
+ // Get transaction data of the `executeDiamondCutProposal`
+ const executeUpgrade = await diamondCutFacet.interface.encodeFunctionData('executeDiamondCutProposal', [
+ upgradeParam
+ ]);
+
+ print('proposeUpgrade', proposeUpgrade);
+ print('executeUpgrade', executeUpgrade);
+ });
+
+ program
+ .command('prepare-upgrade-calldata <facetCuts>')
+ .option('--init-address <init-address>')
+ .option('--init-data <init-data>')
+ .option('--proposal-id <proposal-id>')
+ .action(async (facetCutsData: string, cmd) => {
+ const diamondCutFacet = await hardhat.ethers.getContractAt('DiamondCutFacet', ZERO_ADDRESS);
+
+ // Encode data for the upgrade call
+ const facetCuts: Array<FacetCut> = JSON.parse(facetCutsData);
+
+ const initAddress = cmd.initAddress ? cmd.initAddress : ZERO_ADDRESS;
+ const initData = cmd.initData ? cmd.initData : '0x';
+ const proposalId = cmd.proposalId
+ ? cmd.proposalId
+ : (await getZkSyncContract().getCurrentProposalId()).add(1);
+
+ const upgradeParam = diamondCut(facetCuts, initAddress, initData);
+ print('DiamondCut', upgradeParam);
+
+ // Get transaction data of the `proposeTransparentUpgrade`
+ const proposeUpgrade = await diamondCutFacet.interface.encodeFunctionData('proposeTransparentUpgrade', [
+ upgradeParam,
+ proposalId
+ ]);
+
+ // Get transaction data of the `executeUpgrade`
+ const executeUpgrade = await diamondCutFacet.interface.encodeFunctionData('executeUpgrade', [
+ upgradeParam,
+ ethers.constants.HashZero
+ ]);
+
+ print('proposeUpgrade', proposeUpgrade);
+ print('executeUpgrade', executeUpgrade);
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/initialize-bridges.ts b/ethereum/scripts/initialize-bridges.ts
new file mode 100644
index 000000000..132901a94
--- /dev/null
+++ b/ethereum/scripts/initialize-bridges.ts
@@ -0,0 +1,168 @@
+import { Command } from 'commander';
+import { ethers, Wallet } from 'ethers';
+import { Deployer } from '../src.ts/deploy';
+import { formatUnits, parseUnits } from 'ethers/lib/utils';
+import {
+ computeL2Create2Address,
+ web3Provider,
+ hashL2Bytecode,
+ applyL1ToL2Alias,
+ getNumberFromEnv,
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA
+} from './utils';
+
+import * as fs from 'fs';
+import * as path from 'path';
+
+const provider = web3Provider();
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+const contractArtifactsPath = path.join(process.env.ZKSYNC_HOME as string, 'contracts/zksync/artifacts-zk/');
+
+const l2BridgeArtifactsPath = path.join(contractArtifactsPath, 'cache-zk/solpp-generated-contracts/bridge/');
+
+const openzeppelinTransparentProxyArtifactsPath = path.join(
+ contractArtifactsPath,
+ '@openzeppelin/contracts/proxy/transparent/'
+);
+const openzeppelinBeaconProxyArtifactsPath = path.join(contractArtifactsPath, '@openzeppelin/contracts/proxy/beacon');
+
+function readBytecode(path: string, fileName: string) {
+ return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${fileName}.json`, { encoding: 'utf-8' })).bytecode;
+}
+
+function readInterface(path: string, fileName: string) {
+ const abi = JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${fileName}.json`, { encoding: 'utf-8' })).abi;
+ return new ethers.utils.Interface(abi);
+}
+
+const L2_ERC20_BRIDGE_PROXY_BYTECODE = readBytecode(
+ openzeppelinTransparentProxyArtifactsPath,
+ 'TransparentUpgradeableProxy'
+);
+const L2_ERC20_BRIDGE_IMPLEMENTATION_BYTECODE = readBytecode(l2BridgeArtifactsPath, 'L2ERC20Bridge');
+const L2_STANDARD_ERC20_IMPLEMENTATION_BYTECODE = readBytecode(l2BridgeArtifactsPath, 'L2StandardERC20');
+const L2_STANDARD_ERC20_PROXY_BYTECODE = readBytecode(openzeppelinBeaconProxyArtifactsPath, 'BeaconProxy');
+const L2_STANDARD_ERC20_PROXY_FACTORY_BYTECODE = readBytecode(
+ openzeppelinBeaconProxyArtifactsPath,
+ 'UpgradeableBeacon'
+);
+const L2_ERC20_BRIDGE_INTERFACE = readInterface(l2BridgeArtifactsPath, 'L2ERC20Bridge');
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('initialize-bridges');
+
+ program
+ .option('--private-key <private-key>')
+ .option('--gas-price <gas-price>')
+ .option('--nonce <nonce>')
+ .action(async (cmd) => {
+ const deployWallet = cmd.privateKey
+ ? new Wallet(cmd.privateKey, provider)
+ : Wallet.fromMnemonic(
+ process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic,
+ "m/44'/60'/0'/0/0"
+ ).connect(provider);
+ console.log(`Using deployer wallet: ${deployWallet.address}`);
+
+ const gasPrice = cmd.gasPrice ? parseUnits(cmd.gasPrice, 'gwei') : await provider.getGasPrice();
+ console.log(`Using gas price: ${formatUnits(gasPrice, 'gwei')} gwei`);
+
+ const nonce = cmd.nonce ? parseInt(cmd.nonce) : await deployWallet.getTransactionCount();
+ console.log(`Using nonce: ${nonce}`);
+
+ const deployer = new Deployer({
+ deployWallet,
+ governorAddress: deployWallet.address,
+ verbose: true
+ });
+
+ const zkSync = deployer.zkSyncContract(deployWallet);
+ const erc20Bridge = deployer.defaultERC20Bridge(deployWallet);
+
+ const priorityTxMaxGasLimit = getNumberFromEnv('CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT');
+ const governorAddress = await zkSync.getGovernor();
+ const abiCoder = new ethers.utils.AbiCoder();
+
+ const l2ERC20BridgeImplAddr = computeL2Create2Address(
+ applyL1ToL2Alias(erc20Bridge.address),
+ L2_ERC20_BRIDGE_IMPLEMENTATION_BYTECODE,
+ '0x',
+ ethers.constants.HashZero
+ );
+
+ const proxyInitializationParams = L2_ERC20_BRIDGE_INTERFACE.encodeFunctionData('initialize', [
+ erc20Bridge.address,
+ hashL2Bytecode(L2_STANDARD_ERC20_PROXY_BYTECODE),
+ governorAddress
+ ]);
+ const l2ERC20BridgeProxyAddr = computeL2Create2Address(
+ applyL1ToL2Alias(erc20Bridge.address),
+ L2_ERC20_BRIDGE_PROXY_BYTECODE,
+ ethers.utils.arrayify(
+ abiCoder.encode(
+ ['address', 'address', 'bytes'],
+ [l2ERC20BridgeImplAddr, governorAddress, proxyInitializationParams]
+ )
+ ),
+ ethers.constants.HashZero
+ );
+
+ const l2StandardToken = computeL2Create2Address(
+ l2ERC20BridgeProxyAddr,
+ L2_STANDARD_ERC20_IMPLEMENTATION_BYTECODE,
+ '0x',
+ ethers.constants.HashZero
+ );
+ const l2TokenFactoryAddr = computeL2Create2Address(
+ l2ERC20BridgeProxyAddr,
+ L2_STANDARD_ERC20_PROXY_FACTORY_BYTECODE,
+ ethers.utils.arrayify(abiCoder.encode(['address'], [l2StandardToken])),
+ ethers.constants.HashZero
+ );
+
+ const independentInitialization = [
+ zkSync.requestL2Transaction(
+ ethers.constants.AddressZero,
+ 0,
+ '0x',
+ priorityTxMaxGasLimit,
+ DEFAULT_L2_GAS_PRICE_PER_PUBDATA,
+ [L2_STANDARD_ERC20_PROXY_FACTORY_BYTECODE, L2_STANDARD_ERC20_IMPLEMENTATION_BYTECODE],
+ deployWallet.address,
+ { gasPrice, nonce }
+ ),
+ erc20Bridge.initialize(
+ [
+ L2_ERC20_BRIDGE_IMPLEMENTATION_BYTECODE,
+ L2_ERC20_BRIDGE_PROXY_BYTECODE,
+ L2_STANDARD_ERC20_PROXY_BYTECODE
+ ],
+ l2TokenFactoryAddr,
+ governorAddress,
+ {
+ gasPrice,
+ nonce: nonce + 1
+ }
+ )
+ ];
+
+ const txs = await Promise.all(independentInitialization);
+ const receipts = await Promise.all(txs.map((tx) => tx.wait()));
+
+ console.log(`ERC20 bridge initialized, gasUsed: ${receipts[1].gasUsed.toString()}`);
+ console.log(`CONTRACTS_L2_ERC20_BRIDGE_ADDR=${await erc20Bridge.l2Bridge()}`);
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/initialize-l1-allow-list.ts b/ethereum/scripts/initialize-l1-allow-list.ts
new file mode 100644
index 000000000..bdad8ef03
--- /dev/null
+++ b/ethereum/scripts/initialize-l1-allow-list.ts
@@ -0,0 +1,56 @@
+import { Command } from 'commander';
+import { Wallet } from 'ethers';
+import { Deployer } from '../src.ts/deploy';
+import * as fs from 'fs';
+import * as path from 'path';
+import { web3Provider } from './utils';
+
+const provider = web3Provider();
+const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`);
+const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' }));
+
+export enum AccessMode {
+ Closed = 0,
+ SpecialAccessOnly = 1,
+ Public = 2
+}
+
+async function main() {
+ const program = new Command();
+
+ program.version('0.1.0').name('initialize-l1-allow-list');
+
+ program
+ .option('--private-key <private-key>')
+ .option('--nonce <nonce>')
+ .action(async (cmd) => {
+ const wallet = cmd.privateKey
+ ? new Wallet(cmd.privateKey, provider)
+ : Wallet.fromMnemonic(
+ process.env.MNEMONIC ? process.env.MNEMONIC : ethTestConfig.mnemonic,
+ "m/44'/60'/0'/0/1"
+ ).connect(provider);
+ console.log(`Using wallet: ${wallet.address}`);
+
+ const nonce = cmd.nonce ? parseInt(cmd.nonce) : await wallet.getTransactionCount();
+ console.log(`Using nonce: ${nonce}`);
+
+ const deployer = new Deployer({ deployWallet: wallet });
+
+ const allowListContract = deployer.l1AllowList(wallet);
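+ // Open public access to the zkSync Diamond proxy and the default ERC20 bridge proxy in a single batched call.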
+ await allowListContract.setBatchAccessMode(
+ [deployer.addresses.ZkSync.DiamondProxy, deployer.addresses.Bridges.ERC20BridgeProxy],
+ [AccessMode.Public, AccessMode.Public],
+ { nonce }
+ );
+ });
+
+ await program.parseAsync(process.argv);
+}
+
+main()
+ .then(() => process.exit(0))
+ .catch((err) => {
+ console.error('Error:', err);
+ process.exit(1);
+ });
diff --git a/ethereum/scripts/read-variable.ts b/ethereum/scripts/read-variable.ts
new file mode 100644
index 000000000..0f30cddff
--- /dev/null
+++ b/ethereum/scripts/read-variable.ts
@@ -0,0 +1,369 @@
+import { Command } from 'commander';
+import { web3Provider } from './utils';
+import { BigNumber } from 'ethers';
+import { ethers } from 'ethers';
+import * as hre from 'hardhat';
+
+const provider = web3Provider();
+
+const cache: Map<BigNumber, string> = new Map();
+
+async function getStorageAt(address: string, slot: BigNumber): Promise<string> {
+ if (!cache.has(slot)) {
+ cache.set(slot, await provider.getStorageAt(address, slot));
+ }
+ return cache.get(slot);
+}
+
+// Read bytes from storage as a hex string
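+// Values are right-aligned within the 32-byte slot; `shift` is the byte offset of the value from the
+// right-hand side of the slot, which matters for variables packed together into a single slot.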
+async function readBytes(slot: BigNumber, shift: number, bytes: number, address: string): Promise<string> {
+ const data = await getStorageAt(address, slot);
+ return '0x' + data.substr(66 - bytes * 2 - shift * 2, bytes * 2);
+}
+
+// Read dynamic sized bytes (encoding: bytes)
+async function readDynamicBytes(slot: BigNumber, address: string): Promise<string> {
+ const data = await getStorageAt(address, slot);
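+ // Solidity storage layout for dynamic bytes/string: values shorter than 32 bytes are stored in the
+ // slot itself with (length * 2) in the lowest byte (even); longer values store (length * 2 + 1) in
+ // the slot (odd) and keep the data starting at keccak256(slot).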
+ if (Number.parseInt(data.substr(64, 2), 16) % 2 === 0) {
+ const length = Number.parseInt(data.substr(64, 2), 16) / 2;
+ return '0x' + data.substr(2, 2 * length);
+ } else {
+ const length = (Number.parseInt(data, 16) - 1) / 2;
+ const firstSlot = BigNumber.from(ethers.utils.solidityKeccak256(['uint'], [slot]));
+ const slots = [];
+ for (let slotShift = 0; slotShift * 32 < length; slotShift++) {
+ slots.push(getStorageAt(address, firstSlot.add(slotShift)));
+ }
+
+ const lastLength = length % 32;
+ let hex: string = '0x';
+ for (let i = 0; i < slots.length; i++) {
+ if (i === slots.length - 1) {
+ hex += (await slots[i]).substr(2, lastLength * 2);
+ } else {
+ hex += (await slots[i]).substr(2, 64);
+ }
+ }
+ return hex;
+ }
+}
+
+// Functions for reading all elementary types, except user-defined structs and arrays
+async function readString(slot: BigNumber, address: string): Promise<string> {
+ return ethers.utils.toUtf8String(await readDynamicBytes(slot, address));
+}
+
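+// Decode an int/uint of any width: derive the byte width from the type label (e.g. 'uint128' -> 16 bytes),
+// left-pad the raw bytes to 32 bytes and ABI-decode them with that label.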
+async function readNumber(slot: BigNumber, shift: number, label: string, address: string): Promise<string> {
+ let bytes: number;
+ if (label.substr(0, 3) === 'int') {
+ bytes = +label.substring(3, label.length) / 8;
+ } else {
+ bytes = +label.substring(4, label.length) / 8;
+ }
+ let data: string = await readBytes(slot, shift, bytes, address);
+ data = ethers.utils.hexZeroPad(data, 32);
+ return ethers.utils.defaultAbiCoder.decode([label], data).toString();
+}
+
+async function readBoolean(slot: BigNumber, shift: number, address: string): Promise<boolean> {
+ return (await readNumber(slot, shift, 'uint8', address)) !== '0';
+}
+
+async function readAddress(slot: BigNumber, shift: number, address: string): Promise<string> {
+ return readBytes(slot, shift, 20, address);
+}
+
+async function readEnum(slot: BigNumber, shift: number, bytes: number, address: string): Promise<string> {
+ return await readNumber(slot, shift, 'uint' + bytes * 8, address);
+}
+
+let types: any;
+
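+// Dispatch on the type identifier produced by the compiler's storage layout output
+// (e.g. t_uint256, t_bool, t_address); `types` holds the corresponding type descriptions.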
+async function readPrimitive(slot: BigNumber, shift: number, address: string, type: string): Promise<any> {
+ if (type.substr(0, 5) === 't_int' || type.substr(0, 6) === 't_uint') {
+ return readNumber(slot, shift, types[type].label, address);
+ }
+ if (type === 't_bool') {
+ return readBoolean(slot, shift, address);
+ }
+ if (type === 't_address' || type === 't_address_payable') {
+ return readAddress(slot, shift, address);
+ }
+ if (type === 't_bytes_storage') {
+ return readDynamicBytes(slot, address);
+ }
+ if (type.substr(0, 7) === 't_bytes') {
+ return readBytes(slot, shift, types[type].numberOfBytes, address);
+ }
+ if (type === 't_string_storage') {
+ return readString(slot, address);
+ }
+ if (type.substr(0, 6) === 't_enum') {
+ return readEnum(slot, shift, types[type].numberOfBytes, address);
+ }
+}
+
+// Read a user-defined struct
+async function readStruct(slot: BigNumber, address: string, type: string): Promise<object> {