diff --git a/Cargo.lock b/Cargo.lock index 83673d239c0..1c32fed0874 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -179,6 +179,95 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +[[package]] +name = "alloy-consensus" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy.git?rev=974d488bab5e21e9f17452a39a4bfa56677367b2#974d488bab5e21e9f17452a39a4bfa56677367b2" +dependencies = [ + "alloy-eips", + "alloy-network", + "alloy-primitives", + "alloy-rlp", +] + +[[package]] +name = "alloy-eips" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy.git?rev=974d488bab5e21e9f17452a39a4bfa56677367b2#974d488bab5e21e9f17452a39a4bfa56677367b2" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "serde", + "thiserror", +] + +[[package]] +name = "alloy-json-rpc" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy.git?rev=974d488bab5e21e9f17452a39a4bfa56677367b2#974d488bab5e21e9f17452a39a4bfa56677367b2" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "alloy-network" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy.git?rev=974d488bab5e21e9f17452a39a4bfa56677367b2#974d488bab5e21e9f17452a39a4bfa56677367b2" +dependencies = [ + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives", + "alloy-rlp", + "serde", +] + +[[package]] +name = "alloy-primitives" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4b6fb2b432ff223d513db7f908937f63c252bee0af9b82bfd25b0a5dd1eb0d8" +dependencies = [ + "alloy-rlp", + "bytes", + "cfg-if", + "const-hex", + "derive_more", + "hex-literal", + "itoa", + "k256 0.13.3", + "keccak-asm", + "proptest", + "rand", + "ruint", + "serde", + "tiny-keccak", +] + +[[package]] +name = "alloy-rlp" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" +dependencies = [ + "alloy-rlp-derive", + "arrayvec", + "bytes", +] + +[[package]] +name = "alloy-rlp-derive" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a047897373be4bbb0224c1afdabca92648dc57a9c9ef6e7b0be3aff7a859c83" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "amcl" version = "0.3.0" @@ -229,6 +318,130 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm 0.3.0", + "ark-ff-macros 0.3.0", + "ark-serialize 0.3.0", + "ark-std 0.3.0", + "derivative", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.3.3", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm 0.4.2", + "ark-ff-macros 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "derivative", + "digest 0.10.7", + "itertools", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.4.0", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = 
"ark-ff-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" +dependencies = [ + "num-bigint", + "num-traits", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-serialize" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" +dependencies = [ + "ark-std 0.3.0", + "digest 0.9.0", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-std 0.4.0", + "digest 0.10.7", + "num-bigint", +] + +[[package]] +name = "ark-std" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand", +] + [[package]] name = "arrayref" version = "0.3.7" @@ -485,7 +698,7 @@ checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" dependencies = [ "futures", "pharos", - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -838,6 +1051,21 @@ dependencies = [ "which", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "1.3.2" @@ -1119,7 +1347,7 @@ checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" dependencies = [ "camino", "cargo-platform", - "semver", + "semver 1.0.21", "serde", "serde_json", "thiserror", @@ -1331,6 +1559,19 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const-hex" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5104de16b218eddf8e34ffe2f86f74bfa4e61e95a1b89732fccf6325efd0557" +dependencies = [ + "cfg-if", + "cpufeatures", + "hex", + "proptest", + "serde", +] + [[package]] name = "const-oid" version = "0.9.6" @@ -1593,7 +1834,7 @@ dependencies = [ "digest 0.10.7", "fiat-crypto", "platforms 3.3.0", - "rustc_version", + "rustc_version 0.4.0", "subtle", "zeroize", ] @@ -1820,7 +2061,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.4.0", "syn 1.0.109", ] @@ -2755,6 +2996,8 @@ dependencies = [ name = "execution_layer" version = "0.1.0" dependencies = [ + "alloy-consensus", + "alloy-rlp", "arc-swap", "async-trait", "builder_client", @@ -2848,6 +3091,17 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +[[package]] +name = "fastrlp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", +] + [[package]] name = "ff" version = "0.12.1" @@ -2887,7 +3141,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" dependencies = [ "memoffset", - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -3454,6 +3708,15 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" [[package]] name = "hex_fmt" @@ -4188,6 +4451,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb8515fff80ed850aea4a1595f2e519c003e2a00a82fe168ebf5269196caf444" +dependencies = [ + "digest 0.10.7", + "sha3-asm", +] + [[package]] name = "keccak-hash" version = "0.10.0" @@ -5618,6 +5891,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -5986,6 +6260,17 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "pest" +version = "2.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219c0dcc30b6a27553f9cc242972b67f75b60eb0db71f0b5462f38b058c41546" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + [[package]] name = "pharos" version = "0.5.3" @@ -5993,7 +6278,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" dependencies = [ "futures", - "rustc_version", + "rustc_version 0.4.0", ] [[package]] 
@@ -6373,6 +6658,26 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "proptest" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.4.2", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax 0.8.2", + "rusty-fork", + "tempfile", + "unarray", +] + [[package]] name = "proto_array" version = "0.2.0" @@ -6865,6 +7170,36 @@ dependencies = [ "tokio", ] +[[package]] +name = "ruint" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608a5726529f2f0ef81b8fde9873c4bb829d6b5b5ca6be4d97345ddf0749c825" +dependencies = [ + "alloy-rlp", + "ark-ff 0.3.0", + "ark-ff 0.4.2", + "bytes", + "fastrlp", + "num-bigint", + "num-traits", + "parity-scale-codec 3.6.9", + "primitive-types 0.12.2", + "proptest", + "rand", + "rlp", + "ruint-macro", + "serde", + "valuable", + "zeroize", +] + +[[package]] +name = "ruint-macro" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" + [[package]] name = "rusqlite" version = "0.28.0" @@ -6897,13 +7232,22 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.21", ] [[package]] @@ -6993,6 +7337,18 @@ 
version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "rw-stream-sink" version = "0.4.0" @@ -7159,6 +7515,15 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.21" @@ -7168,6 +7533,15 @@ dependencies = [ "serde", ] +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + [[package]] name = "send_wrapper" version = "0.6.0" @@ -7367,6 +7741,16 @@ dependencies = [ "keccak", ] +[[package]] +name = "sha3-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bac61da6b35ad76b195eb4771210f947734321a8d81d7738e1580d953bc7a15e" +dependencies = [ + "cc", + "cfg-if", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -7659,7 +8043,7 @@ dependencies = [ "curve25519-dalek", "rand_core", "ring 0.17.7", - "rustc_version", + "rustc_version 0.4.0", "sha2 0.10.8", "subtle", ] @@ -8621,6 +9005,12 @@ dependencies = [ "tree_hash_derive", ] +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "uint" version = "0.9.5" @@ -8634,6 +9024,12 @@ 
dependencies = [ "static_assertions", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unescape" version = "0.1.0" @@ -8911,6 +9307,15 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.1.1" @@ -9496,7 +9901,7 @@ dependencies = [ "js-sys", "log", "pharos", - "rustc_version", + "rustc_version 0.4.0", "send_wrapper", "thiserror", "wasm-bindgen", diff --git a/Dockerfile b/Dockerfile index a8dadf2ad57..901c1b83d63 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.73.0-bullseye AS builder +FROM rust:1.75.0-bullseye AS builder RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev COPY . lighthouse ARG FEATURES diff --git a/beacon_node/beacon_chain/src/execution_payload.rs b/beacon_node/beacon_chain/src/execution_payload.rs index e25976c2a57..fd790c88429 100644 --- a/beacon_node/beacon_chain/src/execution_payload.rs +++ b/beacon_node/beacon_chain/src/execution_payload.rs @@ -81,14 +81,10 @@ impl PayloadNotifier { match notify_execution_layer { NotifyExecutionLayer::No if chain.config.optimistic_finalized_sync => { - // Verify the block hash here in Lighthouse and immediately mark the block as - // optimistically imported. This saves a lot of roundtrips to the EL. 
- let execution_layer = chain - .execution_layer - .as_ref() - .ok_or(ExecutionPayloadError::NoExecutionConnection)?; - - if let Err(e) = execution_layer.verify_payload_block_hash(block_message) { + // Create a NewPayloadRequest (no clones required) and check optimistic sync verifications + let new_payload_request: NewPayloadRequest = + block_message.try_into()?; + if let Err(e) = new_payload_request.perform_optimistic_sync_verifications() { warn!( chain.log, "Falling back to slow block hash verification"; @@ -143,11 +139,8 @@ async fn notify_new_payload<'a, T: BeaconChainTypes>( .as_ref() .ok_or(ExecutionPayloadError::NoExecutionConnection)?; - let new_payload_request: NewPayloadRequest = block.try_into()?; - let execution_block_hash = new_payload_request.block_hash(); - let new_payload_response = execution_layer - .notify_new_payload(new_payload_request) - .await; + let execution_block_hash = block.execution_payload()?.block_hash(); + let new_payload_response = execution_layer.notify_new_payload(block.try_into()?).await; match new_payload_response { Ok(status) => match status { diff --git a/beacon_node/execution_layer/Cargo.toml b/beacon_node/execution_layer/Cargo.toml index 7f652689806..7fee3721d8f 100644 --- a/beacon_node/execution_layer/Cargo.toml +++ b/beacon_node/execution_layer/Cargo.toml @@ -51,3 +51,5 @@ hash-db = "0.15.2" pretty_reqwest_error = { workspace = true } arc-swap = "1.6.0" eth2_network_config = { workspace = true } +alloy-rlp = "0.3" +alloy-consensus = { git = "https://github.com/alloy-rs/alloy.git", rev = "974d488bab5e21e9f17452a39a4bfa56677367b2" } diff --git a/beacon_node/execution_layer/src/block_hash.rs b/beacon_node/execution_layer/src/block_hash.rs index 5ba61beafca..074ef8b0c14 100644 --- a/beacon_node/execution_layer/src/block_hash.rs +++ b/beacon_node/execution_layer/src/block_hash.rs @@ -1,92 +1,61 @@ use crate::{ json_structures::JsonWithdrawal, keccak::{keccak256, KeccakHasher}, - metrics, Error, ExecutionLayer, }; use 
ethers_core::utils::rlp::RlpStream; use keccak_hash::KECCAK_EMPTY_LIST_RLP; use triehash::ordered_trie_root; use types::{ - map_execution_block_header_fields_base, Address, BeaconBlockRef, EthSpec, ExecutionBlockHash, + map_execution_block_header_fields_base, Address, EthSpec, ExecutionBlockHash, ExecutionBlockHeader, ExecutionPayloadRef, Hash256, Hash64, Uint256, }; -impl ExecutionLayer { - /// Calculate the block hash of an execution block. - /// - /// Return `(block_hash, transactions_root)`, where `transactions_root` is the root of the RLP - /// transactions. - pub fn calculate_execution_block_hash( - payload: ExecutionPayloadRef, - parent_beacon_block_root: Hash256, - ) -> (ExecutionBlockHash, Hash256) { - // Calculate the transactions root. - // We're currently using a deprecated Parity library for this. We should move to a - // better alternative when one appears, possibly following Reth. - let rlp_transactions_root = ordered_trie_root::( - payload.transactions().iter().map(|txn_bytes| &**txn_bytes), - ); - - // Calculate withdrawals root (post-Capella). - let rlp_withdrawals_root = if let Ok(withdrawals) = payload.withdrawals() { - Some(ordered_trie_root::( - withdrawals.iter().map(|withdrawal| { - rlp_encode_withdrawal(&JsonWithdrawal::from(withdrawal.clone())) - }), - )) - } else { - None - }; - - let rlp_blob_gas_used = payload.blob_gas_used().ok(); - let rlp_excess_blob_gas = payload.excess_blob_gas().ok(); - - // Calculate parent beacon block root (post-Deneb). - let rlp_parent_beacon_block_root = rlp_excess_blob_gas - .as_ref() - .map(|_| parent_beacon_block_root); - - // Construct the block header. - let exec_block_header = ExecutionBlockHeader::from_payload( - payload, - KECCAK_EMPTY_LIST_RLP.as_fixed_bytes().into(), - rlp_transactions_root, - rlp_withdrawals_root, - rlp_blob_gas_used, - rlp_excess_blob_gas, - rlp_parent_beacon_block_root, - ); - - // Hash the RLP encoding of the block header. 
- let rlp_block_header = rlp_encode_block_header(&exec_block_header); - ( - ExecutionBlockHash::from_root(keccak256(&rlp_block_header)), - rlp_transactions_root, - ) - } - - /// Verify `payload.block_hash` locally within Lighthouse. - /// - /// No remote calls to the execution client will be made, so this is quite a cheap check. - pub fn verify_payload_block_hash(&self, block: BeaconBlockRef) -> Result<(), Error> { - let payload = block.execution_payload()?.execution_payload_ref(); - let parent_beacon_block_root = block.parent_root(); - - let _timer = metrics::start_timer(&metrics::EXECUTION_LAYER_VERIFY_BLOCK_HASH); - - let (header_hash, rlp_transactions_root) = - Self::calculate_execution_block_hash(payload, parent_beacon_block_root); - - if header_hash != payload.block_hash() { - return Err(Error::BlockHashMismatch { - computed: header_hash, - payload: payload.block_hash(), - transactions_root: rlp_transactions_root, - }); - } - - Ok(()) - } +/// Calculate the block hash of an execution block. +/// +/// Return `(block_hash, transactions_root)`, where `transactions_root` is the root of the RLP +/// transactions. +pub fn calculate_execution_block_hash( + payload: ExecutionPayloadRef, + parent_beacon_block_root: Option, +) -> (ExecutionBlockHash, Hash256) { + // Calculate the transactions root. + // We're currently using a deprecated Parity library for this. We should move to a + // better alternative when one appears, possibly following Reth. + let rlp_transactions_root = ordered_trie_root::( + payload.transactions().iter().map(|txn_bytes| &**txn_bytes), + ); + + // Calculate withdrawals root (post-Capella). 
+ let rlp_withdrawals_root = if let Ok(withdrawals) = payload.withdrawals() { + Some(ordered_trie_root::( + withdrawals + .iter() + .map(|withdrawal| rlp_encode_withdrawal(&JsonWithdrawal::from(withdrawal.clone()))), + )) + } else { + None + }; + + let rlp_blob_gas_used = payload.blob_gas_used().ok(); + let rlp_excess_blob_gas = payload.excess_blob_gas().ok(); + + // Construct the block header. + let exec_block_header = ExecutionBlockHeader::from_payload( + payload, + KECCAK_EMPTY_LIST_RLP.as_fixed_bytes().into(), + rlp_transactions_root, + rlp_withdrawals_root, + rlp_blob_gas_used, + rlp_excess_blob_gas, + parent_beacon_block_root, + ); + + // Hash the RLP encoding of the block header. + let rlp_block_header = rlp_encode_block_header(&exec_block_header); + ( + ExecutionBlockHash::from_root(keccak256(&rlp_block_header)), + rlp_transactions_root, + ) } /// RLP encode a withdrawal. diff --git a/beacon_node/execution_layer/src/engine_api.rs b/beacon_node/execution_layer/src/engine_api.rs index 19b9a58eb66..e20009e2858 100644 --- a/beacon_node/execution_layer/src/engine_api.rs +++ b/beacon_node/execution_layer/src/engine_api.rs @@ -17,7 +17,6 @@ pub use json_structures::{JsonWithdrawal, TransitionConfigurationV1}; use pretty_reqwest_error::PrettyReqwestError; use reqwest::StatusCode; use serde::{Deserialize, Serialize}; -use state_processing::per_block_processing::deneb::kzg_commitment_to_versioned_hash; use std::convert::TryFrom; use strum::IntoStaticStr; use superstruct::superstruct; @@ -26,14 +25,16 @@ pub use types::{ ExecutionPayloadRef, FixedVector, ForkName, Hash256, Transactions, Uint256, VariableList, Withdrawal, Withdrawals, }; -use types::{ - BeaconStateError, ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, - KzgProofs, VersionedHash, -}; +use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, KzgProofs}; pub mod auth; pub mod http; pub mod json_structures; +mod new_payload_request; + +pub use 
new_payload_request::{ + NewPayloadRequest, NewPayloadRequestCapella, NewPayloadRequestDeneb, NewPayloadRequestMerge, +}; pub const LATEST_TAG: &str = "latest"; @@ -571,110 +572,6 @@ impl ExecutionPayloadBodyV1 { } } -#[superstruct( - variants(Merge, Capella, Deneb), - variant_attributes(derive(Clone, Debug, PartialEq),), - map_into(ExecutionPayload), - map_ref_into(ExecutionPayloadRef), - cast_error( - ty = "BeaconStateError", - expr = "BeaconStateError::IncorrectStateVariant" - ), - partial_getter_error( - ty = "BeaconStateError", - expr = "BeaconStateError::IncorrectStateVariant" - ) -)] -#[derive(Clone, Debug, PartialEq)] -pub struct NewPayloadRequest { - #[superstruct(only(Merge), partial_getter(rename = "execution_payload_merge"))] - pub execution_payload: ExecutionPayloadMerge, - #[superstruct(only(Capella), partial_getter(rename = "execution_payload_capella"))] - pub execution_payload: ExecutionPayloadCapella, - #[superstruct(only(Deneb), partial_getter(rename = "execution_payload_deneb"))] - pub execution_payload: ExecutionPayloadDeneb, - #[superstruct(only(Deneb))] - pub versioned_hashes: Vec, - #[superstruct(only(Deneb))] - pub parent_beacon_block_root: Hash256, -} - -impl NewPayloadRequest { - pub fn parent_hash(&self) -> ExecutionBlockHash { - match self { - Self::Merge(payload) => payload.execution_payload.parent_hash, - Self::Capella(payload) => payload.execution_payload.parent_hash, - Self::Deneb(payload) => payload.execution_payload.parent_hash, - } - } - - pub fn block_hash(&self) -> ExecutionBlockHash { - match self { - Self::Merge(payload) => payload.execution_payload.block_hash, - Self::Capella(payload) => payload.execution_payload.block_hash, - Self::Deneb(payload) => payload.execution_payload.block_hash, - } - } - - pub fn block_number(&self) -> u64 { - match self { - Self::Merge(payload) => payload.execution_payload.block_number, - Self::Capella(payload) => payload.execution_payload.block_number, - Self::Deneb(payload) => 
payload.execution_payload.block_number, - } - } - - pub fn into_execution_payload(self) -> ExecutionPayload { - map_new_payload_request_into_execution_payload!(self, |request, cons| { - cons(request.execution_payload) - }) - } -} - -impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest { - type Error = BeaconStateError; - - fn try_from(block: BeaconBlockRef<'a, E>) -> Result { - match block { - BeaconBlockRef::Base(_) | BeaconBlockRef::Altair(_) => { - Err(Self::Error::IncorrectStateVariant) - } - BeaconBlockRef::Merge(block_ref) => Ok(Self::Merge(NewPayloadRequestMerge { - execution_payload: block_ref.body.execution_payload.execution_payload.clone(), - })), - BeaconBlockRef::Capella(block_ref) => Ok(Self::Capella(NewPayloadRequestCapella { - execution_payload: block_ref.body.execution_payload.execution_payload.clone(), - })), - BeaconBlockRef::Deneb(block_ref) => Ok(Self::Deneb(NewPayloadRequestDeneb { - execution_payload: block_ref.body.execution_payload.execution_payload.clone(), - versioned_hashes: block_ref - .body - .blob_kzg_commitments - .iter() - .map(kzg_commitment_to_versioned_hash) - .collect(), - parent_beacon_block_root: block_ref.parent_root, - })), - } - } -} - -impl TryFrom> for NewPayloadRequest { - type Error = BeaconStateError; - - fn try_from(payload: ExecutionPayload) -> Result { - match payload { - ExecutionPayload::Merge(payload) => Ok(Self::Merge(NewPayloadRequestMerge { - execution_payload: payload, - })), - ExecutionPayload::Capella(payload) => Ok(Self::Capella(NewPayloadRequestCapella { - execution_payload: payload, - })), - ExecutionPayload::Deneb(_) => Err(Self::Error::IncorrectStateVariant), - } - } -} - #[derive(Clone, Copy, Debug)] pub struct EngineCapabilities { pub new_payload_v1: bool, diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index ac7dfa57e92..df0f79c61e2 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ 
b/beacon_node/execution_layer/src/engine_api/http.rs @@ -803,10 +803,10 @@ impl HttpJsonRpc { pub async fn new_payload_v3( &self, - new_payload_request_deneb: NewPayloadRequestDeneb, + new_payload_request_deneb: NewPayloadRequestDeneb<'_, T>, ) -> Result { let params = json!([ - JsonExecutionPayload::V3(new_payload_request_deneb.execution_payload.into()), + JsonExecutionPayload::V3(new_payload_request_deneb.execution_payload.clone().into()), new_payload_request_deneb.versioned_hashes, new_payload_request_deneb.parent_beacon_block_root, ]); @@ -1079,7 +1079,7 @@ impl HttpJsonRpc { // new_payload that the execution engine supports pub async fn new_payload( &self, - new_payload_request: NewPayloadRequest, + new_payload_request: NewPayloadRequest<'_, T>, ) -> Result { let engine_capabilities = self.get_engine_capabilities(None).await?; match new_payload_request { diff --git a/beacon_node/execution_layer/src/engine_api/new_payload_request.rs b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs new file mode 100644 index 00000000000..b1385399e89 --- /dev/null +++ b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs @@ -0,0 +1,332 @@ +use crate::{block_hash::calculate_execution_block_hash, metrics, Error}; + +use crate::versioned_hashes::verify_versioned_hashes; +use state_processing::per_block_processing::deneb::kzg_commitment_to_versioned_hash; +use superstruct::superstruct; +use types::{ + BeaconBlockRef, BeaconStateError, EthSpec, ExecutionBlockHash, ExecutionPayload, + ExecutionPayloadRef, Hash256, VersionedHash, +}; +use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge}; + +#[superstruct( + variants(Merge, Capella, Deneb), + variant_attributes(derive(Clone, Debug, PartialEq),), + map_into(ExecutionPayload), + map_ref_into(ExecutionPayloadRef), + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = 
"BeaconStateError::IncorrectStateVariant" + ) +)] +#[derive(Clone, Debug, PartialEq)] +pub struct NewPayloadRequest<'block, E: EthSpec> { + #[superstruct(only(Merge), partial_getter(rename = "execution_payload_merge"))] + pub execution_payload: &'block ExecutionPayloadMerge, + #[superstruct(only(Capella), partial_getter(rename = "execution_payload_capella"))] + pub execution_payload: &'block ExecutionPayloadCapella, + #[superstruct(only(Deneb), partial_getter(rename = "execution_payload_deneb"))] + pub execution_payload: &'block ExecutionPayloadDeneb, + #[superstruct(only(Deneb))] + pub versioned_hashes: Vec, + #[superstruct(only(Deneb))] + pub parent_beacon_block_root: Hash256, +} + +impl<'block, E: EthSpec> NewPayloadRequest<'block, E> { + pub fn parent_hash(&self) -> ExecutionBlockHash { + match self { + Self::Merge(payload) => payload.execution_payload.parent_hash, + Self::Capella(payload) => payload.execution_payload.parent_hash, + Self::Deneb(payload) => payload.execution_payload.parent_hash, + } + } + + pub fn block_hash(&self) -> ExecutionBlockHash { + match self { + Self::Merge(payload) => payload.execution_payload.block_hash, + Self::Capella(payload) => payload.execution_payload.block_hash, + Self::Deneb(payload) => payload.execution_payload.block_hash, + } + } + + pub fn block_number(&self) -> u64 { + match self { + Self::Merge(payload) => payload.execution_payload.block_number, + Self::Capella(payload) => payload.execution_payload.block_number, + Self::Deneb(payload) => payload.execution_payload.block_number, + } + } + + pub fn execution_payload_ref(&self) -> ExecutionPayloadRef<'block, E> { + match self { + Self::Merge(request) => ExecutionPayloadRef::Merge(request.execution_payload), + Self::Capella(request) => ExecutionPayloadRef::Capella(request.execution_payload), + Self::Deneb(request) => ExecutionPayloadRef::Deneb(request.execution_payload), + } + } + + pub fn into_execution_payload(self) -> ExecutionPayload { + match self { + 
Self::Merge(request) => ExecutionPayload::Merge(request.execution_payload.clone()), + Self::Capella(request) => ExecutionPayload::Capella(request.execution_payload.clone()), + Self::Deneb(request) => ExecutionPayload::Deneb(request.execution_payload.clone()), + } + } + + /// Performs the required verifications of the payload when the chain is optimistically syncing. + /// + /// ## Specification + /// + /// Performs the verifications in the `verify_and_notify_new_payload` function: + /// + /// https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.2/specs/deneb/beacon-chain.md#modified-verify_and_notify_new_payload + pub fn perform_optimistic_sync_verifications(&self) -> Result<(), Error> { + self.verify_payload_block_hash()?; + self.verify_versioned_hashes()?; + + Ok(()) + } + + /// Verify the block hash is consistent locally within Lighthouse. + /// + /// ## Specification + /// + /// Equivalent to `is_valid_block_hash` in the spec: + /// https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.2/specs/deneb/beacon-chain.md#is_valid_block_hash + pub fn verify_payload_block_hash(&self) -> Result<(), Error> { + let payload = self.execution_payload_ref(); + let parent_beacon_block_root = self.parent_beacon_block_root().ok().cloned(); + + let _timer = metrics::start_timer(&metrics::EXECUTION_LAYER_VERIFY_BLOCK_HASH); + + let (header_hash, rlp_transactions_root) = + calculate_execution_block_hash(payload, parent_beacon_block_root); + + if header_hash != self.block_hash() { + return Err(Error::BlockHashMismatch { + computed: header_hash, + payload: payload.block_hash(), + transactions_root: rlp_transactions_root, + }); + } + + Ok(()) + } + + /// Verify the versioned hashes computed by the blob transactions match the versioned hashes computed from the commitments.
+ /// + /// ## Specification + /// + /// Equivalent to `is_valid_versioned_hashes` in the spec: + /// https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.2/specs/deneb/beacon-chain.md#is_valid_versioned_hashes + pub fn verify_versioned_hashes(&self) -> Result<(), Error> { + if let Ok(versioned_hashes) = self.versioned_hashes() { + verify_versioned_hashes(self.execution_payload_ref(), versioned_hashes) + .map_err(Error::VerifyingVersionedHashes)?; + } + Ok(()) + } +} + +impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest<'a, E> { + type Error = BeaconStateError; + + fn try_from(block: BeaconBlockRef<'a, E>) -> Result { + match block { + BeaconBlockRef::Base(_) | BeaconBlockRef::Altair(_) => { + Err(Self::Error::IncorrectStateVariant) + } + BeaconBlockRef::Merge(block_ref) => Ok(Self::Merge(NewPayloadRequestMerge { + execution_payload: &block_ref.body.execution_payload.execution_payload, + })), + BeaconBlockRef::Capella(block_ref) => Ok(Self::Capella(NewPayloadRequestCapella { + execution_payload: &block_ref.body.execution_payload.execution_payload, + })), + BeaconBlockRef::Deneb(block_ref) => Ok(Self::Deneb(NewPayloadRequestDeneb { + execution_payload: &block_ref.body.execution_payload.execution_payload, + versioned_hashes: block_ref + .body + .blob_kzg_commitments + .iter() + .map(kzg_commitment_to_versioned_hash) + .collect(), + parent_beacon_block_root: block_ref.parent_root, + })), + } + } +} + +impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest<'a, E> { + type Error = BeaconStateError; + + fn try_from(payload: ExecutionPayloadRef<'a, E>) -> Result { + match payload { + ExecutionPayloadRef::Merge(payload) => Ok(Self::Merge(NewPayloadRequestMerge { + execution_payload: payload, + })), + ExecutionPayloadRef::Capella(payload) => Ok(Self::Capella(NewPayloadRequestCapella { + execution_payload: payload, + })), + ExecutionPayloadRef::Deneb(_) => Err(Self::Error::IncorrectStateVariant), + } + } +} + +#[cfg(test)] +mod test { + use 
crate::versioned_hashes::Error as VersionedHashError; + use crate::{Error, NewPayloadRequest}; + use state_processing::per_block_processing::deneb::kzg_commitment_to_versioned_hash; + use types::{BeaconBlock, ExecPayload, ExecutionBlockHash, Hash256, MainnetEthSpec}; + + #[test] + fn test_optimistic_sync_verifications_valid_block() { + let beacon_block = get_valid_beacon_block(); + let new_payload_request = NewPayloadRequest::try_from(beacon_block.to_ref()) + .expect("should create new payload request"); + + assert!( + new_payload_request + .perform_optimistic_sync_verifications() + .is_ok(), + "validations should pass" + ); + } + + #[test] + fn test_optimistic_sync_verifications_bad_block_hash() { + let mut beacon_block = get_valid_beacon_block(); + let correct_block_hash = beacon_block + .body() + .execution_payload() + .expect("should get payload") + .block_hash(); + let invalid_block_hash = ExecutionBlockHash(Hash256::repeat_byte(0x42)); + + // now mutate the block hash + beacon_block + .body_mut() + .execution_payload_deneb_mut() + .expect("should get payload") + .execution_payload + .block_hash = invalid_block_hash; + + let new_payload_request = NewPayloadRequest::try_from(beacon_block.to_ref()) + .expect("should create new payload request"); + let verification_result = new_payload_request.perform_optimistic_sync_verifications(); + println!("verification_result: {:?}", verification_result); + let got_expected_result = match verification_result { + Err(Error::BlockHashMismatch { + computed, payload, .. 
+ }) => computed == correct_block_hash && payload == invalid_block_hash, + _ => false, + }; + assert!(got_expected_result, "should return expected error"); + } + + #[test] + fn test_optimistic_sync_verifications_bad_versioned_hashes() { + let mut beacon_block = get_valid_beacon_block(); + + let mut commitments: Vec<_> = beacon_block + .body() + .blob_kzg_commitments() + .expect("should get commitments") + .clone() + .into(); + + let correct_versioned_hash = kzg_commitment_to_versioned_hash( + commitments.last().expect("should get last commitment"), + ); + + // mutate the last commitment + commitments + .last_mut() + .expect("should get last commitment") + .0[0] = 0x42; + + // calculate versioned hash from mutated commitment + let bad_versioned_hash = kzg_commitment_to_versioned_hash( + commitments.last().expect("should get last commitment"), + ); + + *beacon_block + .body_mut() + .blob_kzg_commitments_mut() + .expect("should get commitments") = commitments.into(); + + let new_payload_request = NewPayloadRequest::try_from(beacon_block.to_ref()) + .expect("should create new payload request"); + let verification_result = new_payload_request.perform_optimistic_sync_verifications(); + println!("verification_result: {:?}", verification_result); + + let got_expected_result = match verification_result { + Err(Error::VerifyingVersionedHashes(VersionedHashError::VersionHashMismatch { + expected, + found, + })) => expected == bad_versioned_hash && found == correct_versioned_hash, + _ => false, + }; + assert!(got_expected_result, "should return expected error"); + } + + fn get_valid_beacon_block() -> BeaconBlock { + BeaconBlock::Deneb(serde_json::from_str(r#"{ + "slot": "88160", + "proposer_index": "583", + "parent_root": "0x60770cd86a497ca3aa2e91f1687aa3ebafac87af52c30a920b5f40bd9e930eb6", + "state_root": "0x4a0e0abbcbcf576f2cb7387c4289ab13b8a128e32127642f056143d6164941a6", + "body": { + "randao_reveal": 
"0xb5253d5739496abc4f67c7c92e39e46cca452c2fdfc5275e3e0426a012aa62df82f47f7dece348e28db4bb212f0e793d187120bbd47b8031ed79344116eb4128f0ce0b05ba18cd615bb13966c1bd7d89e23cc769c8e4d8e4a63755f623ac3bed", + "eth1_data": { + "deposit_root": "0xe4785ac914d8673797f886e3151ce2647f81ae070c7ddb6845e65fd1c47d1222", + "deposit_count": "1181", + "block_hash": "0x010671bdfbfce6b0071984a06a7ded6deef13b4f8fdbae402c606a7a0c8780d1" + }, + "graffiti": "0x6c6f6465737461722f6765746800000000000000000000000000000000000000", + "proposer_slashings": [], + "attester_slashings": [], + "attestations": [], + "deposits": [], + "voluntary_exits": [], + "sync_aggregate": { + "sync_committee_bits": "0xfebffffffebfff7fff7f7fffbbefffff6affffffffbfffffefffebfffdbf77fff7fd77ffffefffdff7ffffeffffffe7e5ffffffdefffff7ffbffff7fffffffff", + "sync_committee_signature": "0x91939b5baf2a6f52d405b6dd396f5346ec435eca7d25912c91cc6a2f7030d870d68bebe4f2b21872a06929ff4cf3e5e9191053cb43eb24ebe34b9a75fb88a3acd06baf329c87f68bd664b49891260c698d7bca0f5365870b5b2b3a76f582156c" + }, + "execution_payload": { + "parent_hash": "0xa6f3ed782a992f79ad38da2af91b3e8923c71b801c50bc9033bb35a2e1da885f", + "fee_recipient": "0xf97e180c050e5ab072211ad2c213eb5aee4df134", + "state_root": "0x3bfd1a7f309ed35048c349a8daf01815bdc09a6d5df86ea77d1056f248ba2017", + "receipts_root": "0xcb5b8ffea57cd0fa87194d49bc8bb7fad08c93c9934b886489503c328d15fd36", + "logs_bloom": "0x002000000000000000000000800000000000000000001040000000000000000000000001000000000000000000000000000000000000100000000020000c0800000000000000008000000008000000200000800000000000000000000000000000000000000000008000000000008000000000000000000002000010000000000000000000000000000000000000000000000000000000080000004000000000800000000000000000000100000000000000000000000000000000000800000000000102000000000000000000000000000000080000001000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "prev_randao": 
"0xb2693020177d99ffbd4c267023be172d759e7306ff51b0e7d677d3148fbd7f1d", + "block_number": "74807", + "gas_limit": "30000000", + "gas_used": "128393", + "timestamp": "1697039520", + "extra_data": "0xd883010d03846765746888676f312e32312e31856c696e7578", + "base_fee_per_gas": "7", + "block_hash": "0xc64f3a43c64aeb98518a237f6279fa03095b9f95ca673c860ad7f16fb9340062", + "transactions": [ + "0x02f9017a8501a1f0ff4382317585012a05f2008512a05f2000830249f094c1b0bc605e2c808aa0867bfc98e51a1fe3e9867f80b901040cc7326300000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000036e534e16b8920d000000000000000000000000fb3e9c7cb92443931ee6b5b9728598d4eb9618c1000000000000000000000000fc7360b3b28cf4204268a8354dbec60720d155d2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000009a054a063f0fe7b9c68de8df91aaa5e96c15ab540000000000000000000000000c8d41b8fcc066cdabaf074d78e5153e8ce018a9c080a07dd9be0d014ffcd5b6883d0917c66b74ba51f0d976c8fc5674af192af6fa9450a02dad2c660974c125f5f22b1e6e8862a292e08cc2b4cafda35af650ee62868a43", + "0x03f8db8501a1f0ff430d84773594008504a817c8008252089454e594b6de0aa4b0188cd1549dd7ba715a455d078080c08504a817c800f863a001253ce00f525e3495cffa0b865eadb90a4c5ee812185cc796af74b6ec0a5dd7a0010720372a4d7dcab84413ed0cfc164fb91fb6ef1562ec2f7a82e912a1d9e129a0015a73e97950397896ed2c47dcab7c0360220bcfb413a8f210a7b6e6264e698880a04402cb0f13c17ef41dca106b1e1520c7aadcbe62984d81171e29914f587d67c1a02db62a8edb581917958e4a3884e7eececbaec114c5ee496e238033e896f997ac" + ], + "withdrawals": [], + "blob_gas_used": "393216", + "excess_blob_gas": "58720256" + }, + "bls_to_execution_changes": [], + "blob_kzg_commitments": [ + "0xa7accb7a25224a8c2e0cee9cd569fc1798665bfbfe780e08945fa9098ec61da4061f5b04e750a88d3340a801850a54fa", + "0xac7b47f99836510ae9076dc5f5da1f370679dea1d47073307a14cbb125cdc7822ae619637135777cb40e13d897fd00a7", + 
"0x997794110b9655833a88ad5a4ec40a3dc7964877bfbeb04ca1abe1d51bdc43e20e4c5757028896d298d7da954a6f14a1" + ] + } + }"#).expect("should decode")) + } +} diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index 868d8194466..664ceabb6cd 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -61,6 +61,7 @@ mod metrics; pub mod payload_cache; mod payload_status; pub mod test_utils; +mod versioned_hashes; /// Indicates the default jwt authenticated execution endpoint. pub const DEFAULT_EXECUTION_ENDPOINT: &str = "http://localhost:8551/"; @@ -141,6 +142,7 @@ pub enum Error { InvalidBlobConversion(String), BeaconStateError(BeaconStateError), PayloadTypeMismatch, + VerifyingVersionedHashes(versioned_hashes::Error), } impl From for Error { @@ -1321,7 +1323,7 @@ impl ExecutionLayer { /// Maps to the `engine_newPayload` JSON-RPC call. pub async fn notify_new_payload( &self, - new_payload_request: NewPayloadRequest, + new_payload_request: NewPayloadRequest<'_, T>, ) -> Result { let _timer = metrics::start_timer_vec( &metrics::EXECUTION_LAYER_REQUEST_TIMES, diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 182cad50faf..6af988fa88f 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -699,7 +699,7 @@ pub fn generate_blobs( Ok((bundle, transactions.into())) } -fn static_valid_tx() -> Result, String> { +pub fn static_valid_tx() -> Result, String> { // This is a real transaction hex encoded, but we don't care about the contents of the transaction. 
let transaction: EthersTransaction = serde_json::from_str( r#"{ diff --git a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs index 7afeafc321e..77c2410ab1d 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs @@ -244,7 +244,7 @@ impl MockExecutionLayer { // TODO: again consider forks let status = self .el - .notify_new_payload(payload.try_into().unwrap()) + .notify_new_payload(payload.to_ref().try_into().unwrap()) .await .unwrap(); assert_eq!(status, PayloadStatus::Valid); diff --git a/beacon_node/execution_layer/src/test_utils/mod.rs b/beacon_node/execution_layer/src/test_utils/mod.rs index b509494703b..425329a520a 100644 --- a/beacon_node/execution_layer/src/test_utils/mod.rs +++ b/beacon_node/execution_layer/src/test_utils/mod.rs @@ -25,8 +25,8 @@ use warp::{http::StatusCode, Filter, Rejection}; use crate::EngineCapabilities; pub use execution_block_generator::{ - generate_blobs, generate_genesis_block, generate_genesis_header, generate_pow_block, Block, - ExecutionBlockGenerator, + generate_blobs, generate_genesis_block, generate_genesis_header, generate_pow_block, + static_valid_tx, Block, ExecutionBlockGenerator, }; pub use hook::Hook; pub use mock_builder::{MockBuilder, Operation}; diff --git a/beacon_node/execution_layer/src/versioned_hashes.rs b/beacon_node/execution_layer/src/versioned_hashes.rs new file mode 100644 index 00000000000..37bd35646d9 --- /dev/null +++ b/beacon_node/execution_layer/src/versioned_hashes.rs @@ -0,0 +1,135 @@ +extern crate alloy_consensus; +extern crate alloy_rlp; +use alloy_consensus::TxEnvelope; +use alloy_rlp::Decodable; +use types::{EthSpec, ExecutionPayloadRef, Hash256, Unsigned, VersionedHash}; + +#[derive(Debug)] +pub enum Error { + DecodingTransaction(String), + LengthMismatch { expected: usize, found: usize }, + VersionHashMismatch { 
expected: Hash256, found: Hash256 }, +} + +pub fn verify_versioned_hashes( + execution_payload: ExecutionPayloadRef, + expected_versioned_hashes: &[VersionedHash], +) -> Result<(), Error> { + let versioned_hashes = + extract_versioned_hashes_from_transactions::(execution_payload.transactions())?; + if versioned_hashes.len() != expected_versioned_hashes.len() { + return Err(Error::LengthMismatch { + expected: expected_versioned_hashes.len(), + found: versioned_hashes.len(), + }); + } + for (found, expected) in versioned_hashes + .iter() + .zip(expected_versioned_hashes.iter()) + { + if found != expected { + return Err(Error::VersionHashMismatch { + expected: *expected, + found: *found, + }); + } + } + + Ok(()) +} + +pub fn extract_versioned_hashes_from_transactions( + transactions: &types::Transactions, +) -> Result, Error> { + let mut versioned_hashes = Vec::new(); + + for tx in transactions { + match beacon_tx_to_tx_envelope(tx)? { + TxEnvelope::Eip4844(signed_tx_eip4844) => { + versioned_hashes.extend( + signed_tx_eip4844 + .tx() + .blob_versioned_hashes + .iter() + .map(|fb| Hash256::from(fb.0)), + ); + } + // enumerating all variants explicitly to make pattern irrefutable + // in case new types are added in the future which also have blobs + TxEnvelope::Legacy(_) + | TxEnvelope::TaggedLegacy(_) + | TxEnvelope::Eip2930(_) + | TxEnvelope::Eip1559(_) => {} + } + } + + Ok(versioned_hashes) +} + +pub fn beacon_tx_to_tx_envelope( + tx: &types::Transaction, +) -> Result { + let tx_bytes = Vec::from(tx.clone()); + TxEnvelope::decode(&mut tx_bytes.as_slice()) + .map_err(|e| Error::DecodingTransaction(e.to_string())) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::test_utils::static_valid_tx; + use alloy_consensus::{TxKind, TxLegacy}; + + type E = types::MainnetEthSpec; + + #[test] + fn test_decode_static_transaction() { + let valid_tx = static_valid_tx::().expect("should give me known valid transaction"); + let tx_envelope = 
beacon_tx_to_tx_envelope(&valid_tx).expect("should decode tx"); + let TxEnvelope::Legacy(signed_tx) = tx_envelope else { + panic!("should decode to legacy transaction"); + }; + + assert!(matches!( + signed_tx.tx(), + TxLegacy { + chain_id: Some(0x01), + nonce: 0x15, + gas_price: 0x4a817c800, + to: TxKind::Call(..), + .. + } + )); + } + + #[test] + fn test_extract_versioned_hashes() { + use serde::Deserialize; + + #[derive(Deserialize)] + #[serde(transparent)] + struct TestTransactions( + #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] types::Transactions, + ); + + let TestTransactions(raw_transactions): TestTransactions = serde_json::from_str(r#"[ + "0x03f901388501a1f0ff430f843b9aca00843b9aca0082520894e7249813d8ccf6fa95a2203f46a64166073d58878080c002f8c6a0012e98362c814f1724262c0d211a1463418a5f6382a8d457b37a2698afbe7b5ea00100ef985761395dfa8ed5ce91f3f2180b612401909e4cb8f33b90c8a454d9baa0013d45411623b90d90f916e4025ada74b453dd4ca093c017c838367c9de0f801a001753e2af0b1e70e7ef80541355b2a035cc9b2c177418bb2a4402a9b346cf84da0011789b520a8068094a92aa0b04db8d8ef1c6c9818947c5210821732b8744049a0011c4c4f95597305daa5f62bf5f690e37fa11f5de05a95d05cac4e2119e394db80a0ccd86a742af0e042d08cbb35d910ddc24bbc6538f9e53be6620d4b6e1bb77662a01a8bacbc614940ac2f5c23ffc00a122c9f085046883de65c88ab0edb859acb99", + 
"0x02f9017a8501a1f0ff4382363485012a05f2008512a05f2000830249f094c1b0bc605e2c808aa0867bfc98e51a1fe3e9867f80b901040cc7326300000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000009445a285baa43e00000000000000000000000000c500931f24edb821cef6e28f7adb33b38578c82000000000000000000000000fc7360b3b28cf4204268a8354dbec60720d155d2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000009a054a063f0fe7b9c68de8df91aaa5e96c15ab540000000000000000000000000c8d41b8fcc066cdabaf074d78e5153e8ce018a9c080a008e14475c1173cd9f5740c24c08b793f9e16c36c08fa73769db95050e31e3396a019767dcdda26c4a774ca28c9df15d0c20e43bd07bd33ee0f84d6096cb5a1ebed" + ]"#).expect("should get raw transactions"); + let expected_versioned_hashes = vec![ + "0x012e98362c814f1724262c0d211a1463418a5f6382a8d457b37a2698afbe7b5e", + "0x0100ef985761395dfa8ed5ce91f3f2180b612401909e4cb8f33b90c8a454d9ba", + "0x013d45411623b90d90f916e4025ada74b453dd4ca093c017c838367c9de0f801", + "0x01753e2af0b1e70e7ef80541355b2a035cc9b2c177418bb2a4402a9b346cf84d", + "0x011789b520a8068094a92aa0b04db8d8ef1c6c9818947c5210821732b8744049", + "0x011c4c4f95597305daa5f62bf5f690e37fa11f5de05a95d05cac4e2119e394db", + ] + .into_iter() + .map(|tx| Hash256::from_slice(&hex::decode(&tx[2..]).expect("should decode hex"))) + .collect::>(); + + let versioned_hashes = extract_versioned_hashes_from_transactions::(&raw_transactions) + .expect("should get versioned hashes"); + assert_eq!(versioned_hashes, expected_versioned_hashes); + } +} diff --git a/book/src/help_vc.md b/book/src/help_vc.md index 02819804315..3d2519aac57 100644 --- a/book/src/help_vc.md +++ b/book/src/help_vc.md @@ -26,6 +26,9 @@ FLAGS: but is only safe if slashing protection is enabled on the remote signer and is implemented correctly. DO NOT ENABLE THIS FLAG UNLESS YOU ARE CERTAIN THAT SLASHING PROTECTION IS ENABLED ON THE REMOTE SIGNER. 
YOU WILL GET SLASHED IF YOU USE THIS FLAG WITHOUT ENABLING WEB3SIGNER'S SLASHING PROTECTION. + --distributed + Enables functionality required for running the validator in a distributed validator cluster. + --enable-doppelganger-protection If this flag is set, Lighthouse will delay startup for three epochs and monitor for messages on the network by any of the validators managed by this client. This will result in three (possibly four) epochs worth of diff --git a/common/eth2_network_config/built_in_network_configs/mainnet/config.yaml b/common/eth2_network_config/built_in_network_configs/mainnet/config.yaml index 019eda43243..b29ecfc6d38 100644 --- a/common/eth2_network_config/built_in_network_configs/mainnet/config.yaml +++ b/common/eth2_network_config/built_in_network_configs/mainnet/config.yaml @@ -1,9 +1,15 @@ # Mainnet config # Extends the mainnet preset -CONFIG_NAME: 'mainnet' PRESET_BASE: 'mainnet' +# Free-form short name of the network that this configuration applies to - known +# canonical network names include: +# * 'mainnet' - there can be only one +# * 'prater' - testnet +# Must match the regex: [a-z0-9\-] +CONFIG_NAME: 'mainnet' + # Transition # --------------------------------------------------------------- # Estimated on Sept 15, 2022 @@ -12,6 +18,8 @@ TERMINAL_TOTAL_DIFFICULTY: 58750000000000000000000 TERMINAL_BLOCK_HASH: 0x0000000000000000000000000000000000000000000000000000000000000000 TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: 18446744073709551615 + + # Genesis # --------------------------------------------------------------- # `2**14` (= 16,384) @@ -32,22 +40,16 @@ GENESIS_DELAY: 604800 # Altair ALTAIR_FORK_VERSION: 0x01000000 -ALTAIR_FORK_EPOCH: 74240 -# Merge +ALTAIR_FORK_EPOCH: 74240 # Oct 27, 2021, 10:56:23am UTC +# Bellatrix BELLATRIX_FORK_VERSION: 0x02000000 -BELLATRIX_FORK_EPOCH: 144896 # Sept 6, 2022, 11:34:47am UTC +BELLATRIX_FORK_EPOCH: 144896 # Sept 6, 2022, 11:34:47am UTC # Capella CAPELLA_FORK_VERSION: 0x03000000 -CAPELLA_FORK_EPOCH: 
194048 # April 12, 2023, 10:27:35pm UTC +CAPELLA_FORK_EPOCH: 194048 # April 12, 2023, 10:27:35pm UTC # Deneb DENEB_FORK_VERSION: 0x04000000 -DENEB_FORK_EPOCH: 18446744073709551615 -# Sharding -SHARDING_FORK_VERSION: 0x03000000 -SHARDING_FORK_EPOCH: 18446744073709551615 - -# TBD, 2**32 is a placeholder. Merge transition approach is in active R&D. -TRANSITION_TOTAL_DIFFICULTY: 4294967296 +DENEB_FORK_EPOCH: 269568 # March 13, 2024, 01:55:35pm UTC # Time parameters @@ -74,16 +76,22 @@ INACTIVITY_SCORE_RECOVERY_RATE: 16 EJECTION_BALANCE: 16000000000 # 2**2 (= 4) MIN_PER_EPOCH_CHURN_LIMIT: 4 -# 2**3 (= 8) -MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: 8 # 2**16 (= 65,536) CHURN_LIMIT_QUOTIENT: 65536 - +# [New in Deneb:EIP7514] 2**3 (= 8) +MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: 8 # Fork choice # --------------------------------------------------------------- # 40% PROPOSER_SCORE_BOOST: 40 +# 20% +REORG_HEAD_WEIGHT_THRESHOLD: 20 +# 160% +REORG_PARENT_WEIGHT_THRESHOLD: 160 +# `2` epochs +REORG_MAX_EPOCHS_SINCE_FINALIZATION: 2 + # Deposit contract # --------------------------------------------------------------- @@ -92,17 +100,43 @@ DEPOSIT_CHAIN_ID: 1 DEPOSIT_NETWORK_ID: 1 DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cBB839Cbe05303d7705Fa -# Network + +# Networking # --------------------------------------------------------------- -SUBNETS_PER_NODE: 2 +# `10 * 2**20` (= 10485760, 10 MiB) GOSSIP_MAX_SIZE: 10485760 +# `2**10` (= 1024) +MAX_REQUEST_BLOCKS: 1024 +# `2**8` (= 256) +EPOCHS_PER_SUBNET_SUBSCRIPTION: 256 +# `MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2` (= 33024, ~5 months) MIN_EPOCHS_FOR_BLOCK_REQUESTS: 33024 +# `10 * 2**20` (=10485760, 10 MiB) MAX_CHUNK_SIZE: 10485760 +# 5s TTFB_TIMEOUT: 5 +# 10s RESP_TIMEOUT: 10 +ATTESTATION_PROPAGATION_SLOT_RANGE: 32 +# 500ms +MAXIMUM_GOSSIP_CLOCK_DISPARITY: 500 MESSAGE_DOMAIN_INVALID_SNAPPY: 0x00000000 MESSAGE_DOMAIN_VALID_SNAPPY: 0x01000000 +# 2 subnets per node +SUBNETS_PER_NODE: 2 +# 2**8 (= 64) 
ATTESTATION_SUBNET_COUNT: 64 ATTESTATION_SUBNET_EXTRA_BITS: 0 +# ceillog2(ATTESTATION_SUBNET_COUNT) + ATTESTATION_SUBNET_EXTRA_BITS ATTESTATION_SUBNET_PREFIX_BITS: 6 ATTESTATION_SUBNET_SHUFFLING_PREFIX_BITS: 3 + +# Deneb +# `2**7` (=128) +MAX_REQUEST_BLOCKS_DENEB: 128 +# MAX_REQUEST_BLOCKS_DENEB * MAX_BLOBS_PER_BLOCK +MAX_REQUEST_BLOB_SIDECARS: 768 +# `2**12` (= 4096 epochs, ~18 days) +MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096 +# `6` +BLOB_SIDECAR_SUBNET_COUNT: 6 diff --git a/common/eth2_network_config/src/lib.rs b/common/eth2_network_config/src/lib.rs index 565b8d78902..a76a8320aa8 100644 --- a/common/eth2_network_config/src/lib.rs +++ b/common/eth2_network_config/src/lib.rs @@ -583,6 +583,8 @@ mod tests { } else { GenesisStateSource::Unknown }; + // With Deneb enabled by default we must set a trusted setup here. + let kzg_trusted_setup = get_trusted_setup_from_config(&config).unwrap(); let testnet = Eth2NetworkConfig { deposit_contract_deploy_block, @@ -593,7 +595,7 @@ mod tests { .map(Encode::as_ssz_bytes) .map(Into::into), config, - kzg_trusted_setup: None, + kzg_trusted_setup: Some(kzg_trusted_setup), }; testnet diff --git a/consensus/types/presets/mainnet/deneb.yaml b/consensus/types/presets/mainnet/deneb.yaml index 6d2fb4abde9..0f56b8bdfac 100644 --- a/consensus/types/presets/mainnet/deneb.yaml +++ b/consensus/types/presets/mainnet/deneb.yaml @@ -8,5 +8,5 @@ FIELD_ELEMENTS_PER_BLOB: 4096 MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096 # `uint64(6)` MAX_BLOBS_PER_BLOCK: 6 -# `floorlog2(BLOB_KZG_COMMITMENTS_GINDEX) + 1 + ceillog2(MAX_BLOB_COMMITMENTS_PER_BLOCK)` = 4 + 1 + 12 = 17 +# `floorlog2(get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments')) + 1 + ceillog2(MAX_BLOB_COMMITMENTS_PER_BLOCK)` = 4 + 1 + 12 = 17 KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17 diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/chain_spec.rs index a182c0f98d6..c32c67fa330 100644 --- a/consensus/types/src/chain_spec.rs +++ b/consensus/types/src/chain_spec.rs @@ -681,7 
+681,7 @@ impl ChainSpec { * Deneb hard fork params */ deneb_fork_version: [0x04, 0x00, 0x00, 0x00], - deneb_fork_epoch: None, + deneb_fork_epoch: Some(Epoch::new(269568)), /* * Network specific diff --git a/lcli/Dockerfile b/lcli/Dockerfile index aed3628cf32..4f5c3f2972f 100644 --- a/lcli/Dockerfile +++ b/lcli/Dockerfile @@ -1,7 +1,7 @@ # `lcli` requires the full project to be in scope, so this should be built either: # - from the `lighthouse` dir with the command: `docker build -f ./lcli/Dockerflie .` # - from the current directory with the command: `docker build -f ./Dockerfile ../` -FROM rust:1.73.0-bullseye AS builder +FROM rust:1.75.0-bullseye AS builder RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev COPY . lighthouse ARG FEATURES diff --git a/lighthouse/Cargo.toml b/lighthouse/Cargo.toml index 8517c66c386..b14227e7edf 100644 --- a/lighthouse/Cargo.toml +++ b/lighthouse/Cargo.toml @@ -4,7 +4,7 @@ version = "4.6.0" authors = ["Sigma Prime "] edition = { workspace = true } autotests = false -rust-version = "1.73.0" +rust-version = "1.75.0" [features] default = ["slasher-lmdb"] diff --git a/lighthouse/tests/beacon_node.rs b/lighthouse/tests/beacon_node.rs index f97f17a6677..94996eb1a26 100644 --- a/lighthouse/tests/beacon_node.rs +++ b/lighthouse/tests/beacon_node.rs @@ -55,6 +55,12 @@ impl CommandLineTest { } fn run_with_zero_port(&mut self) -> CompletedTest { + // Required since Deneb was enabled on mainnet. 
+ self.cmd.arg("--allow-insecure-genesis-sync"); + self.run_with_zero_port_and_no_genesis_sync() + } + + fn run_with_zero_port_and_no_genesis_sync(&mut self) -> CompletedTest { self.cmd.arg("-z"); self.run() } @@ -93,16 +99,16 @@ fn staking_flag() { } #[test] -fn allow_insecure_genesis_sync() { - CommandLineTest::new() - .run_with_zero_port() - .with_config(|config| { - assert_eq!(config.allow_insecure_genesis_sync, false); - }); +#[should_panic] +fn allow_insecure_genesis_sync_default() { + CommandLineTest::new().run_with_zero_port_and_no_genesis_sync(); +} +#[test] +fn allow_insecure_genesis_sync_enabled() { CommandLineTest::new() .flag("allow-insecure-genesis-sync", None) - .run_with_zero_port() + .run_with_zero_port_and_no_genesis_sync() .with_config(|config| { assert_eq!(config.allow_insecure_genesis_sync, true); }); @@ -851,6 +857,7 @@ fn network_port_flag_over_ipv4() { let port = 0; CommandLineTest::new() .flag("port", Some(port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -867,6 +874,7 @@ fn network_port_flag_over_ipv4() { let port = 9000; CommandLineTest::new() .flag("port", Some(port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -886,6 +894,7 @@ fn network_port_flag_over_ipv6() { CommandLineTest::new() .flag("listen-address", Some("::1")) .flag("port", Some(port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -903,6 +912,7 @@ fn network_port_flag_over_ipv6() { CommandLineTest::new() .flag("listen-address", Some("::1")) .flag("port", Some(port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -925,6 +935,7 @@ fn network_port_flag_over_ipv4_and_ipv6() { .flag("listen-address", Some("::1")) .flag("port", Some(port.to_string().as_str())) .flag("port6", Some(port6.to_string().as_str())) + 
.flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -954,6 +965,7 @@ fn network_port_flag_over_ipv4_and_ipv6() { .flag("listen-address", Some("::1")) .flag("port", Some(port.to_string().as_str())) .flag("port6", Some(port6.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -983,6 +995,7 @@ fn network_port_and_discovery_port_flags_over_ipv4() { CommandLineTest::new() .flag("port", Some(tcp4_port.to_string().as_str())) .flag("discovery-port", Some(disc4_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1003,6 +1016,7 @@ fn network_port_and_discovery_port_flags_over_ipv6() { .flag("listen-address", Some("::1")) .flag("port", Some(tcp6_port.to_string().as_str())) .flag("discovery-port", Some(disc6_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1028,6 +1042,7 @@ fn network_port_and_discovery_port_flags_over_ipv4_and_ipv6() { .flag("discovery-port", Some(disc4_port.to_string().as_str())) .flag("port6", Some(tcp6_port.to_string().as_str())) .flag("discovery-port6", Some(disc6_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1067,6 +1082,7 @@ fn network_port_discovery_quic_port_flags_over_ipv4_and_ipv6() { .flag("port6", Some(tcp6_port.to_string().as_str())) .flag("discovery-port6", Some(disc6_port.to_string().as_str())) .flag("quic-port6", Some(quic6_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1293,6 +1309,7 @@ fn enr_match_flag_over_ipv4() { .flag("listen-address", Some("127.0.0.2")) .flag("discovery-port", Some(udp4_port.to_string().as_str())) .flag("port", Some(tcp4_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ 
-1324,6 +1341,7 @@ fn enr_match_flag_over_ipv6() { .flag("listen-address", Some(ADDR)) .flag("discovery-port", Some(udp6_port.to_string().as_str())) .flag("port", Some(tcp6_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1364,6 +1382,7 @@ fn enr_match_flag_over_ipv4_and_ipv6() { .flag("listen-address", Some(IPV6_ADDR)) .flag("discovery-port6", Some(udp6_port.to_string().as_str())) .flag("port6", Some(tcp6_port.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| { assert_eq!( @@ -1490,6 +1509,7 @@ fn http_port_flag() { .flag("http", None) .flag("http-port", Some(port1.to_string().as_str())) .flag("port", Some(port2.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| assert_eq!(config.http_api.listen_port, port1)); } @@ -1647,6 +1667,7 @@ fn metrics_port_flag() { .flag("metrics", None) .flag("metrics-port", Some(port1.to_string().as_str())) .flag("port", Some(port2.to_string().as_str())) + .flag("allow-insecure-genesis-sync", None) .run() .with_config(|config| assert_eq!(config.http_metrics.listen_port, port1)); } diff --git a/testing/execution_engine_integration/src/test_rig.rs b/testing/execution_engine_integration/src/test_rig.rs index b0701e80a1b..bfa56f63c0d 100644 --- a/testing/execution_engine_integration/src/test_rig.rs +++ b/testing/execution_engine_integration/src/test_rig.rs @@ -381,7 +381,7 @@ impl TestRig { let status = self .ee_a .execution_layer - .notify_new_payload(valid_payload.clone().try_into().unwrap()) + .notify_new_payload(valid_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert_eq!(status, PayloadStatus::Valid); @@ -435,7 +435,7 @@ impl TestRig { let status = self .ee_a .execution_layer - .notify_new_payload(invalid_payload.try_into().unwrap()) + .notify_new_payload(invalid_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert!(matches!( @@ -507,7 +507,7 @@ impl 
TestRig { let status = self .ee_a .execution_layer - .notify_new_payload(second_payload.clone().try_into().unwrap()) + .notify_new_payload(second_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert_eq!(status, PayloadStatus::Valid); @@ -559,7 +559,7 @@ impl TestRig { let status = self .ee_b .execution_layer - .notify_new_payload(second_payload.clone().try_into().unwrap()) + .notify_new_payload(second_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert!(matches!(status, PayloadStatus::Syncing)); @@ -597,7 +597,7 @@ impl TestRig { let status = self .ee_b .execution_layer - .notify_new_payload(valid_payload.clone().try_into().unwrap()) + .notify_new_payload(valid_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert_eq!(status, PayloadStatus::Valid); @@ -611,7 +611,7 @@ impl TestRig { let status = self .ee_b .execution_layer - .notify_new_payload(second_payload.clone().try_into().unwrap()) + .notify_new_payload(second_payload.to_ref().try_into().unwrap()) .await .unwrap(); assert_eq!(status, PayloadStatus::Valid); diff --git a/validator_client/Cargo.toml b/validator_client/Cargo.toml index 90a82b7e3b2..8e587c6155f 100644 --- a/validator_client/Cargo.toml +++ b/validator_client/Cargo.toml @@ -10,6 +10,7 @@ path = "src/lib.rs" [dev-dependencies] tokio = { workspace = true } +itertools = { workspace = true } [dependencies] tree_hash = { workspace = true } @@ -51,7 +52,6 @@ ring = { workspace = true } rand = { workspace = true, features = ["small_rng"] } lighthouse_metrics = { workspace = true } lazy_static = { workspace = true } -itertools = { workspace = true } monitoring_api = { workspace = true } sensitive_url = { workspace = true } task_executor = { workspace = true } diff --git a/validator_client/src/cli.rs b/validator_client/src/cli.rs index e4e5a2f1307..16a265212e5 100644 --- a/validator_client/src/cli.rs +++ b/validator_client/src/cli.rs @@ -145,6 +145,12 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> { future.") .takes_value(false) ) + 
.arg( + Arg::with_name("distributed") + .long("distributed") + .help("Enables functionality required for running the validator in a distributed validator cluster.") + .takes_value(false) + ) /* REST API related arguments */ .arg( Arg::with_name("http") diff --git a/validator_client/src/config.rs b/validator_client/src/config.rs index 7ac9e3e3bc7..ae59829a3e6 100644 --- a/validator_client/src/config.rs +++ b/validator_client/src/config.rs @@ -84,6 +84,8 @@ pub struct Config { pub builder_boost_factor: Option, /// If true, Lighthouse will prefer builder proposals, if available. pub prefer_builder_proposals: bool, + /// Whether we are running with distributed network support. + pub distributed: bool, pub web3_signer_keep_alive_timeout: Option, pub web3_signer_max_idle_connections: Option, } @@ -130,6 +132,7 @@ impl Default for Config { produce_block_v3: false, builder_boost_factor: None, prefer_builder_proposals: false, + distributed: false, web3_signer_keep_alive_timeout: Some(Duration::from_secs(90)), web3_signer_max_idle_connections: None, } @@ -233,6 +236,10 @@ impl Config { config.beacon_nodes_tls_certs = Some(tls_certs.split(',').map(PathBuf::from).collect()); } + if cli_args.is_present("distributed") { + config.distributed = true; + } + if cli_args.is_present("disable-run-on-all") { warn!( log, diff --git a/validator_client/src/duties_service.rs b/validator_client/src/duties_service.rs index 26747f81110..290803e257a 100644 --- a/validator_client/src/duties_service.rs +++ b/validator_client/src/duties_service.rs @@ -6,7 +6,7 @@ //! The `DutiesService` is also responsible for sending events to the `BlockService` which trigger //! block production. 
-mod sync; +pub mod sync; use crate::beacon_node_fallback::{ApiTopic, BeaconNodeFallback, OfflineOnFailure, RequireSynced}; use crate::http_metrics::metrics::{get_int_gauge, set_int_gauge, ATTESTATION_DUTY}; @@ -42,6 +42,9 @@ const HISTORICAL_DUTIES_EPOCHS: u64 = 2; /// At start-up selection proofs will be computed with less lookahead out of necessity. const SELECTION_PROOF_SLOT_LOOKAHEAD: u64 = 8; +/// The attestation selection proof lookahead for those running with the --distributed flag. +const SELECTION_PROOF_SLOT_LOOKAHEAD_DVT: u64 = 1; + /// Fraction of a slot at which selection proof signing should happen (2 means half way). const SELECTION_PROOF_SCHEDULE_DENOM: u32 = 2; @@ -211,16 +214,21 @@ pub struct DutiesService { /// proposals for any validators which are not registered locally. pub proposers: RwLock, /// Map from validator index to sync committee duties. - pub sync_duties: SyncDutiesMap, + pub sync_duties: SyncDutiesMap, /// Provides the canonical list of locally-managed validators. pub validator_store: Arc>, /// Tracks the current slot. pub slot_clock: T, /// Provides HTTP access to remote beacon nodes. pub beacon_nodes: Arc>, - pub enable_high_validator_count_metrics: bool, + /// The runtime for spawning tasks. pub context: RuntimeContext, + /// The current chain spec. pub spec: ChainSpec, + /// Whether we permit large validator counts in the metrics. + pub enable_high_validator_count_metrics: bool, + /// If this validator is running in distributed mode.
+ pub distributed: bool, } impl DutiesService { @@ -997,7 +1005,13 @@ async fn fill_in_selection_proofs( continue; }; - let lookahead_slot = current_slot + SELECTION_PROOF_SLOT_LOOKAHEAD; + let selection_lookahead = if duties_service.distributed { + SELECTION_PROOF_SLOT_LOOKAHEAD_DVT + } else { + SELECTION_PROOF_SLOT_LOOKAHEAD + }; + + let lookahead_slot = current_slot + selection_lookahead; let mut relevant_duties = duties_by_slot.split_off(&lookahead_slot); std::mem::swap(&mut relevant_duties, &mut duties_by_slot); diff --git a/validator_client/src/duties_service/sync.rs b/validator_client/src/duties_service/sync.rs index de42fa587ef..3618b47146f 100644 --- a/validator_client/src/duties_service/sync.rs +++ b/validator_client/src/duties_service/sync.rs @@ -7,18 +7,18 @@ use crate::{ }; use futures::future::join_all; -use itertools::Itertools; use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard, RwLockWriteGuard}; use slog::{crit, debug, info, warn}; use slot_clock::SlotClock; use std::collections::{HashMap, HashSet}; +use std::marker::PhantomData; use std::sync::Arc; -use types::{ - ChainSpec, Epoch, EthSpec, PublicKeyBytes, Slot, SyncDuty, SyncSelectionProof, SyncSubnetId, -}; +use types::{ChainSpec, EthSpec, PublicKeyBytes, Slot, SyncDuty, SyncSelectionProof, SyncSubnetId}; -/// Number of epochs in advance to compute selection proofs. +/// Number of epochs in advance to compute selection proofs when not in `distributed` mode. pub const AGGREGATION_PRE_COMPUTE_EPOCHS: u64 = 2; +/// Number of slots in advance to compute selection proofs when in `distributed` mode. +pub const AGGREGATION_PRE_COMPUTE_SLOTS_DISTRIBUTED: u64 = 1; /// Top-level data-structure containing sync duty information. /// @@ -32,9 +32,12 @@ pub const AGGREGATION_PRE_COMPUTE_EPOCHS: u64 = 2; /// 2. One-at-a-time locking. For the innermost locks on the aggregator duties, all of the functions /// in this file take care to only lock one validator at a time. 
We never hold a lock while /// trying to obtain another one (hence no lock ordering issues). -pub struct SyncDutiesMap { +pub struct SyncDutiesMap { /// Map from sync committee period to duties for members of that sync committee. committees: RwLock>, + /// Whether we are in `distributed` mode and using reduced lookahead for aggregate pre-compute. + distributed: bool, + _phantom: PhantomData, } /// Duties for a single sync committee period. @@ -59,8 +62,8 @@ pub struct ValidatorDuties { /// Aggregator duties for a single validator. pub struct AggregatorDuties { - /// The epoch up to which aggregation proofs have already been computed (inclusive). - pre_compute_epoch: RwLock>, + /// The slot up to which aggregation proofs have already been computed (inclusive). + pre_compute_slot: RwLock>, /// Map from slot & subnet ID to proof that this validator is an aggregator. /// /// The slot is the slot at which the signed contribution and proof should be broadcast, @@ -82,15 +85,15 @@ pub struct SlotDuties { pub aggregators: HashMap>, } -impl Default for SyncDutiesMap { - fn default() -> Self { +impl SyncDutiesMap { + pub fn new(distributed: bool) -> Self { Self { committees: RwLock::new(HashMap::new()), + distributed, + _phantom: PhantomData, } } -} -impl SyncDutiesMap { /// Check if duties are already known for all of the given validators for `committee_period`. fn all_duties_known(&self, committee_period: u64, validator_indices: &[u64]) -> bool { self.committees @@ -104,22 +107,34 @@ impl SyncDutiesMap { }) } + /// Number of slots in advance to compute selection proofs + fn aggregation_pre_compute_slots(&self) -> u64 { + if self.distributed { + AGGREGATION_PRE_COMPUTE_SLOTS_DISTRIBUTED + } else { + E::slots_per_epoch() * AGGREGATION_PRE_COMPUTE_EPOCHS + } + } + /// Prepare for pre-computation of selection proofs for `committee_period`. 
/// - /// Return the epoch up to which proofs should be pre-computed, as well as a vec of - /// `(previous_pre_compute_epoch, sync_duty)` pairs for all validators which need to have proofs + /// Return the slot up to which proofs should be pre-computed, as well as a vec of + /// `(previous_pre_compute_slot, sync_duty)` pairs for all validators which need to have proofs /// computed. See `fill_in_aggregation_proofs` for the actual calculation. fn prepare_for_aggregator_pre_compute( &self, committee_period: u64, - current_epoch: Epoch, + current_slot: Slot, spec: &ChainSpec, - ) -> (Epoch, Vec<(Epoch, SyncDuty)>) { - let default_start_epoch = - std::cmp::max(current_epoch, first_epoch_of_period(committee_period, spec)); - let pre_compute_epoch = std::cmp::min( - current_epoch + AGGREGATION_PRE_COMPUTE_EPOCHS, - last_epoch_of_period(committee_period, spec), + ) -> (Slot, Vec<(Slot, SyncDuty)>) { + let default_start_slot = std::cmp::max( + current_slot, + first_slot_of_period::(committee_period, spec), + ); + let pre_compute_lookahead_slots = self.aggregation_pre_compute_slots(); + let pre_compute_slot = std::cmp::min( + current_slot + pre_compute_lookahead_slots, + last_slot_of_period::(committee_period, spec), ); let pre_compute_duties = self.committees.read().get(&committee_period).map_or_else( @@ -130,18 +145,18 @@ impl SyncDutiesMap { .values() .filter_map(|maybe_duty| { let duty = maybe_duty.as_ref()?; - let old_pre_compute_epoch = duty + let old_pre_compute_slot = duty .aggregation_duties - .pre_compute_epoch + .pre_compute_slot .write() - .replace(pre_compute_epoch); + .replace(pre_compute_slot); - match old_pre_compute_epoch { + match old_pre_compute_slot { // No proofs pre-computed previously, compute all from the start of - // the period or the current epoch (whichever is later). - None => Some((default_start_epoch, duty.duty.clone())), + // the period or the current slot (whichever is later). 
+ None => Some((default_start_slot, duty.duty.clone())), - // Proofs computed up to `prev`, start from the subsequent epoch. + // Proofs computed up to `prev`, start from the subsequent slot. - Some(prev) if prev < pre_compute_epoch => { + Some(prev) if prev < pre_compute_slot => { Some((prev + 1, duty.duty.clone())) } // Proofs already known, no need to compute. @@ -151,7 +166,7 @@ impl SyncDutiesMap { .collect() }, ); - (pre_compute_epoch, pre_compute_duties) + (pre_compute_slot, pre_compute_duties) } fn get_or_create_committee_duties<'a, 'b>( @@ -176,7 +191,7 @@ impl SyncDutiesMap { /// Get duties for all validators for the given `wall_clock_slot`. /// /// This is the entry-point for the sync committee service. - pub fn get_duties_for_slot( + pub fn get_duties_for_slot( &self, wall_clock_slot: Slot, spec: &ChainSpec, @@ -253,7 +268,7 @@ impl SyncDutiesMap { Self { duty, aggregation_duties: AggregatorDuties { - pre_compute_epoch: RwLock::new(None), + pre_compute_slot: RwLock::new(None), proofs: RwLock::new(HashMap::new()), }, } @@ -265,12 +280,12 @@ fn epoch_offset(spec: &ChainSpec) -> u64 { spec.epochs_per_sync_committee_period.as_u64() / 2 } -fn first_epoch_of_period(sync_committee_period: u64, spec: &ChainSpec) -> Epoch { - spec.epochs_per_sync_committee_period * sync_committee_period +fn first_slot_of_period(sync_committee_period: u64, spec: &ChainSpec) -> Slot { + (spec.epochs_per_sync_committee_period * sync_committee_period).start_slot(E::slots_per_epoch()) } -fn last_epoch_of_period(sync_committee_period: u64, spec: &ChainSpec) -> Epoch { - first_epoch_of_period(sync_committee_period + 1, spec) - 1 +fn last_slot_of_period(sync_committee_period: u64, spec: &ChainSpec) -> Slot { + first_slot_of_period::(sync_committee_period + 1, spec) - 1 } pub async fn poll_sync_committee_duties( @@ -278,11 +293,11 @@ pub async fn poll_sync_committee_duties( ) -> Result<(), Error> { let sync_duties = &duties_service.sync_duties; let spec = &duties_service.spec; - let current_epoch = duties_service
.slot_clock .now() - .ok_or(Error::UnableToReadSlotClock)? - .epoch(E::slots_per_epoch()); + .ok_or(Error::UnableToReadSlotClock)?; + let current_epoch = current_slot.epoch(E::slots_per_epoch()); // If the Altair fork is yet to be activated, do not attempt to poll for duties. if spec @@ -330,8 +345,8 @@ pub async fn poll_sync_committee_duties( } // Pre-compute aggregator selection proofs for the current period. - let (current_pre_compute_epoch, new_pre_compute_duties) = sync_duties - .prepare_for_aggregator_pre_compute(current_sync_committee_period, current_epoch, spec); + let (current_pre_compute_slot, new_pre_compute_duties) = sync_duties + .prepare_for_aggregator_pre_compute(current_sync_committee_period, current_slot, spec); if !new_pre_compute_duties.is_empty() { let sub_duties_service = duties_service.clone(); @@ -341,8 +356,8 @@ pub async fn poll_sync_committee_duties( sub_duties_service, &new_pre_compute_duties, current_sync_committee_period, - current_epoch, - current_pre_compute_epoch, + current_slot, + current_pre_compute_slot, ) .await }, @@ -368,11 +383,14 @@ pub async fn poll_sync_committee_duties( } // Pre-compute aggregator selection proofs for the next period. - if (current_epoch + AGGREGATION_PRE_COMPUTE_EPOCHS).sync_committee_period(spec)? + let aggregate_pre_compute_lookahead_slots = sync_duties.aggregation_pre_compute_slots(); + if (current_slot + aggregate_pre_compute_lookahead_slots) + .epoch(E::slots_per_epoch()) + .sync_committee_period(spec)? 
== next_sync_committee_period { - let (pre_compute_epoch, new_pre_compute_duties) = sync_duties - .prepare_for_aggregator_pre_compute(next_sync_committee_period, current_epoch, spec); + let (pre_compute_slot, new_pre_compute_duties) = sync_duties + .prepare_for_aggregator_pre_compute(next_sync_committee_period, current_slot, spec); if !new_pre_compute_duties.is_empty() { let sub_duties_service = duties_service.clone(); @@ -382,8 +400,8 @@ pub async fn poll_sync_committee_duties( sub_duties_service, &new_pre_compute_duties, next_sync_committee_period, - current_epoch, - pre_compute_epoch, + current_slot, + pre_compute_slot, ) .await }, @@ -495,10 +513,10 @@ pub async fn poll_sync_committee_duties_for_period( duties_service: Arc>, - pre_compute_duties: &[(Epoch, SyncDuty)], + pre_compute_duties: &[(Slot, SyncDuty)], sync_committee_period: u64, - current_epoch: Epoch, - pre_compute_epoch: Epoch, + current_slot: Slot, + pre_compute_slot: Slot, ) { let log = duties_service.context.log(); @@ -506,16 +524,16 @@ pub async fn fill_in_aggregation_proofs( log, "Calculating sync selection proofs"; "period" => sync_committee_period, - "current_epoch" => current_epoch, - "pre_compute_epoch" => pre_compute_epoch + "current_slot" => current_slot, + "pre_compute_slot" => pre_compute_slot ); - // Generate selection proofs for each validator at each slot, one epoch at a time. - for epoch in (current_epoch.as_u64()..=pre_compute_epoch.as_u64()).map(Epoch::new) { + // Generate selection proofs for each validator at each slot, one slot at a time. + for slot in (current_slot.as_u64()..=pre_compute_slot.as_u64()).map(Slot::new) { let mut validator_proofs = vec![]; - for (validator_start_epoch, duty) in pre_compute_duties { - // Proofs are already known at this epoch for this validator. - if epoch < *validator_start_epoch { + for (validator_start_slot, duty) in pre_compute_duties { + // Proofs are already known at this slot for this validator. 
+ if slot < *validator_start_slot { continue; } @@ -533,67 +551,64 @@ pub async fn fill_in_aggregation_proofs( // Create futures to produce proofs. let duties_service_ref = &duties_service; - let futures = epoch - .slot_iter(E::slots_per_epoch()) - .cartesian_product(&subnet_ids) - .map(|(duty_slot, subnet_id)| async move { - // Construct proof for prior slot. - let slot = duty_slot - 1; - - let proof = match duties_service_ref - .validator_store - .produce_sync_selection_proof(&duty.pubkey, slot, *subnet_id) - .await - { - Ok(proof) => proof, - Err(ValidatorStoreError::UnknownPubkey(pubkey)) => { - // A pubkey can be missing when a validator was recently - // removed via the API. - debug!( - log, - "Missing pubkey for sync selection proof"; - "pubkey" => ?pubkey, - "pubkey" => ?duty.pubkey, - "slot" => slot, - ); - return None; - } - Err(e) => { - warn!( - log, - "Unable to sign selection proof"; - "error" => ?e, - "pubkey" => ?duty.pubkey, - "slot" => slot, - ); - return None; - } - }; - - match proof.is_aggregator::() { - Ok(true) => { - debug!( - log, - "Validator is sync aggregator"; - "validator_index" => duty.validator_index, - "slot" => slot, - "subnet_id" => %subnet_id, - ); - Some(((slot, *subnet_id), proof)) - } - Ok(false) => None, - Err(e) => { - warn!( - log, - "Error determining is_aggregator"; - "pubkey" => ?duty.pubkey, - "slot" => slot, - "error" => ?e, - ); - None - } + let futures = subnet_ids.iter().map(|subnet_id| async move { + // Construct proof for prior slot. + let proof_slot = slot - 1; + + let proof = match duties_service_ref + .validator_store + .produce_sync_selection_proof(&duty.pubkey, proof_slot, *subnet_id) + .await + { + Ok(proof) => proof, + Err(ValidatorStoreError::UnknownPubkey(pubkey)) => { + // A pubkey can be missing when a validator was recently + // removed via the API. 
+ debug!( + log, + "Missing pubkey for sync selection proof"; + "pubkey" => ?pubkey, + "pubkey" => ?duty.pubkey, + "slot" => proof_slot, + ); + return None; + } + Err(e) => { + warn!( + log, + "Unable to sign selection proof"; + "error" => ?e, + "pubkey" => ?duty.pubkey, + "slot" => proof_slot, + ); + return None; } - }); + }; + + match proof.is_aggregator::() { + Ok(true) => { + debug!( + log, + "Validator is sync aggregator"; + "validator_index" => duty.validator_index, + "slot" => proof_slot, + "subnet_id" => %subnet_id, + ); + Some(((proof_slot, *subnet_id), proof)) + } + Ok(false) => None, + Err(e) => { + warn!( + log, + "Error determining is_aggregator"; + "pubkey" => ?duty.pubkey, + "slot" => proof_slot, + "error" => ?e, + ); + None + } + } + }); // Execute all the futures in parallel, collecting any successful results. let proofs = join_all(futures) @@ -635,7 +650,7 @@ pub async fn fill_in_aggregation_proofs( debug!( log, "Finished computing sync selection proofs"; - "epoch" => epoch, + "slot" => slot, "updated_validators" => num_validators_updated, ); } diff --git a/validator_client/src/lib.rs b/validator_client/src/lib.rs index 4828f43a0d8..52de95a3735 100644 --- a/validator_client/src/lib.rs +++ b/validator_client/src/lib.rs @@ -39,7 +39,7 @@ use account_utils::validator_definitions::ValidatorDefinitions; use attestation_service::{AttestationService, AttestationServiceBuilder}; use block_service::{BlockService, BlockServiceBuilder}; use clap::ArgMatches; -use duties_service::DutiesService; +use duties_service::{sync::SyncDutiesMap, DutiesService}; use environment::RuntimeContext; use eth2::{reqwest::ClientBuilder, types::Graffiti, BeaconNodeHttpClient, StatusCode, Timeouts}; use http_api::ApiSecret; @@ -451,13 +451,14 @@ impl ProductionValidatorClient { let duties_service = Arc::new(DutiesService { attesters: <_>::default(), proposers: <_>::default(), - sync_duties: <_>::default(), + sync_duties: SyncDutiesMap::new(config.distributed), slot_clock: 
slot_clock.clone(), beacon_nodes: beacon_nodes.clone(), validator_store: validator_store.clone(), spec: context.eth2_config.spec.clone(), context: duties_context, enable_high_validator_count_metrics: config.enable_high_validator_count_metrics, + distributed: config.distributed, }); // Update the metrics server. diff --git a/validator_client/src/sync_committee_service.rs b/validator_client/src/sync_committee_service.rs index 90b62cd3b44..f7abb3855a3 100644 --- a/validator_client/src/sync_committee_service.rs +++ b/validator_client/src/sync_committee_service.rs @@ -161,7 +161,7 @@ impl SyncCommitteeService { let Some(slot_duties) = self .duties_service .sync_duties - .get_duties_for_slot::(slot, &self.duties_service.spec) + .get_duties_for_slot(slot, &self.duties_service.spec) else { debug!(log, "No duties known for slot {}", slot); return Ok(()); @@ -548,7 +548,7 @@ impl SyncCommitteeService { match self .duties_service .sync_duties - .get_duties_for_slot::(duty_slot, spec) + .get_duties_for_slot(duty_slot, spec) { Some(duties) => subscriptions.extend(subscriptions_from_sync_duties( duties.duties,