diff --git a/crates/common/src/config/mux.rs b/crates/common/src/config/mux.rs
index 7510102c..1e6451c9 100644
--- a/crates/common/src/config/mux.rs
+++ b/crates/common/src/config/mux.rs
@@ -16,7 +16,7 @@ use tracing::{debug, info};
 use url::Url;
 
 use super::{load_optional_env_var, PbsConfig, RelayConfig, MUX_PATH_ENV};
-use crate::{pbs::RelayClient, types::Chain};
+use crate::{config::remove_duplicate_keys, pbs::RelayClient, types::Chain};
 
 #[derive(Debug, Deserialize, Serialize)]
 pub struct PbsMuxes {
@@ -164,7 +164,7 @@ impl MuxKeysLoader {
         chain: Chain,
         rpc_url: Option<Url>,
     ) -> eyre::Result<Vec<BlsPublicKey>> {
-        match self {
+        let keys = match self {
             Self::File(config_path) => {
                 // First try loading from env
                 let path: PathBuf = load_optional_env_var(&get_mux_env(mux_id))
@@ -192,7 +192,11 @@ impl MuxKeysLoader {
                 }
                 NORegistry::SSV => fetch_ssv_pubkeys(chain, U256::from(*node_operator_id)).await,
             },
-        }
+        }?;
+
+        // Remove duplicates
+        let deduped_keys = remove_duplicate_keys(keys);
+        Ok(deduped_keys)
     }
 }
 
@@ -277,8 +281,6 @@ async fn fetch_lido_registry_keys(
     }
 
     ensure!(keys.len() == total_keys as usize, "expected {total_keys} keys, got {}", keys.len());
-    let unique = keys.iter().collect::<HashSet<_>>();
-    ensure!(unique.len() == keys.len(), "found duplicate keys in registry");
 
     Ok(keys)
 }
@@ -326,9 +328,6 @@ async fn fetch_ssv_pubkeys(
         }
     }
 
-    let unique = pubkeys.iter().collect::<HashSet<_>>();
-    ensure!(unique.len() == pubkeys.len(), "found duplicate keys in registry");
-
     Ok(pubkeys)
 }
 
diff --git a/crates/common/src/config/utils.rs b/crates/common/src/config/utils.rs
index 67c367c5..d94fd826 100644
--- a/crates/common/src/config/utils.rs
+++ b/crates/common/src/config/utils.rs
@@ -1,5 +1,6 @@
 use std::{collections::HashMap, path::Path};
 
+use alloy::rpc::types::beacon::BlsPublicKey;
 use eyre::{bail, Context, Result};
 use serde::de::DeserializeOwned;
 
@@ -30,6 +31,20 @@ pub fn load_jwt_secrets() -> Result<HashMap<ModuleId, String>> {
     decode_string_to_map(&jwt_secrets)
 }
 
+/// Removes duplicate entries from a vector of BlsPublicKey
+pub fn remove_duplicate_keys(keys: Vec<BlsPublicKey>) -> Vec<BlsPublicKey> {
+    let mut unique_keys = Vec::new();
+    let mut key_set = std::collections::HashSet::new();
+
+    for key in keys {
+        if key_set.insert(key) {
+            unique_keys.push(key);
+        }
+    }
+
+    unique_keys
+}
+
 fn decode_string_to_map(raw: &str) -> Result<HashMap<ModuleId, String>> {
     // trim the string and split for comma
     raw.trim()
@@ -57,4 +72,16 @@ mod tests {
         assert_eq!(map.get(&ModuleId("KEY".into())), Some(&"VALUE".to_string()));
         assert_eq!(map.get(&ModuleId("KEY2".into())), Some(&"value2".to_string()));
     }
+
+    #[test]
+    fn test_remove_duplicate_keys() {
+        let key1 = BlsPublicKey::from([1; 48]);
+        let key2 = BlsPublicKey::from([2; 48]);
+        let keys = vec![key1, key2, key1];
+
+        let unique_keys = remove_duplicate_keys(keys);
+        assert_eq!(unique_keys.len(), 2);
+        assert!(unique_keys.contains(&key1));
+        assert!(unique_keys.contains(&key2));
+    }
 }
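
The new `remove_duplicate_keys` helper is an order-preserving dedup: `HashSet::insert` returns `false` for a value already in the set, so only the first occurrence of each key survives. Note the behavior change this diff implies: duplicate keys from a registry no longer fail the removed `ensure!` checks and are instead silently collapsed. Below is a minimal standalone sketch of the same pattern; it uses `[u8; 48]` arrays as a stand-in for `BlsPublicKey` (a 48-byte value, per the `BlsPublicKey::from([1; 48])` test above) so it runs without the alloy dependency, and the generic helper name is hypothetical.

```rust
use std::collections::HashSet;

/// Order-preserving dedup mirroring the `remove_duplicate_keys` helper:
/// `HashSet::insert` returns false for values already seen, so only the
/// first occurrence of each item is kept.
fn dedup_preserving_order<T: std::hash::Hash + Eq + Copy>(items: Vec<T>) -> Vec<T> {
    let mut seen = HashSet::new();
    items.into_iter().filter(|item| seen.insert(*item)).collect()
}

fn main() {
    let key1 = [1u8; 48]; // stand-in for BlsPublicKey::from([1; 48])
    let key2 = [2u8; 48];

    let deduped = dedup_preserving_order(vec![key1, key2, key1]);
    assert_eq!(deduped, vec![key1, key2]); // duplicate dropped, order kept
    println!("kept {} of 3 keys", deduped.len());
}
```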