Skip to content

CBST2-09: Add duplicate key removal to the Lido and SSV key fetchers #318

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jul 4, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions crates/common/src/config/mux.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use tracing::{debug, info};
use url::Url;

use super::{load_optional_env_var, PbsConfig, RelayConfig, MUX_PATH_ENV};
use crate::{pbs::RelayClient, types::Chain};
use crate::{config::remove_duplicate_keys, pbs::RelayClient, types::Chain};

#[derive(Debug, Deserialize, Serialize)]
pub struct PbsMuxes {
Expand Down Expand Up @@ -164,7 +164,7 @@ impl MuxKeysLoader {
chain: Chain,
rpc_url: Option<Url>,
) -> eyre::Result<Vec<BlsPublicKey>> {
match self {
let keys = match self {
Self::File(config_path) => {
// First try loading from env
let path: PathBuf = load_optional_env_var(&get_mux_env(mux_id))
Expand Down Expand Up @@ -192,7 +192,11 @@ impl MuxKeysLoader {
}
NORegistry::SSV => fetch_ssv_pubkeys(chain, U256::from(*node_operator_id)).await,
},
}
}?;

// Remove duplicates
let deduped_keys = remove_duplicate_keys(keys);
Ok(deduped_keys)
}
}

Expand Down Expand Up @@ -277,8 +281,6 @@ async fn fetch_lido_registry_keys(
}

ensure!(keys.len() == total_keys as usize, "expected {total_keys} keys, got {}", keys.len());
let unique = keys.iter().collect::<HashSet<_>>();
ensure!(unique.len() == keys.len(), "found duplicate keys in registry");

Ok(keys)
}
Expand Down Expand Up @@ -326,9 +328,6 @@ async fn fetch_ssv_pubkeys(
}
}

let unique = pubkeys.iter().collect::<HashSet<_>>();
ensure!(unique.len() == pubkeys.len(), "found duplicate keys in registry");

Ok(pubkeys)
}

Expand Down
27 changes: 27 additions & 0 deletions crates/common/src/config/utils.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use std::{collections::HashMap, path::Path};

use alloy::rpc::types::beacon::BlsPublicKey;
use eyre::{bail, Context, Result};
use serde::de::DeserializeOwned;

Expand Down Expand Up @@ -30,6 +31,20 @@ pub fn load_jwt_secrets() -> Result<HashMap<ModuleId, String>> {
decode_string_to_map(&jwt_secrets)
}

/// Removes duplicate entries from a vector of BlsPublicKey, preserving the
/// order of first occurrence.
///
/// Deduplicates in place via `Vec::retain`, so no second vector is
/// allocated; the `HashSet` is pre-sized to the input length to avoid
/// rehashing while scanning.
pub fn remove_duplicate_keys(mut keys: Vec<BlsPublicKey>) -> Vec<BlsPublicKey> {
    let mut seen = std::collections::HashSet::with_capacity(keys.len());

    // `HashSet::insert` returns false for a value already present, so
    // `retain` keeps only the first occurrence of each key.
    keys.retain(|key| seen.insert(*key));

    keys
}

fn decode_string_to_map(raw: &str) -> Result<HashMap<ModuleId, String>> {
// trim the string and split for comma
raw.trim()
Expand Down Expand Up @@ -57,4 +72,16 @@ mod tests {
assert_eq!(map.get(&ModuleId("KEY".into())), Some(&"VALUE".to_string()));
assert_eq!(map.get(&ModuleId("KEY2".into())), Some(&"value2".to_string()));
}

#[test]
fn test_remove_duplicate_keys() {
    // Two distinct keys, with the first one repeated.
    let first = BlsPublicKey::from([1; 48]);
    let second = BlsPublicKey::from([2; 48]);
    let input = vec![first, second, first];

    let result = remove_duplicate_keys(input);

    // The duplicate is dropped; both distinct keys survive.
    assert_eq!(result.len(), 2);
    assert!(result.contains(&first));
    assert!(result.contains(&second));
}
}
Loading