Merge pull request #147 from HerodotusDev/refactor/trait
chore: cleanup primitives
rkdud007 authored Sep 15, 2024
2 parents 826e44a + 0a6164e commit 4ccd4f5
Showing 38 changed files with 337 additions and 176 deletions.
2 changes: 1 addition & 1 deletion cli/src/commands/run_datalake.rs
@@ -17,7 +17,7 @@ pub struct RunDatalakeArgs {
pub aggregate_fn_id: AggregationFunction,
/// Optional context for applying conditions on the aggregate function "count".
/// Format: "{operator}.{value}" (e.g., "eq.100" for equality, "gt.100" for greater-than).
/// Supported operators are in the [`Operator`] enum.
/// Supported operators are in the Operator enum.
pub aggregate_fn_ctx: Option<FunctionContext>,

#[command(subcommand)]
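The doc comment above describes the "{operator}.{value}" context format (e.g. "eq.100", "gt.100"). As a rough sketch of that shape only, with a hypothetical helper name and no claim about the repo's actual `FunctionContext` or `Operator` parsing:

// Hypothetical sketch of the "{operator}.{value}" shape described above;
// not the repo's FunctionContext/Operator implementation.
fn parse_aggregate_fn_ctx(ctx: &str) -> Result<(String, u128), String> {
    let (op, value) = ctx
        .split_once('.')
        .ok_or_else(|| format!("expected '{{operator}}.{{value}}', got '{ctx}'"))?;
    let value: u128 = value
        .parse()
        .map_err(|e| format!("invalid value '{value}': {e}"))?;
    Ok((op.to_string(), value))
}

fn main() {
    // "gt.100" applies a greater-than-100 condition to the "count" aggregate.
    assert_eq!(
        parse_aggregate_fn_ctx("gt.100").unwrap(),
        ("gt".to_string(), 100)
    );
}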
2 changes: 1 addition & 1 deletion cli/src/interactive.rs
@@ -230,7 +230,7 @@ pub async fn run_interactive() -> anyhow::Result<()> {
start_index,
end_index,
increment,
IncludedTypes::from(&included_types),
IncludedTypes::from_bytes(&included_types),
);
DatalakeEnvelope::TransactionsInBlock(transactions_datalake)
}
4 changes: 2 additions & 2 deletions hdp/src/cairo_runner/dry_run.rs
@@ -170,7 +170,7 @@ mod tests {
let result = dry_runner.run(input).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].fetch_keys.len(), 3);
assert_eq!(result[0].result, Uint256::from_strs("0x0", "0x0").unwrap());
assert_eq!(result[0].result, Uint256::ZERO);
assert_eq!(
result[0].program_hash,
felt!("0x04df21eb479ae4416fbdc00abab6fab43bff0b8083be4d1fd8602c8fbfbd2274")
@@ -233,7 +233,7 @@ mod tests {
println!("Fetch key {}: {:?}", i, key);
}

assert_eq!(module.result, Uint256::from_strs("0x0", "0x0").unwrap());
assert_eq!(module.result, Uint256::ZERO);
assert_eq!(
module.program_hash,
felt!("0xc8580f74b6e6e04d8073602ad0c0d55538b56bf8307fefebb6b65b1bbf2a27")
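Both dry-run assertions now use a `Uint256::ZERO` constant instead of parsing `Uint256::from_strs("0x0", "0x0").unwrap()` at runtime. A minimal sketch of the associated-constant pattern, using a stand-in struct rather than the repo's real `Uint256`:

// Stand-in type for illustration; the repo's Uint256 has its own layout and API.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct Uint256 {
    high: u128,
    low: u128,
}

impl Uint256 {
    // Available at compile time, so no runtime parsing of "0x0" and no unwrap().
    const ZERO: Self = Self { high: 0, low: 0 };
}

fn main() {
    assert_eq!(Uint256::ZERO, Uint256 { high: 0, low: 0 });
}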
5 changes: 5 additions & 0 deletions hdp/src/lib.rs
@@ -1,3 +1,8 @@
//! The Data Processor CLI serves as an essential tool for developers working with Cairo programs and zkVM environments.
//! Its primary function is to translate human-readable requests into a format compatible with Cairo programs,
//! enabling commands to be executed over the Cairo VM and generating executable outputs.
//! This transformation is a crucial preprocessing step that prepares data for off-chain computations in zkVM environments.
pub mod cairo_runner;
pub mod constant;
pub mod hdp_run;
4 changes: 2 additions & 2 deletions hdp/src/preprocessor/compile/datalake.rs
@@ -173,7 +173,7 @@ mod tests {
start_index: 0,
end_index: 10,
increment: 1,
included_types: IncludedTypes::from(&[1, 1, 1, 1]),
included_types: IncludedTypes::ALL,
sampled_property: TransactionsCollection::Transactions(
TransactionField::GasLimit,
),
@@ -187,7 +187,7 @@
start_index: 0,
end_index: 11,
increment: 1,
included_types: IncludedTypes::from(&[1, 1, 1, 1]),
included_types: IncludedTypes::ALL,
sampled_property: TransactionsCollection::TranasactionReceipts(
TransactionReceiptField::Success,
),
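Both fixtures above collapse `IncludedTypes::from(&[1, 1, 1, 1])` into an `IncludedTypes::ALL` constant, matching the `from_bytes` rename in the interactive flow earlier in this PR. A hypothetical reconstruction for illustration only; the real `IncludedTypes` layout is not shown in this diff:

// Hypothetical flag type; the field layout and semantics are assumptions.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct IncludedTypes(u8);

impl IncludedTypes {
    // All four flags set, replacing from_bytes(&[1, 1, 1, 1]) at call sites.
    const ALL: Self = Self(0b1111);

    // Each non-zero byte in a 4-byte slice enables one flag.
    fn from_bytes(bytes: &[u8]) -> Self {
        assert_eq!(bytes.len(), 4, "expected exactly four flag bytes");
        let mut flags = 0u8;
        for (i, byte) in bytes.iter().enumerate() {
            if *byte != 0 {
                flags |= 1 << i;
            }
        }
        Self(flags)
    }
}

fn main() {
    assert_eq!(IncludedTypes::from_bytes(&[1, 1, 1, 1]), IncludedTypes::ALL);
}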
2 changes: 1 addition & 1 deletion hdp/src/preprocessor/compile/module.rs
@@ -54,7 +54,7 @@ impl Compilable for ModuleVec {

// Extract the dry run module result
let dry_run_module = dry_run_results.into_iter().next().unwrap();
let commit_results = vec![dry_run_module.result.to_combined_string().into()];
let commit_results = vec![dry_run_module.result.into()];

// 3. Categorize fetch keys by chain ID
let categorized_keys = categorize_fetch_keys(dry_run_module.fetch_keys);
9 changes: 6 additions & 3 deletions hdp/src/preprocessor/module_registry.rs
@@ -64,8 +64,10 @@ impl ModuleRegistry {
local_class_path: Option<PathBuf>,
module_inputs: Vec<String>,
) -> Result<ExtendedModule, ModuleRegistryError> {
let program_hash =
program_hash.map(|program_hash| FieldElement::from_hex_be(&program_hash).unwrap());
let program_hash = program_hash.map(|program_hash| {
FieldElement::from_hex_be(&program_hash)
.expect("program hash cannot be converted to FieldElement")
});
let module_inputs: Result<Vec<ModuleInput>, _> = module_inputs
.into_iter()
.map(|input| ModuleInput::from_str(&input))
@@ -103,7 +105,8 @@ impl ModuleRegistry {
};

let program_hash = casm.compiled_class_hash();
let converted_hash = FieldElement::from_bytes_be(&program_hash.to_bytes_be()).unwrap();
let converted_hash = FieldElement::from_bytes_be(&program_hash.to_bytes_be())
.expect("program hash cannot be converted to FieldElement");
info!("program Hash: {:#?}", converted_hash);

let module = Module {
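The registry keeps the same `FieldElement::from_hex_be` conversion but swaps bare `unwrap()` calls for `expect` messages. A small sketch of that fallible conversion, assuming `starknet_crypto::FieldElement` as imported elsewhere in this PR and reusing the program hash from the dry-run test above:

use starknet_crypto::FieldElement;

fn main() {
    let program_hash: Option<String> = Some(
        "0x04df21eb479ae4416fbdc00abab6fab43bff0b8083be4d1fd8602c8fbfbd2274".to_string(),
    );

    // from_hex_be returns a Result; expect() documents the failure mode
    // instead of a bare unwrap().
    let program_hash = program_hash.map(|hash| {
        FieldElement::from_hex_be(&hash)
            .expect("program hash cannot be converted to FieldElement")
    });

    println!("{program_hash:?}");
}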
33 changes: 10 additions & 23 deletions hdp/src/primitives/block/account.rs
@@ -30,7 +30,7 @@ impl Account {
}

pub fn rlp_decode(mut rlp: &[u8]) -> Self {
<Account>::decode(&mut rlp).unwrap()
<Account>::decode(&mut rlp).expect("rlp decode failed.")
}
}

@@ -49,18 +49,15 @@ impl From<&EIP1186AccountProofResponse> for Account {
mod tests {
use super::*;
use alloy::hex;
use alloy::primitives::U256;
use std::str::FromStr;
use alloy::primitives::{b256, U256};

#[test]
fn test_get_account_rlp() {
let account = Account::new(
U64::from(1),
U256::from(0),
B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185")
.unwrap(),
B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c")
.unwrap(),
b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"),
b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"),
);
let account_rlp = account.rlp_encode();
assert_eq!(
@@ -71,10 +68,8 @@ mod tests {
let account = Account::new(
U64::from(2),
U256::from(0),
B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185")
.unwrap(),
B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c")
.unwrap(),
b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"),
b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"),
);
let account_rlp = account.rlp_encode();
assert_eq!(
@@ -85,10 +80,8 @@ mod tests {
let account = Account::new(
U64::from(2),
U256::from(0x1),
B256::from_str("0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185")
.unwrap(),
B256::from_str("0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c")
.unwrap(),
b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"),
b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"),
);
let account_rlp = account.rlp_encode();
assert_eq!(
@@ -106,14 +99,8 @@ mod tests {
Account::new(
U64::from(1),
U256::from(0),
B256::from_str(
"0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"
)
.unwrap(),
B256::from_str(
"0xcd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"
)
.unwrap()
b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"),
b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c")
)
);
}
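These tests replace runtime `B256::from_str(...).unwrap()` parsing with the `b256!` macro from `alloy::primitives`, which validates the hex literal at compile time. A standalone sketch of the difference, assuming the `alloy` crate and reusing a hash from the test above:

use std::str::FromStr;

use alloy::primitives::{b256, B256};

fn main() {
    // Parsed at runtime, so the error has to be unwrapped.
    let runtime = B256::from_str(
        "0x1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185",
    )
    .unwrap();

    // Checked at compile time; no unwrap() and no FromStr import at the call site.
    let compile_time =
        b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185");

    assert_eq!(runtime, compile_time);
}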
12 changes: 3 additions & 9 deletions hdp/src/primitives/block/header.rs
@@ -131,7 +131,6 @@ impl Header {
}

/// Heavy function that will calculate hash of data and will *not* save the change to metadata.
/// Use [`Header::seal`], [`SealedHeader`] and unlock if you need hash to be persistent.
pub fn hash_slow(&self) -> B256 {
keccak256(alloy_rlp::encode(self))
}
@@ -343,7 +342,7 @@ impl Header {
}

/// Block header returned from RPC
/// https://ethereum.org/en/developers/docs/apis/json-rpc#eth_getblockbynumber
/// <https://ethereum.org/en/developers/docs/apis/json-rpc#eth_getblockbynumber>
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct BlockHeaderFromRpc {
@@ -378,8 +377,8 @@ impl BlockHeaderFromRpc {
}
}

impl From<&BlockHeaderFromRpc> for Header {
fn from(value: &BlockHeaderFromRpc) -> Self {
impl From<BlockHeaderFromRpc> for Header {
fn from(value: BlockHeaderFromRpc) -> Self {
Self {
parent_hash: B256::from_str(&value.parent_hash).expect("Invalid hex string"),
ommers_hash: B256::from_str(&value.sha3_uncles).expect("Invalid hex string"),
@@ -400,23 +399,18 @@ impl From<&BlockHeaderFromRpc> for Header {
nonce: u64::from_str_radix(&value.nonce[2..], 16).expect("Invalid hex string"),
base_fee_per_gas: value
.base_fee_per_gas
.clone()
.map(|x| u64::from_str_radix(&x[2..], 16).expect("Invalid hex string")),
withdrawals_root: value
.withdrawals_root
.clone()
.map(|x| B256::from_str(&x).expect("Invalid hex string")),
blob_gas_used: value
.blob_gas_used
.clone()
.map(|x| u64::from_str_radix(&x[2..], 16).expect("Invalid hex string")),
excess_blob_gas: value
.excess_blob_gas
.clone()
.map(|x| u64::from_str_radix(&x[2..], 16).expect("Invalid hex string")),
parent_beacon_block_root: value
.parent_beacon_block_root
.clone()
.map(|x| B256::from_str(&x).expect("Invalid hex string")),
}
}
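The conversion now takes `BlockHeaderFromRpc` by value instead of by reference, which is why the `.clone()` calls on the optional fields disappear. A minimal sketch of that ownership change with hypothetical stand-in types; the real structs carry many more fields:

struct RpcHeader {
    base_fee_per_gas: Option<String>,
}

struct Header {
    base_fee_per_gas: Option<u64>,
}

// Borrowed input: the Option<String> must be cloned first, because a field
// cannot be moved out of a shared reference.
impl From<&RpcHeader> for Header {
    fn from(value: &RpcHeader) -> Self {
        Self {
            base_fee_per_gas: value
                .base_fee_per_gas
                .clone()
                .map(|x| u64::from_str_radix(&x[2..], 16).expect("Invalid hex string")),
        }
    }
}

// Owned input: the field is moved out directly and the clone disappears.
impl From<RpcHeader> for Header {
    fn from(value: RpcHeader) -> Self {
        Self {
            base_fee_per_gas: value
                .base_fee_per_gas
                .map(|x| u64::from_str_radix(&x[2..], 16).expect("Invalid hex string")),
        }
    }
}

fn main() {
    let rpc = RpcHeader {
        base_fee_per_gas: Some("0x3b9aca00".to_string()),
    };
    assert_eq!(Header::from(rpc).base_fee_per_gas, Some(1_000_000_000));
}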
3 changes: 1 addition & 2 deletions hdp/src/primitives/processed_types/cairo_format/account.rs
@@ -14,8 +14,7 @@ impl AsCairoFormat for BaseProcessedAccount {
type Output = ProcessedAccount;

fn as_cairo_format(&self) -> Self::Output {
let address_chunk_result =
FieldElementVectorUnit::from_bytes(self.address.as_ref()).unwrap();
let address_chunk_result = FieldElementVectorUnit::from_bytes(self.address.as_ref());
let account_key = &self.account_key;
let proofs = self
.proofs
@@ -10,9 +10,8 @@ impl AsCairoFormat for BaseProcessedDatalakeCompute {
type Output = ProcessedDatalakeCompute;

fn as_cairo_format(&self) -> Self::Output {
let computational_task_felts =
FieldElementVectorUnit::from_bytes(&self.encoded_task).unwrap();
let datalake_felts = FieldElementVectorUnit::from_bytes(&self.encoded_datalake).unwrap();
let computational_task_felts = FieldElementVectorUnit::from_bytes(&self.encoded_task);
let datalake_felts = FieldElementVectorUnit::from_bytes(&self.encoded_datalake);
ProcessedDatalakeCompute {
task_bytes_len: computational_task_felts.bytes_len,
encoded_task: computational_task_felts.felts,
27 changes: 18 additions & 9 deletions hdp/src/primitives/processed_types/cairo_format/felt_vec_unit.rs
@@ -1,4 +1,3 @@
use anyhow::Result;
use serde::Serialize;
use serde_with::serde_as;
use starknet::core::serde::unsigned_field_element::UfeHex;
@@ -7,15 +6,25 @@ use starknet_crypto::FieldElement;
#[serde_as]
#[derive(Serialize, Debug)]
pub struct FieldElementVectorUnit {
/// Chunked vector of field elements
#[serde_as(as = "Vec<UfeHex>")]
pub felts: Vec<FieldElement>,
/// Length of the original byte array before chunking into field elements
pub bytes_len: u64,
}

impl FieldElementVectorUnit {
pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
/// Converts a byte slice into a `FieldElementVectorUnit`.
///
/// This function takes a slice of bytes and converts it into a `FieldElementVectorUnit`,
/// which consists of a vector of `FieldElement`s and the length of the original byte slice.
///
/// # Panics
///
/// This function will panic if the input byte slice is empty.
pub fn from_bytes(bytes: &[u8]) -> Self {
if bytes.is_empty() {
return Err(anyhow::anyhow!("Empty hex input"));
panic!("Cannot convert to FieldElementVectorUnit from empty bytes")
}
let bytes_len = bytes.len() as u64;
let felts = bytes
@@ -30,7 +39,7 @@ impl FieldElementVectorUnit {
})
.collect();

Ok(Self { felts, bytes_len })
Self { felts, bytes_len }
}
}

@@ -41,16 +50,16 @@ mod tests {
use super::*;

#[test]
#[should_panic(expected = "Cannot convert to FieldElementVectorUnit from empty bytes")]
fn test_empty_bytes() {
let bytes = hex::decode("").unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes);
assert!(result.is_err());
FieldElementVectorUnit::from_bytes(&bytes);
}

#[test]
fn test_single_byte_bytes() {
let bytes = hex::decode("0x01").unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes);
assert_eq!(result.bytes_len, 1);
assert_eq!(result.felts.len(), 1);
assert_eq!(result.felts[0], FieldElement::from_hex_be("0x1").unwrap());
@@ -59,7 +68,7 @@ mod tests {
#[test]
fn test_single_chunk_bytes() {
let bytes = hex::decode("0x1234567890abcdef").unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes);
assert_eq!(result.bytes_len, 8);
assert_eq!(result.felts.len(), 1);
assert_eq!(
@@ -71,7 +80,7 @@
#[test]
fn test_multiple_chunks_bytes() {
let bytes = hex::decode("0x1234567890abcdef1122334455667788").unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes).unwrap();
let result = FieldElementVectorUnit::from_bytes(&bytes);
assert_eq!(result.bytes_len, 16);
assert_eq!(result.felts.len(), 2);
assert_eq!(
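The updated tests pin down the behavior: `from_bytes` now panics on empty input and otherwise splits the input into 8-byte big-endian words (one byte `0x01` becomes `0x1`, sixteen bytes become two felts). An illustrative re-derivation of that chunking with plain `u64`s; the real function produces `FieldElement`s and records `bytes_len` alongside them:

// Illustrative chunking only; not the repo's implementation.
fn chunk_to_words(bytes: &[u8]) -> Vec<u64> {
    if bytes.is_empty() {
        // Mirrors the new behavior: empty input is treated as a caller bug.
        panic!("Cannot convert to FieldElementVectorUnit from empty bytes");
    }
    bytes
        .chunks(8)
        .map(|chunk| {
            // Interpret each chunk as a big-endian integer, matching the tests:
            // [0x01] -> 0x1, [0x12, 0x34, ..., 0xef] -> 0x1234567890abcdef.
            let mut word = [0u8; 8];
            word[8 - chunk.len()..].copy_from_slice(chunk);
            u64::from_be_bytes(word)
        })
        .collect()
}

fn main() {
    let bytes: [u8; 16] = [
        0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef,
        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88,
    ];
    assert_eq!(
        chunk_to_words(&bytes),
        vec![0x1234567890abcdef, 0x1122334455667788]
    );
}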
2 changes: 1 addition & 1 deletion hdp/src/primitives/processed_types/cairo_format/header.rs
@@ -13,7 +13,7 @@ impl AsCairoFormat for BaseProcessedHeader {
type Output = ProcessedHeader;

fn as_cairo_format(&self) -> Self::Output {
let felts_unit = FieldElementVectorUnit::from_bytes(&self.rlp).unwrap();
let felts_unit = FieldElementVectorUnit::from_bytes(&self.rlp);
let proof = self.proof.clone();
ProcessedHeader {
rlp: felts_unit.felts,
2 changes: 1 addition & 1 deletion hdp/src/primitives/processed_types/cairo_format/module.rs
@@ -13,7 +13,7 @@ impl AsCairoFormat for BaseProcessedModule {
type Output = ProcessedModule;

fn as_cairo_format(&self) -> Self::Output {
let module_task_felts = FieldElementVectorUnit::from_bytes(&self.encoded_task).unwrap();
let module_task_felts = FieldElementVectorUnit::from_bytes(&self.encoded_task);
ProcessedModule {
module_class: self.module_class.clone(),
encoded_task: module_task_felts.felts,
2 changes: 1 addition & 1 deletion hdp/src/primitives/processed_types/cairo_format/mpt.rs
@@ -13,7 +13,7 @@ impl AsCairoFormat for BaseProcessedMPTProof {
let proof_felts: Vec<FieldElementVectorUnit> = self
.proof
.iter()
.map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
.map(|proof| FieldElementVectorUnit::from_bytes(proof))
.collect();

let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
2 changes: 1 addition & 1 deletion hdp/src/primitives/processed_types/cairo_format/receipt.rs
@@ -15,7 +15,7 @@ impl AsCairoFormat for BaseProcessedReceipt {
let proof_felts: Vec<FieldElementVectorUnit> = self
.proof
.iter()
.map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
.map(|proof| FieldElementVectorUnit::from_bytes(proof))
.collect();

let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
5 changes: 2 additions & 3 deletions hdp/src/primitives/processed_types/cairo_format/storage.rs
@@ -14,9 +14,8 @@ impl AsCairoFormat for BaseProcessedStorage {
type Output = ProcessedStorage;

fn as_cairo_format(&self) -> Self::Output {
let address_chunk_result =
FieldElementVectorUnit::from_bytes(self.address.as_ref()).unwrap();
let slot_chunk_result = FieldElementVectorUnit::from_bytes(self.slot.as_ref()).unwrap();
let address_chunk_result = FieldElementVectorUnit::from_bytes(self.address.as_ref());
let slot_chunk_result = FieldElementVectorUnit::from_bytes(self.slot.as_ref());
let storage_key = self.storage_key;
let proofs = self
.proofs
@@ -15,7 +15,7 @@ impl AsCairoFormat for BaseProcessedTransaction {
let proof_felts: Vec<FieldElementVectorUnit> = self
.proof
.iter()
.map(|proof| FieldElementVectorUnit::from_bytes(proof).unwrap())
.map(|proof| FieldElementVectorUnit::from_bytes(proof))
.collect();

let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();