Skip to content

Commit

Permalink
cargo doc
Browse files Browse the repository at this point in the history
  • Loading branch information
rkdud007 committed Sep 15, 2024
1 parent 96fb67f commit 0a6164e
Show file tree
Hide file tree
Showing 10 changed files with 24 additions and 16 deletions.
2 changes: 1 addition & 1 deletion cli/src/commands/run_datalake.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ pub struct RunDatalakeArgs {
pub aggregate_fn_id: AggregationFunction,
/// Optional context for applying conditions on the aggregate function "count".
/// Format: "{operator}.{value}" (e.g., "eq.100" for equality, "gt.100" for greater-than).
/// Supported operators are in the [`Operator`] enum.
/// Supported operators are in the Operator enum.
pub aggregate_fn_ctx: Option<FunctionContext>,

#[command(subcommand)]
Expand Down
5 changes: 5 additions & 0 deletions hdp/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
//! The Data Processor CLI serves as an essential tool for developers working with Cairo programs and zkVM environments.
//! Its primary function is to translate human-readable requests into a format compatible with Cairo programs,
//! enabling commands to be executed over the Cairo VM and generating executable outputs.
//! This transformation is a crucial preprocessing step that prepares data for off-chain computations in zkVM environments.
pub mod cairo_runner;
pub mod constant;
pub mod hdp_run;
Expand Down
3 changes: 1 addition & 2 deletions hdp/src/primitives/block/header.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,6 @@ impl Header {
}

/// Heavy function that will calculate hash of data and will *not* save the change to metadata.
/// Use [`Header::seal`], [`SealedHeader`] and unlock if you need hash to be persistent.
pub fn hash_slow(&self) -> B256 {
keccak256(alloy_rlp::encode(self))
}
Expand Down Expand Up @@ -343,7 +342,7 @@ impl Header {
}

/// Block header returned from RPC
/// https://ethereum.org/en/developers/docs/apis/json-rpc#eth_getblockbynumber
/// <https://ethereum.org/en/developers/docs/apis/json-rpc#eth_getblockbynumber>
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct BlockHeaderFromRpc {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,13 @@ impl DatalakeCodecs for TransactionsInBlockDatalake {
}
}

/// Get the commitment hash of the [`TransactionsDatalake`]
/// Get the commitment hash of the [`TransactionsInBlockDatalake`]
fn commit(&self) -> B256 {
let encoded_datalake = self.encode().expect("Encoding failed");
keccak256(encoded_datalake)
}

/// Decode the encoded transactions datalake hex string into a [`TransactionsDatalake`]
/// Decode the encoded transactions datalake hex string into a [`TransactionsInBlockDatalake`]
fn decode(encoded: &[u8]) -> Result<Self> {
let abi_type: DynSolType =
"(uint256,uint256, uint256, uint256, uint256, uint256, uint256, bytes)".parse()?;
Expand Down
4 changes: 2 additions & 2 deletions hdp/src/primitives/solidity_types/traits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ pub trait Codecs {
Self: Sized;
}

/// Codecs for [`DatalakeCompute`]
/// Codecs for a datalake task
pub trait DatalakeComputeCodecs {
fn decode(encoded_datalake: &[u8], encoded_compute: &[u8]) -> Result<Self>
where
Expand All @@ -31,7 +31,7 @@ pub trait DatalakeComputeCodecs {
fn commit(&self) -> B256;
}

/// Codecs for [`BatchedDatalakeCompute`]
/// Codecs for a vector of datalake tasks
pub trait BatchedDatalakeComputeCodecs {
fn decode(encoded_datalake: &[u8], encoded_compute: &[u8]) -> Result<Self>
where
Expand Down
4 changes: 2 additions & 2 deletions hdp/src/primitives/task/datalake/datalake_type.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@ use anyhow::{bail, Result};
use serde::{Deserialize, Serialize};
use std::str::FromStr;

/// Identifier for a [`BlockSampledDatalake`] type.
/// Identifier for a BlockSampledDatalake
pub const BLOCK_SAMPLED_DATALAKE_TYPE_ID: u8 = 0;

/// Identifier for an [`TransactionsDatalake`] type.
/// Identifier for a TransactionsDatalake
pub const TRANSACTIONS_IN_BLOCK_DATALAKE_TYPE_ID: u8 = 1;

#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq)]
Expand Down
6 changes: 3 additions & 3 deletions hdp/src/provider/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@ pub enum ProviderError {
#[error("MMR not found")]
MmrNotFound,

/// Error from the [`Indexer`]
/// Error from the [`IndexerError`]
#[error("Failed from indexer")]
IndexerError(#[from] IndexerError),

/// Error from [`RpcProvider`]
/// Error from [`RpcProviderError`]
#[error("Failed to get proofs: {0}")]
EvmRpcProviderError(#[from] RpcProviderError),

Expand All @@ -34,7 +34,7 @@ pub enum ProviderError {
FetchKeyError(String),
}

/// Error from [`RpcProvider`]
/// Error from the RPC provider
#[derive(Error, Debug)]
pub enum RpcProviderError {
#[error("Failed to send proofs with mpsc")]
Expand Down
2 changes: 1 addition & 1 deletion hdp/src/provider/indexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ impl ChainId {

/// Indexer client for fetching MMR and headers proof from Herodotus Indexer
///
/// For more information, see: https://rs-indexer.api.herodotus.cloud/swagger
/// For more information, see: <https://rs-indexer.api.herodotus.cloud/swagger>
///
/// How to use:
/// ```rust
Expand Down
2 changes: 1 addition & 1 deletion hdp/src/provider/starknet/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;
use starknet_types_core::{felt::Felt, hash::StarkHash};

/// Codebase is from https://github.com/eqlabs/pathfinder/tree/ae81d84b7c4157891069bd02ef810a29b60a94e3
/// Codebase is from <https://github.com/eqlabs/pathfinder/tree/ae81d84b7c4157891069bd02ef810a29b60a94e3>
/// Holds the membership/non-membership of a contract and its associated
contract state if the contract exists.
Expand Down
8 changes: 6 additions & 2 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,14 @@ clean:
clippy:
cargo clippy --all-targets --all-features -- -Dwarnings

# Generate documentation for the project
docs:
cargo doc --no-deps

# Execute all unit tests in the workspace
test:
cargo llvm-cov nextest --features test_utils

# Run the entire CI pipeline including format, clippy, and test checks
run-ci-flow: format clippy test
# Run the entire CI pipeline including format, clippy, docs, and test checks
run-ci-flow: format clippy docs test
@echo "CI flow completed"

0 comments on commit 0a6164e

Please sign in to comment.