From eb141b6431ce107010ffd6e391ccb3336f2141d8 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Tue, 19 Aug 2025 10:36:50 +0530 Subject: [PATCH 1/6] feat: add utilities for testing persistence Added the following functions to test if persistence of `bdk_chain` is happening correctly. - `persist_txgraph_changeset` - `persist_indexer_changeset` - `persist_local_chain_changeset` - `persist_last_seen`, `persist_last_evicted`, `persist_first_seen` - `persist_txouts` - `persist_txs` - `persist_anchors` - `persist_last_revealed` - `persist_spk_cache` Even though the first three tests cover every part of the `ChangeSet`s , the other tests are retained so as to help in unit testing. --- crates/testenv/Cargo.toml | 5 +- crates/testenv/src/lib.rs | 1 + crates/testenv/src/persist_test_utils.rs | 721 +++++++++++++++++++++++ 3 files changed, 725 insertions(+), 2 deletions(-) create mode 100644 crates/testenv/src/persist_test_utils.rs diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index eff58a41c..e2b42b10f 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -16,8 +16,10 @@ readme = "README.md" workspace = true [dependencies] -bdk_chain = { path = "../chain", version = "0.23.1", default-features = false } +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["miniscript"]} electrsd = { version = "0.28.0", features = [ "legacy" ], default-features = false } +anyhow = "1.0.98" +tempfile = "3.20.0" [dev-dependencies] bdk_testenv = { path = "." 
} @@ -27,6 +29,5 @@ default = ["std", "download"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] - [package.metadata.docs.rs] no-default-features = true diff --git a/crates/testenv/src/lib.rs b/crates/testenv/src/lib.rs index 9faf43bf2..ac3cc6326 100644 --- a/crates/testenv/src/lib.rs +++ b/crates/testenv/src/lib.rs @@ -1,5 +1,6 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] +pub mod persist_test_utils; pub mod utils; use bdk_chain::{ diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs new file mode 100644 index 000000000..82a42dd5a --- /dev/null +++ b/crates/testenv/src/persist_test_utils.rs @@ -0,0 +1,721 @@ +use crate::block_id; +use crate::hash; +use bdk_chain::bitcoin; +use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey}; +use bdk_chain::{ + bitcoin::{ + absolute, key::Secp256k1, transaction, Address, Amount, OutPoint, ScriptBuf, Transaction, + TxIn, TxOut, Txid, + }, + indexer::keychain_txout, + local_chain, tx_graph, ConfirmationBlockTime, DescriptorExt, DescriptorId, Merge, SpkIterator, +}; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; +use std::str::FromStr; +use std::sync::Arc; + +fn create_one_inp_one_out_tx(txid: Txid, amount: u64) -> Transaction { + Transaction { + version: transaction::Version::ONE, + lock_time: absolute::LockTime::ZERO, + input: vec![TxIn { + previous_output: OutPoint::new(txid, 0), + ..TxIn::default() + }], + output: vec![TxOut { + value: Amount::from_sat(amount), + script_pubkey: Address::from_str("bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5") + .unwrap() + .assume_checked() + .script_pubkey(), + }], + } +} + +fn spk_at_index(descriptor: &Descriptor, index: u32) -> ScriptBuf { + descriptor + .derived_descriptor(&Secp256k1::verification_only(), index) + .expect("must derive") + .script_pubkey() +} + +pub fn persist_txgraph_changeset( + file_name: &str, + create_store: 
CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + block_id: block_id!(910425, "Rust"), + confirmation_time: 1755416660, + }; + + let mut tx_graph_changeset1 = ChangeSet:: { + txs: [tx1.clone()].into(), + txouts: [ + ( + OutPoint::new(hash!("BDK"), 0), + TxOut { + value: Amount::from_sat(1300), + script_pubkey: Address::from_str( + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", + ) + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ( + OutPoint::new(hash!("Bitcoin_fixes_things"), 0), + TxOut { + value: Amount::from_sat(1400), + script_pubkey: Address::from_str( + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", + ) + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ] + .into(), + anchors: [(conf_anchor, tx1.compute_txid())].into(), + last_seen: [(tx1.compute_txid(), 1755416650)].into(), + first_seen: [(tx1.compute_txid(), 1755416655)].into(), + last_evicted: [(tx1.compute_txid(), 1755416660)].into(), + }; + + persist(&mut store, &tx_graph_changeset1).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset, tx_graph_changeset1); + + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + 
block_id: block_id!(910426, "BOSS"), + confirmation_time: 1755416700, + }; + + let tx_graph_changeset2 = ChangeSet:: { + txs: [tx2.clone()].into(), + txouts: [( + OutPoint::new(hash!("REDB"), 0), + TxOut { + value: Amount::from_sat(10000), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + )] + .into(), + anchors: [(conf_anchor, tx2.compute_txid())].into(), + last_seen: [(tx2.compute_txid(), 1755416700)].into(), + first_seen: [(tx2.compute_txid(), 1755416670)].into(), + last_evicted: [(tx2.compute_txid(), 1755416760)].into(), + }; + + persist(&mut store, &tx_graph_changeset2).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + + tx_graph_changeset1.merge(tx_graph_changeset2); + + assert_eq!(tx_graph_changeset1, changeset); +} + +fn parse_descriptor(descriptor: &str) -> Descriptor { + let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only(); + Descriptor::::parse_descriptor(&secp, descriptor) + .unwrap() + .0 +} + +pub fn persist_indexer_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use crate::utils::DESCRIPTORS; + use keychain_txout::ChangeSet; + + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + let descs = DESCRIPTORS.map(parse_descriptor); + + let mut changeset = ChangeSet { + last_revealed: 
[(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(), + spk_cache: [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=26).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[1], 0..=125).collect(), + ), + ] + .into(), + }; + + persist(&mut store, &changeset).expect("should persist keychain_txout"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + assert_eq!(changeset_read, changeset); + + let changeset_new = ChangeSet { + last_revealed: [(descriptor_ids[0], 2)].into(), + spk_cache: [( + descriptor_ids[0], + [(27, spk_at_index(&descs[0], 27))].into(), + )] + .into(), + }; + + persist(&mut store, &changeset_new).expect("should persist second changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load merged changesets"); + changeset.merge(changeset_new); + + assert_eq!(changeset_read_new, changeset); +} + +pub fn persist_local_chain_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &local_chain::ChangeSet) -> anyhow::Result<()>, +{ + use local_chain::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let changeset = ChangeSet { + blocks: [(910425, Some(hash!("B"))), (910426, Some(hash!("D")))].into(), + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read, changeset); + + // create another local_chain_changeset, persist that and read it + let 
changeset_new = ChangeSet { + blocks: [(910427, Some(hash!("K")))].into(), + }; + + persist(&mut store, &changeset_new).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + + let changeset = ChangeSet { + blocks: [ + (910425, Some(hash!("B"))), + (910426, Some(hash!("D"))), + (910427, Some(hash!("K"))), + ] + .into(), + }; + + assert_eq!(changeset, changeset_read_new); +} + +pub fn persist_last_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading last_seen + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut last_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416700), + (tx2.compute_txid(), 1755416800), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + last_seen: last_seen.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_seen, last_seen); + + // persist another last_seen and see if what is read is same as merged one + let 
txs_new: BTreeSet> = [tx3.clone()].into(); + let last_seen_new: BTreeMap = [(tx3.compute_txid(), 1755417800)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + last_seen: last_seen_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_seen.merge(last_seen_new); + assert_eq!(changeset_read_new.last_seen, last_seen); +} + +pub fn persist_last_evicted( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BDK"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading last_evicted + let mut last_evicted: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416060), + ] + .into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_evicted, last_evicted); + + // persist another last_evicted and see if what is read is same as merged one + let 
last_evicted_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_evicted.merge(last_evicted_new); + assert_eq!(changeset_read_new.last_evicted, last_evicted); +} + +pub fn persist_first_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + // try persisting and reading first_seen + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut first_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416600), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + first_seen: first_seen.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.first_seen, first_seen); + + // persist another first_seen and see if what is read is same as merged one + let 
txs_new: BTreeSet> = [tx3.clone()].into(); + let first_seen_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + first_seen: first_seen_new.clone(), + ..ChangeSet::::default() + }; + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + first_seen.merge(first_seen_new); + assert_eq!(changeset_read_new.first_seen, first_seen); +} + +pub fn persist_txouts( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let mut txouts: BTreeMap = [ + ( + OutPoint::new(hash!("B"), 0), + TxOut { + value: Amount::from_sat(1300), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ( + OutPoint::new(hash!("D"), 0), + TxOut { + value: Amount::from_sat(1400), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + ), + ] + .into(); + + let changeset = ChangeSet:: { + txouts: txouts.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load changeset"); + assert_eq!(changeset_read.txouts, txouts); + + let txouts_new: BTreeMap = [( + 
OutPoint::new(hash!("K"), 0), + TxOut { + value: Amount::from_sat(10000), + script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") + .unwrap() + .assume_checked() + .script_pubkey(), + }, + )] + .into(); + + let changeset = ChangeSet:: { + txouts: txouts_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load changeset"); + txouts.merge(txouts_new); + assert_eq!(changeset_read_new.txouts, txouts); +} + +pub fn persist_txs( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + let mut txs: BTreeSet> = [tx1, tx2.clone()].into(); + + let changeset = ChangeSet:: { + txs: txs.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.txs, txs); + + let txs_new: BTreeSet> = [tx3].into(); + + let changeset = ChangeSet:: { + txs: txs_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist 
changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + txs.merge(txs_new); + assert_eq!(changeset_read_new.txs, txs); +} + +pub fn persist_anchors( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + let tx1 = Arc::new(create_one_inp_one_out_tx(hash!(""), 30_000)); + let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + + let anchor1 = ConfirmationBlockTime { + block_id: block_id!(23, "BTC"), + confirmation_time: 1756838400, + }; + + let anchor2 = ConfirmationBlockTime { + block_id: block_id!(25, "BDK"), + confirmation_time: 1756839600, + }; + + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut anchors: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor1, tx1.compute_txid()), (anchor2, tx2.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs, + anchors: anchors.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.anchors, anchors); + + let txs_new: BTreeSet> = [tx3.clone()].into(); + let anchors_new: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor2, tx3.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs: 
txs_new, + anchors: anchors_new.clone(), + ..ChangeSet::::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + anchors.merge(anchors_new); + assert_eq!(changeset_read.anchors, anchors); +} + +// check the merge by changing asserts +pub fn persist_last_revealed( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + + let mut last_revealed: BTreeMap = + [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_revealed, last_revealed); + + let last_revealed_new: BTreeMap = [(descriptor_ids[0], 2)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed_new.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_revealed.merge(last_revealed_new); + assert_eq!(changeset_read_new.last_revealed, 
last_revealed); +} + +pub fn persist_spk_cache( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + let descs = crate::utils::DESCRIPTORS.map(parse_descriptor); + + let spk_cache: BTreeMap> = [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=125).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[0], 0..=25).collect(), + ), + ] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache.clone(), + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.spk_cache, spk_cache); + + let spk_cache_new: BTreeMap> = [( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 126..=150).collect(), + )] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache_new, + ..ChangeSet::default() + }; + + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + let spk_cache: BTreeMap> = [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=150).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[0], 0..=25).collect(), + ), + 
] + .into(); + assert_eq!(changeset_read_new.spk_cache, spk_cache); +} From 509f1485a4786b7010a5facfac98321fe318233f Mon Sep 17 00:00:00 2001 From: codingp110 Date: Thu, 14 Aug 2025 23:32:08 +0530 Subject: [PATCH 2/6] test: use test utils to test file_store and sqlite --- crates/chain/tests/test_rusqlite_impl.rs | 227 +++++++++++++++++++++++ crates/file_store/Cargo.toml | 2 + crates/file_store/src/store.rs | 117 ++++++++++++ 3 files changed, 346 insertions(+) create mode 100644 crates/chain/tests/test_rusqlite_impl.rs diff --git a/crates/chain/tests/test_rusqlite_impl.rs b/crates/chain/tests/test_rusqlite_impl.rs new file mode 100644 index 000000000..6067a9864 --- /dev/null +++ b/crates/chain/tests/test_rusqlite_impl.rs @@ -0,0 +1,227 @@ +#![cfg(feature = "rusqlite")] +use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; +use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, +}; + +#[test] +fn txgraph_is_persisted() { + persist_txgraph_changeset::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn indexer_is_persisted() { + persist_indexer_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn local_chain_is_persisted() { + persist_local_chain_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + local_chain::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = local_chain::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn txouts_are_persisted() { + persist_txouts::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn txs_are_persisted() { + persist_txs::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn anchors_are_persisted() { + persist_anchors::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_seen_is_persisted() { + persist_last_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_evicted_is_persisted() { + persist_last_evicted::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn first_seen_is_persisted() { + persist_first_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn last_revealed_is_persisted() { + persist_last_revealed::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn spk_cache_is_persisted() { + persist_spk_cache::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 714c40e1b..a6c0e8a85 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -20,3 +20,5 @@ serde = { version = "1", features = ["derive"] } [dev-dependencies] tempfile = "3" +bdk_testenv = {path = "../testenv"} +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["serde"]} \ No newline at end of file diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 7e1867926..f36205af5 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -295,6 +295,13 @@ mod test { const TEST_MAGIC_BYTES: [u8; TEST_MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 49, 49, 49, 49, 49, 49, 49]; + use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; + use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, 
persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, + }; + type TestChangeSet = BTreeSet; /// Check behavior of [`Store::create`] and [`Store::load`]. @@ -599,4 +606,114 @@ mod test { // current position matches EOF assert_eq!(current_pointer, expected_pointer); } + + #[test] + fn txgraph_is_persisted() { + persist_txgraph_changeset::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn indexer_is_persisted() { + persist_indexer_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn local_chain_is_persisted() { + persist_local_chain_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txouts_are_persisted() { + persist_txouts::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txs_are_persisted() { + persist_txs::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn anchors_are_persisted() { + persist_anchors::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_seen_is_persisted() { + persist_last_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, 
path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_evicted_is_persisted() { + persist_last_evicted::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn first_seen_is_persisted() { + persist_first_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_revealed_is_persisted() { + persist_last_revealed::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn spk_cache_is_persisted() { + persist_spk_cache::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } } From 6e5c9f4f84cb6f1ec5e02027ad96e2b167389bb5 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Fri, 15 Aug 2025 13:19:53 +0530 Subject: [PATCH 3/6] docs: document persist_test_utils module --- crates/testenv/src/persist_test_utils.rs | 142 +++++++++++++++++++++-- 1 file changed, 135 insertions(+), 7 deletions(-) diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs index 82a42dd5a..3765cea9f 100644 --- a/crates/testenv/src/persist_test_utils.rs +++ b/crates/testenv/src/persist_test_utils.rs @@ -1,3 +1,4 @@ +//! This module provides utility functions for testing custom persistence backends. 
use crate::block_id; use crate::hash; use bdk_chain::bitcoin; @@ -40,6 +41,14 @@ fn spk_at_index(descriptor: &Descriptor, index: u32) -> Scr .script_pubkey() } +/// tests if [`TxGraph`] is being persisted correctly +/// +/// [`TxGraph`]: +/// [`tx_graph::ChangeSet`]: +/// +/// We create a dummy [`tx_graph::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. pub fn persist_txgraph_changeset( file_name: &str, create_store: CreateStore, @@ -51,13 +60,16 @@ pub fn persist_txgraph_changeset( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { @@ -100,11 +112,13 @@ pub fn persist_txgraph_changeset( last_evicted: [(tx1.compute_txid(), 1755416660)].into(), }; + // persist and load persist(&mut store, &tx_graph_changeset1).expect("should persist changeset"); let changeset = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset, tx_graph_changeset1); + // create another changeset let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { @@ -131,6 +145,7 @@ pub fn persist_txgraph_changeset( last_evicted: [(tx2.compute_txid(), 1755416760)].into(), }; + // persist, load and check if same as merged persist(&mut store, &tx_graph_changeset2).expect("should persist 
changeset"); let changeset = initialize(&mut store).expect("should load persisted changeset"); @@ -147,6 +162,15 @@ fn parse_descriptor(descriptor: &str) -> Descriptor { .0 } +/// tests if [`KeychainTxOutIndex`] is being persisted correctly +/// +/// [`KeychainTxOutIndex`]: +/// +/// [`keychain_txout::ChangeSet`]: +/// +/// We create a dummy [`keychain_txout::ChangeSet`], persist it and check if loaded `ChangeSet` +/// matches the persisted one. We then create another such dummy `ChangeSet`, persist it and load it +/// to check if merged `ChangeSet` is returned. pub fn persist_indexer_changeset( file_name: &str, create_store: CreateStore, @@ -160,13 +184,16 @@ pub fn persist_indexer_changeset( use crate::utils::DESCRIPTORS; use keychain_txout::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let descs = DESCRIPTORS.map(parse_descriptor); @@ -185,12 +212,14 @@ pub fn persist_indexer_changeset( .into(), }; + // persist and load persist(&mut store, &changeset).expect("should persist keychain_txout"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read, changeset); + // create another changeset let changeset_new = ChangeSet { last_revealed: [(descriptor_ids[0], 2)].into(), spk_cache: [( @@ -200,6 +229,7 @@ pub fn persist_indexer_changeset( .into(), }; + // persist, load and check if same as merged persist(&mut store, &changeset_new).expect("should persist second changeset"); let changeset_read_new = initialize(&mut store).expect("should load merged changesets"); @@ -208,6 +238,14 @@ pub fn 
persist_indexer_changeset( assert_eq!(changeset_read_new, changeset); } +/// tests if [`LocalChain`] is being persisted correctly +/// +/// [`LocalChain`]: +/// [`local_chain::ChangeSet`]: +/// +/// We create a dummy [`local_chain::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. pub fn persist_local_chain_changeset( file_name: &str, create_store: CreateStore, @@ -219,27 +257,32 @@ pub fn persist_local_chain_changeset( Persist: Fn(&mut Store, &local_chain::ChangeSet) -> anyhow::Result<()>, { use local_chain::ChangeSet; + // create the store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let changeset = ChangeSet { blocks: [(910425, Some(hash!("B"))), (910426, Some(hash!("D")))].into(), }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read, changeset); - // create another local_chain_changeset, persist that and read it + // create another changeset let changeset_new = ChangeSet { blocks: [(910427, Some(hash!("K")))].into(), }; + // persist, load and check if same as merged persist(&mut store, &changeset_new).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -256,6 +299,12 @@ pub fn persist_local_chain_changeset( assert_eq!(changeset, changeset_read_new); } +/// tests if `last_seen` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy 
[`tx_graph::ChangeSet`] with only `last_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. pub fn persist_last_seen( file_name: &str, create_store: CreateStore, @@ -267,19 +316,21 @@ pub fn persist_last_seen( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); - // try persisting and reading last_seen let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut last_seen: BTreeMap = [ (tx1.compute_txid(), 1755416700), @@ -292,11 +343,12 @@ pub fn persist_last_seen( last_seen: last_seen.clone(), ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_seen, last_seen); - // persist another last_seen and see if what is read is same as merged one + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let last_seen_new: BTreeMap = [(tx3.compute_txid(), 1755417800)].into(); @@ -305,6 +357,7 @@ pub fn persist_last_seen( last_seen: last_seen_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged 
persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -312,6 +365,12 @@ pub fn persist_last_seen( assert_eq!(changeset_read_new.last_seen, last_seen); } +/// tests if `last_evicted` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `last_evicted` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. pub fn persist_last_evicted( file_name: &str, create_store: CreateStore, @@ -323,14 +382,17 @@ pub fn persist_last_evicted( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BDK"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -346,17 +408,20 @@ pub fn persist_last_evicted( last_evicted: last_evicted.clone(), ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_evicted, last_evicted); - // persist another last_evicted and see if what is read is same as merged one + // create another changeset let last_evicted_new: BTreeMap = 
[(tx3.compute_txid(), 1755416700)].into(); let changeset = ChangeSet:: { last_evicted: last_evicted_new.clone(), ..ChangeSet::::default() }; + + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -364,6 +429,12 @@ pub fn persist_last_evicted( assert_eq!(changeset_read_new.last_evicted, last_evicted); } +/// tests if `first_seen` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `first_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. pub fn persist_first_seen( file_name: &str, create_store: CreateStore, @@ -375,19 +446,21 @@ pub fn persist_first_seen( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("store should initialize and we should get empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); - // try persisting and reading first_seen let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut first_seen: BTreeMap = [ (tx1.compute_txid(), 1755416600), @@ -400,11 +473,12 @@ pub fn persist_first_seen( first_seen: first_seen.clone(), ..ChangeSet::::default() }; + // persist and load 
persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.first_seen, first_seen); - // persist another first_seen and see if what is read is same as merged one + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let first_seen_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); @@ -413,6 +487,7 @@ pub fn persist_first_seen( first_seen: first_seen_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); @@ -420,6 +495,11 @@ pub fn persist_first_seen( assert_eq!(changeset_read_new.first_seen, first_seen); } +/// tests if `txouts` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txouts` field populated, persist it and +/// check if loaded `ChangeSet` matches the persisted one. We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
pub fn persist_txouts( file_name: &str, create_store: CreateStore, @@ -431,10 +511,12 @@ pub fn persist_txouts( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // initialize store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // create changeset let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); @@ -467,11 +549,13 @@ pub fn persist_txouts( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load changeset"); assert_eq!(changeset_read.txouts, txouts); + // create another changeset let txouts_new: BTreeMap = [( OutPoint::new(hash!("K"), 0), TxOut { @@ -489,6 +573,7 @@ pub fn persist_txouts( ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load changeset"); @@ -496,6 +581,11 @@ pub fn persist_txouts( assert_eq!(changeset_read_new.txouts, txouts); } +/// tests if `txs` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txs` field populated, persist it and check +/// if loaded `ChangeSet` matches the persisted one. We then create another such dummy `ChangeSet`, +/// persist it and load it to check if merged `ChangeSet` is returned. 
pub fn persist_txs( file_name: &str, create_store: CreateStore, @@ -507,13 +597,16 @@ pub fn persist_txs( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -525,23 +618,31 @@ pub fn persist_txs( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.txs, txs); let txs_new: BTreeSet> = [tx3].into(); + // create another changeset let changeset = ChangeSet:: { txs: txs_new.clone(), ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); txs.merge(txs_new); assert_eq!(changeset_read_new.txs, txs); } +/// tests if `anchors` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `anchors` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
pub fn persist_anchors( file_name: &str, create_store: CreateStore, @@ -553,13 +654,16 @@ pub fn persist_anchors( Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, { use tx_graph::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::::default()); + // create changeset let tx1 = Arc::new(create_one_inp_one_out_tx(hash!(""), 30_000)); let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); @@ -584,10 +688,12 @@ pub fn persist_anchors( ..ChangeSet::::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.anchors, anchors); + // create another changeset let txs_new: BTreeSet> = [tx3.clone()].into(); let anchors_new: BTreeSet<(ConfirmationBlockTime, Txid)> = [(anchor2, tx3.compute_txid())].into(); @@ -598,6 +704,7 @@ pub fn persist_anchors( ..ChangeSet::::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); @@ -605,7 +712,11 @@ pub fn persist_anchors( assert_eq!(changeset_read.anchors, anchors); } -// check the merge by changing asserts +/// tests if `last_revealed` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `last_revealed` field populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. 
We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. pub fn persist_last_revealed( file_name: &str, create_store: CreateStore, @@ -617,13 +728,16 @@ pub fn persist_last_revealed( Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, { use keychain_txout::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let mut last_revealed: BTreeMap = @@ -634,10 +748,12 @@ pub fn persist_last_revealed( ..ChangeSet::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.last_revealed, last_revealed); + // create another changeset let last_revealed_new: BTreeMap = [(descriptor_ids[0], 2)].into(); let changeset = ChangeSet { @@ -645,12 +761,18 @@ pub fn persist_last_revealed( ..ChangeSet::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); last_revealed.merge(last_revealed_new); assert_eq!(changeset_read_new.last_revealed, last_revealed); } +/// tests if `spk_cache` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `spk_cache` field populated, persist +/// it and check if loaded `ChangeSet` matches the persisted one. 
We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. pub fn persist_spk_cache( file_name: &str, create_store: CreateStore, @@ -662,13 +784,16 @@ pub fn persist_spk_cache( Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, { use keychain_txout::ChangeSet; + // create store let temp_dir = tempfile::tempdir().expect("must create tempdir"); let file_path = temp_dir.path().join(file_name); let mut store = create_store(&file_path).expect("store should get created"); + // initialize store let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); assert_eq!(changeset, ChangeSet::default()); + // create changeset let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); let descs = crate::utils::DESCRIPTORS.map(parse_descriptor); @@ -689,10 +814,12 @@ pub fn persist_spk_cache( ..ChangeSet::default() }; + // persist and load persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read = initialize(&mut store).expect("should load persisted changeset"); assert_eq!(changeset_read.spk_cache, spk_cache); + // create another changeset let spk_cache_new: BTreeMap> = [( descriptor_ids[0], SpkIterator::new_with_range(&descs[0], 126..=150).collect(), @@ -704,6 +831,7 @@ pub fn persist_spk_cache( ..ChangeSet::default() }; + // persist, load and check if same as merged persist(&mut store, &changeset).expect("should persist changeset"); let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); let spk_cache: BTreeMap> = [ From 10c42dcd93973bb99960426524b654687cd26a19 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Sun, 14 Sep 2025 22:54:28 +0530 Subject: [PATCH 4/6] fix: correct Cargo.toml and refactor test utils Since there is no way we can enable miniscript/no-std for bdk_chain::miniscript, cargo throws up an error when compiling with `--no-default-features`. 
Adding miniscript as another dependency does not resolve the issue because bdk_chain/miniscript is also required. Removed unnecessary links from docs as I just learned that docs.rs can infer the links to dependencies. --- crates/testenv/Cargo.toml | 3 +- crates/testenv/src/lib.rs | 1 + crates/testenv/src/persist_test_utils.rs | 301 +++++++++++++---------- crates/testenv/src/utils.rs | 73 +++++- 4 files changed, 242 insertions(+), 136 deletions(-) diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index e2b42b10f..81c94de2f 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -16,7 +16,7 @@ readme = "README.md" workspace = true [dependencies] -bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["miniscript"]} +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false} electrsd = { version = "0.28.0", features = [ "legacy" ], default-features = false } anyhow = "1.0.98" tempfile = "3.20.0" @@ -26,6 +26,7 @@ bdk_testenv = { path = "." } [features] default = ["std", "download"] +miniscript = ["std", "bdk_chain/miniscript"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] diff --git a/crates/testenv/src/lib.rs b/crates/testenv/src/lib.rs index ac3cc6326..92ef264e9 100644 --- a/crates/testenv/src/lib.rs +++ b/crates/testenv/src/lib.rs @@ -1,5 +1,6 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] +#[cfg(feature = "std")] pub mod persist_test_utils; pub mod utils; diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs index 3765cea9f..b950acdf5 100644 --- a/crates/testenv/src/persist_test_utils.rs +++ b/crates/testenv/src/persist_test_utils.rs @@ -1,51 +1,29 @@ //! This module provides utility functions for testing custom persistence backends.
-use crate::block_id; -use crate::hash; -use bdk_chain::bitcoin; -use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey}; +use crate::{block_id, hash}; +#[cfg(feature = "miniscript")] use bdk_chain::{ - bitcoin::{ - absolute, key::Secp256k1, transaction, Address, Amount, OutPoint, ScriptBuf, Transaction, - TxIn, TxOut, Txid, - }, - indexer::keychain_txout, - local_chain, tx_graph, ConfirmationBlockTime, DescriptorExt, DescriptorId, Merge, SpkIterator, + bitcoin::ScriptBuf, indexer::keychain_txout, DescriptorExt, DescriptorId, SpkIterator, +}; +use bdk_chain::{ + bitcoin::{self, OutPoint, Transaction, TxOut, Txid}, + local_chain, tx_graph, ConfirmationBlockTime, Merge, }; use std::collections::{BTreeMap, BTreeSet}; use std::path::Path; -use std::str::FromStr; use std::sync::Arc; -fn create_one_inp_one_out_tx(txid: Txid, amount: u64) -> Transaction { - Transaction { - version: transaction::Version::ONE, - lock_time: absolute::LockTime::ZERO, - input: vec![TxIn { - previous_output: OutPoint::new(txid, 0), - ..TxIn::default() - }], - output: vec![TxOut { - value: Amount::from_sat(amount), - script_pubkey: Address::from_str("bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5") - .unwrap() - .assume_checked() - .script_pubkey(), - }], - } -} +use crate::utils::{create_test_tx, create_txout}; -fn spk_at_index(descriptor: &Descriptor, index: u32) -> ScriptBuf { - descriptor - .derived_descriptor(&Secp256k1::verification_only(), index) - .expect("must derive") - .script_pubkey() -} +#[cfg(feature = "miniscript")] +use crate::utils::{parse_descriptor, spk_at_index}; + +const ADDRS: [&str; 2] = [ + "bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5", + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", +]; /// tests if [`TxGraph`] is being persisted correctly /// -/// [`TxGraph`]: -/// [`tx_graph::ChangeSet`]: -/// /// We create a dummy [`tx_graph::ChangeSet`], persist it and check if loaded `ChangeSet` matches /// the persisted one. 
We then create another such dummy `ChangeSet`, persist it and load it to /// check if merged `ChangeSet` is returned. @@ -70,7 +48,14 @@ pub fn persist_txgraph_changeset( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { block_id: block_id!(910425, "Rust"), @@ -80,29 +65,10 @@ pub fn persist_txgraph_changeset( let mut tx_graph_changeset1 = ChangeSet:: { txs: [tx1.clone()].into(), txouts: [ - ( - OutPoint::new(hash!("BDK"), 0), - TxOut { - value: Amount::from_sat(1300), - script_pubkey: Address::from_str( - "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", - ) - .unwrap() - .assume_checked() - .script_pubkey(), - }, - ), + (OutPoint::new(hash!("BDK"), 0), create_txout(1300, ADDRS[1])), ( OutPoint::new(hash!("Bitcoin_fixes_things"), 0), - TxOut { - value: Amount::from_sat(1400), - script_pubkey: Address::from_str( - "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", - ) - .unwrap() - .assume_checked() - .script_pubkey(), - }, + create_txout(1400, ADDRS[1]), ), ] .into(), @@ -119,7 +85,14 @@ pub fn persist_txgraph_changeset( assert_eq!(changeset, tx_graph_changeset1); // create another changeset - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { block_id: block_id!(910426, "BOSS"), @@ -129,14 +102,8 @@ pub fn persist_txgraph_changeset( let tx_graph_changeset2 = ChangeSet:: { txs: [tx2.clone()].into(), txouts: [( - OutPoint::new(hash!("REDB"), 0), - TxOut { - value: Amount::from_sat(10000), - script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") - .unwrap() - .assume_checked() - .script_pubkey(), - 
}, + OutPoint::new(hash!("Magical_Bitcoin"), 0), + create_txout(10000, ADDRS[1]), )] .into(), anchors: [(conf_anchor, tx2.compute_txid())].into(), @@ -155,22 +122,12 @@ pub fn persist_txgraph_changeset( assert_eq!(tx_graph_changeset1, changeset); } -fn parse_descriptor(descriptor: &str) -> Descriptor { - let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only(); - Descriptor::::parse_descriptor(&secp, descriptor) - .unwrap() - .0 -} - /// tests if [`KeychainTxOutIndex`] is being persisted correctly /// -/// [`KeychainTxOutIndex`]: -/// -/// [`keychain_txout::ChangeSet`]: -/// /// We create a dummy [`keychain_txout::ChangeSet`], persist it and check if loaded `ChangeSet` /// matches the persisted one. We then create another such dummy `ChangeSet`, persist it and load it /// to check if merged `ChangeSet` is returned. +#[cfg(feature = "miniscript")] pub fn persist_indexer_changeset( file_name: &str, create_store: CreateStore, @@ -194,8 +151,8 @@ pub fn persist_indexer_changeset( assert_eq!(changeset, ChangeSet::default()); // create changeset - let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); - let descs = DESCRIPTORS.map(parse_descriptor); + let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); + let descs = DESCRIPTORS.map(|desc| parse_descriptor(desc).0); let mut changeset = ChangeSet { last_revealed: [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(), @@ -240,9 +197,6 @@ pub fn persist_indexer_changeset( /// tests if [`LocalChain`] is being persisted correctly /// -/// [`LocalChain`]: -/// [`local_chain::ChangeSet`]: -/// /// We create a dummy [`local_chain::ChangeSet`], persist it and check if loaded `ChangeSet` matches /// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to /// check if merged `ChangeSet` is returned. 
@@ -327,9 +281,30 @@ pub fn persist_last_seen( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); - let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut last_seen: BTreeMap = [ @@ -393,9 +368,30 @@ pub fn persist_last_evicted( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BDK"), 30_000)); - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); - let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + let tx1 = Arc::new(create_test_tx( + [hash!("BDK")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); // try persisting and reading last_evicted let mut last_evicted: BTreeMap = [ @@ -457,9 +453,30 @@ pub fn persist_first_seen( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); - let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], 
+ [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); let mut first_seen: BTreeMap = [ @@ -521,26 +538,8 @@ pub fn persist_txouts( assert_eq!(changeset, ChangeSet::default()); let mut txouts: BTreeMap = [ - ( - OutPoint::new(hash!("B"), 0), - TxOut { - value: Amount::from_sat(1300), - script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") - .unwrap() - .assume_checked() - .script_pubkey(), - }, - ), - ( - OutPoint::new(hash!("D"), 0), - TxOut { - value: Amount::from_sat(1400), - script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") - .unwrap() - .assume_checked() - .script_pubkey(), - }, - ), + (OutPoint::new(hash!("B"), 0), create_txout(1300, ADDRS[1])), + (OutPoint::new(hash!("D"), 0), create_txout(1400, ADDRS[1])), ] .into(); @@ -556,17 +555,8 @@ pub fn persist_txouts( assert_eq!(changeset_read.txouts, txouts); // create another changeset - let txouts_new: BTreeMap = [( - OutPoint::new(hash!("K"), 0), - TxOut { - value: Amount::from_sat(10000), - script_pubkey: Address::from_str("bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl") - .unwrap() - .assume_checked() - .script_pubkey(), - }, - )] - .into(); + let txouts_new: BTreeMap = + [(OutPoint::new(hash!("K"), 0), create_txout(10000, ADDRS[1]))].into(); let changeset = ChangeSet:: { txouts: txouts_new.clone(), @@ -607,9 +597,30 @@ pub fn persist_txs( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!("BTC"), 30_000)); - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); - let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + 
[20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); let mut txs: BTreeSet> = [tx1, tx2.clone()].into(); @@ -664,10 +675,30 @@ pub fn persist_anchors( assert_eq!(changeset, ChangeSet::::default()); // create changeset - let tx1 = Arc::new(create_one_inp_one_out_tx(hash!(""), 30_000)); - let tx2 = Arc::new(create_one_inp_one_out_tx(tx1.compute_txid(), 20_000)); - let tx3 = Arc::new(create_one_inp_one_out_tx(tx2.compute_txid(), 19_000)); - + let tx1 = Arc::new(create_test_tx( + [hash!("Running_Bitcoin")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); let anchor1 = ConfirmationBlockTime { block_id: block_id!(23, "BTC"), confirmation_time: 1756838400, @@ -717,6 +748,7 @@ pub fn persist_anchors( /// We create a dummy [`keychain_txout::ChangeSet`] with only `last_revealed` field populated, /// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another /// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
+#[cfg(feature = "miniscript")] pub fn persist_last_revealed( file_name: &str, create_store: CreateStore, @@ -738,7 +770,7 @@ pub fn persist_last_revealed( assert_eq!(changeset, ChangeSet::default()); // create changeset - let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); let mut last_revealed: BTreeMap = [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(); @@ -773,6 +805,7 @@ pub fn persist_last_revealed( /// We create a dummy [`keychain_txout::ChangeSet`] with only `spk_cache` field populated, persist /// it and check if loaded `ChangeSet` matches the persisted one. We then create another such dummy /// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. +#[cfg(feature = "miniscript")] pub fn persist_spk_cache( file_name: &str, create_store: CreateStore, @@ -794,8 +827,8 @@ pub fn persist_spk_cache( assert_eq!(changeset, ChangeSet::default()); // create changeset - let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).descriptor_id()); - let descs = crate::utils::DESCRIPTORS.map(parse_descriptor); + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); + let descs = crate::utils::DESCRIPTORS.map(|desc| parse_descriptor(desc).0); let spk_cache: BTreeMap> = [ ( diff --git a/crates/testenv/src/utils.rs b/crates/testenv/src/utils.rs index 93ca1f217..ee4df4176 100644 --- a/crates/testenv/src/utils.rs +++ b/crates/testenv/src/utils.rs @@ -1,4 +1,10 @@ -use bdk_chain::bitcoin; +use bdk_chain::bitcoin::{ + self, absolute, transaction, Address, Amount, OutPoint, Transaction, TxIn, TxOut, Txid, +}; +use core::str::FromStr; + +#[cfg(feature = "miniscript")] +use bdk_chain::miniscript::{descriptor::KeyMap, Descriptor, DescriptorPublicKey}; #[allow(unused_macros)] #[macro_export] @@ -77,6 +83,71 @@ pub fn new_tx(lt: u32) -> bitcoin::Transaction 
{ } } +/// Utility function to create a [`TxOut`] given amount (in satoshis) and address. +pub fn create_txout(sats: u64, addr: &str) -> TxOut { + TxOut { + value: Amount::from_sat(sats), + script_pubkey: Address::from_str(addr) + .unwrap() + .assume_checked() + .script_pubkey(), + } +} + +/// Utility function to create a transaction given txids, vouts of inputs and amounts (in satoshis), +/// addresses of outputs. +/// +/// The locktime should be in the form given to `OP_CHECKLOCKTIMEVERIFY`. +pub fn create_test_tx( + txids: impl IntoIterator, + vouts: impl IntoIterator, + amounts: impl IntoIterator, + addrs: impl IntoIterator, + version: u32, + locktime: u32, +) -> Transaction { + let input_vec = core::iter::zip(txids, vouts) + .map(|(txid, vout)| TxIn { + previous_output: OutPoint::new(txid, vout), + ..TxIn::default() + }) + .collect(); + let output_vec = core::iter::zip(amounts, addrs) + .map(|(amount, addr)| create_txout(amount, addr)) + .collect(); + let version = transaction::Version::non_standard(version as i32); + assert!(version.is_standard()); + let lock_time = absolute::LockTime::from_consensus(locktime); + assert_eq!(lock_time.to_consensus_u32(), locktime); + Transaction { + version, + lock_time, + input: input_vec, + output: output_vec, + } +} + +/// Generates `script_pubkey` corresponding to `index` on keychain of `descriptor`. +#[cfg(feature = "miniscript")] +pub fn spk_at_index( + descriptor: &Descriptor, + index: u32, +) -> bdk_chain::bitcoin::ScriptBuf { + use bdk_chain::bitcoin::key::Secp256k1; + descriptor + .derived_descriptor(&Secp256k1::verification_only(), index) + .expect("must derive") + .script_pubkey() +} + +/// Parses a descriptor string.
+#[cfg(feature = "miniscript")] +pub fn parse_descriptor(descriptor: &str) -> (Descriptor, KeyMap) { + use bdk_chain::bitcoin::key::Secp256k1; + let secp = Secp256k1::signing_only(); + Descriptor::::parse_descriptor(&secp, descriptor).unwrap() +} + #[allow(unused)] pub const DESCRIPTORS: [&str; 7] = [ "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)", From fd9429c8f885556a6f94966bf12dc4a9dfd9ed49 Mon Sep 17 00:00:00 2001 From: codingp110 Date: Wed, 24 Sep 2025 13:50:42 +0530 Subject: [PATCH 5/6] build(testenv): make "miniscript" feature default The miniscript feature is retained so as to not be coupled with "download". --- crates/testenv/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index 81c94de2f..e659d245c 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -25,8 +25,8 @@ tempfile = "3.20.0" bdk_testenv = { path = "." } [features] -default = ["std", "download"] -miniscript = ["std", "bdk_chain/miniscript"] +default = ["std" , "download", "miniscript"] +miniscript = ["std", "bdk_chain/default"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] From c4b68735c516b0c5536ba71833fa7ce304f367db Mon Sep 17 00:00:00 2001 From: codingp110 Date: Sat, 27 Sep 2025 23:03:50 +0530 Subject: [PATCH 6/6] build(testenv): modify default feature The feature now depends on `bdk_chain/default`. Now default feature gates the persistence test utils rather than std. 
--- crates/bitcoind_rpc/Cargo.toml | 2 +- crates/chain/Cargo.toml | 2 +- crates/electrum/Cargo.toml | 2 +- crates/esplora/Cargo.toml | 2 +- crates/testenv/Cargo.toml | 3 +-- crates/testenv/src/lib.rs | 2 +- crates/testenv/src/persist_test_utils.rs | 5 ----- crates/testenv/src/utils.rs | 6 +++--- 8 files changed, 9 insertions(+), 15 deletions(-) diff --git a/crates/bitcoind_rpc/Cargo.toml b/crates/bitcoind_rpc/Cargo.toml index 2b8e03d20..e46b1d25d 100644 --- a/crates/bitcoind_rpc/Cargo.toml +++ b/crates/bitcoind_rpc/Cargo.toml @@ -22,7 +22,7 @@ bdk_core = { path = "../core", version = "0.6.1", default-features = false } [dev-dependencies] bdk_bitcoind_rpc = { path = "." } -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} bdk_chain = { path = "../chain" } [features] diff --git a/crates/chain/Cargo.toml b/crates/chain/Cargo.toml index dd7d2bb36..27bdcb49e 100644 --- a/crates/chain/Cargo.toml +++ b/crates/chain/Cargo.toml @@ -27,7 +27,7 @@ rusqlite = { version = "0.31.0", features = ["bundled"], optional = true } [dev-dependencies] rand = "0.8" proptest = "1.2.0" -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} criterion = { version = "0.2" } [features] diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml index 8fdd7823b..3aa259984 100644 --- a/crates/electrum/Cargo.toml +++ b/crates/electrum/Cargo.toml @@ -17,7 +17,7 @@ bdk_core = { path = "../core", version = "0.6.1" } electrum-client = { version = "0.24.0", features = [ "proxy" ], default-features = false } [dev-dependencies] -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} bdk_chain = { path = "../chain" } criterion = { version = "0.2" } diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml index e4c553f77..1d29f0c21 100644 --- a/crates/esplora/Cargo.toml +++ b/crates/esplora/Cargo.toml @@ -23,7 +23,7 @@ futures = { version = 
"0.3.26", optional = true } [dev-dependencies] esplora-client = { version = "0.12.0" } bdk_chain = { path = "../chain" } -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] } [features] diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index e659d245c..ba9466506 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -25,8 +25,7 @@ tempfile = "3.20.0" bdk_testenv = { path = "." } [features] -default = ["std" , "download", "miniscript"] -miniscript = ["std", "bdk_chain/default"] +default = ["bdk_chain/default"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] diff --git a/crates/testenv/src/lib.rs b/crates/testenv/src/lib.rs index 92ef264e9..d6be50b05 100644 --- a/crates/testenv/src/lib.rs +++ b/crates/testenv/src/lib.rs @@ -1,6 +1,6 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] -#[cfg(feature = "std")] +#[cfg(feature = "default")] pub mod persist_test_utils; pub mod utils; diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs index b950acdf5..9df5fc73d 100644 --- a/crates/testenv/src/persist_test_utils.rs +++ b/crates/testenv/src/persist_test_utils.rs @@ -1,6 +1,5 @@ //! This module provides utility functions for testing custom persistence backends. 
use crate::{block_id, hash}; -#[cfg(feature = "miniscript")] use bdk_chain::{ bitcoin::ScriptBuf, indexer::keychain_txout, DescriptorExt, DescriptorId, SpkIterator, }; @@ -14,7 +13,6 @@ use std::sync::Arc; use crate::utils::{create_test_tx, create_txout}; -#[cfg(feature = "miniscript")] use crate::utils::{parse_descriptor, spk_at_index}; const ADDRS: [&str; 2] = [ @@ -127,7 +125,6 @@ pub fn persist_txgraph_changeset( /// We create a dummy [`keychain_txout::ChangeSet`], persist it and check if loaded `ChangeSet` /// matches the persisted one. We then create another such dummy `ChangeSet`, persist it and load it /// to check if merged `ChangeSet` is returned. -#[cfg(feature = "miniscript")] pub fn persist_indexer_changeset( file_name: &str, create_store: CreateStore, @@ -748,7 +745,6 @@ pub fn persist_anchors( /// We create a dummy [`keychain_txout::ChangeSet`] with only `last_revealed` field populated, /// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another /// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. -#[cfg(feature = "miniscript")] pub fn persist_last_revealed( file_name: &str, create_store: CreateStore, @@ -805,7 +801,6 @@ pub fn persist_last_revealed( /// We create a dummy [`keychain_txout::ChangeSet`] with only `spk_cache` field populated, persist /// it and check if loaded `ChangeSet` matches the persisted one. We then create another such dummy /// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
-#[cfg(feature = "miniscript")] pub fn persist_spk_cache( file_name: &str, create_store: CreateStore, diff --git a/crates/testenv/src/utils.rs b/crates/testenv/src/utils.rs index ee4df4176..75f1d40d2 100644 --- a/crates/testenv/src/utils.rs +++ b/crates/testenv/src/utils.rs @@ -3,7 +3,7 @@ use bdk_chain::bitcoin::{ }; use core::str::FromStr; -#[cfg(feature = "miniscript")] +#[cfg(feature = "default")] use bdk_chain::miniscript::{descriptor::KeyMap, Descriptor, DescriptorPublicKey}; #[allow(unused_macros)] @@ -128,7 +128,7 @@ pub fn create_test_tx( } /// Generates `script_pubkey` corresponding to `index` on keychain of `descriptor`. -#[cfg(feature = "miniscript")] +#[cfg(feature = "default")] pub fn spk_at_index( descriptor: &Descriptor, index: u32, @@ -141,7 +141,7 @@ pub fn spk_at_index( } /// Parses a descriptor string. -#[cfg(feature = "miniscript")] +#[cfg(feature = "default")] pub fn parse_descriptor(descriptor: &str) -> (Descriptor, KeyMap) { use bdk_chain::bitcoin::key::Secp256k1; let secp = Secp256k1::signing_only();