diff --git a/crates/bitcoind_rpc/Cargo.toml b/crates/bitcoind_rpc/Cargo.toml index 2b8e03d20..e46b1d25d 100644 --- a/crates/bitcoind_rpc/Cargo.toml +++ b/crates/bitcoind_rpc/Cargo.toml @@ -22,7 +22,7 @@ bdk_core = { path = "../core", version = "0.6.1", default-features = false } [dev-dependencies] bdk_bitcoind_rpc = { path = "." } -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} bdk_chain = { path = "../chain" } [features] diff --git a/crates/chain/Cargo.toml b/crates/chain/Cargo.toml index dd7d2bb36..27bdcb49e 100644 --- a/crates/chain/Cargo.toml +++ b/crates/chain/Cargo.toml @@ -27,7 +27,7 @@ rusqlite = { version = "0.31.0", features = ["bundled"], optional = true } [dev-dependencies] rand = "0.8" proptest = "1.2.0" -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} criterion = { version = "0.2" } [features] diff --git a/crates/chain/tests/test_rusqlite_impl.rs b/crates/chain/tests/test_rusqlite_impl.rs new file mode 100644 index 000000000..6067a9864 --- /dev/null +++ b/crates/chain/tests/test_rusqlite_impl.rs @@ -0,0 +1,227 @@ +#![cfg(feature = "rusqlite")] +use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; +use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, +}; + +#[test] +fn txgraph_is_persisted() { + persist_txgraph_changeset::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + 
Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn indexer_is_persisted() { + persist_indexer_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn local_chain_is_persisted() { + persist_local_chain_changeset::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + local_chain::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = local_chain::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn txouts_are_persisted() { + persist_txouts::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn txs_are_persisted() { + persist_txs::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn anchors_are_persisted() { + persist_anchors::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_seen_is_persisted() { + persist_last_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn last_evicted_is_persisted() { + persist_last_evicted::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn first_seen_is_persisted() { + persist_first_seen::( + "wallet.sqlite", + |path| Ok(bdk_chain::rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + tx_graph::ChangeSet::::init_sqlite_tables(&db_tx)?; + let changeset = tx_graph::ChangeSet::::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) 
+ }, + ); +} + +#[test] +fn last_revealed_is_persisted() { + persist_last_revealed::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} + +#[test] +fn spk_cache_is_persisted() { + persist_spk_cache::( + "wallet.sqlite", + |path| Ok(rusqlite::Connection::open(path)?), + |db| { + let db_tx = db.transaction()?; + keychain_txout::ChangeSet::init_sqlite_tables(&db_tx)?; + let changeset = keychain_txout::ChangeSet::from_sqlite(&db_tx)?; + db_tx.commit()?; + Ok(changeset) + }, + |db, changeset| { + let db_tx = db.transaction()?; + changeset.persist_to_sqlite(&db_tx)?; + Ok(db_tx.commit()?) + }, + ); +} diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml index 8fdd7823b..3aa259984 100644 --- a/crates/electrum/Cargo.toml +++ b/crates/electrum/Cargo.toml @@ -17,7 +17,7 @@ bdk_core = { path = "../core", version = "0.6.1" } electrum-client = { version = "0.24.0", features = [ "proxy" ], default-features = false } [dev-dependencies] -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} bdk_chain = { path = "../chain" } criterion = { version = "0.2" } diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml index e4c553f77..1d29f0c21 100644 --- a/crates/esplora/Cargo.toml +++ b/crates/esplora/Cargo.toml @@ -23,7 +23,7 @@ futures = { version = "0.3.26", optional = true } [dev-dependencies] esplora-client = { version = "0.12.0" } bdk_chain = { path = "../chain" } -bdk_testenv = { path = "../testenv" } +bdk_testenv = { path = "../testenv", features = ["download"]} tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] } [features] diff --git 
a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml index 714c40e1b..a6c0e8a85 100644 --- a/crates/file_store/Cargo.toml +++ b/crates/file_store/Cargo.toml @@ -20,3 +20,5 @@ serde = { version = "1", features = ["derive"] } [dev-dependencies] tempfile = "3" +bdk_testenv = {path = "../testenv"} +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false, features = ["serde"]} \ No newline at end of file diff --git a/crates/file_store/src/store.rs b/crates/file_store/src/store.rs index 7e1867926..f36205af5 100644 --- a/crates/file_store/src/store.rs +++ b/crates/file_store/src/store.rs @@ -295,6 +295,13 @@ mod test { const TEST_MAGIC_BYTES: [u8; TEST_MAGIC_BYTES_LEN] = [98, 100, 107, 102, 115, 49, 49, 49, 49, 49, 49, 49]; + use bdk_chain::{keychain_txout, local_chain, tx_graph, ConfirmationBlockTime}; + use bdk_testenv::persist_test_utils::{ + persist_anchors, persist_first_seen, persist_indexer_changeset, persist_last_evicted, + persist_last_revealed, persist_last_seen, persist_local_chain_changeset, persist_spk_cache, + persist_txgraph_changeset, persist_txouts, persist_txs, + }; + type TestChangeSet = BTreeSet; /// Check behavior of [`Store::create`] and [`Store::load`]. 
@@ -599,4 +606,114 @@ mod test { // current position matches EOF assert_eq!(current_pointer, expected_pointer); } + + #[test] + fn txgraph_is_persisted() { + persist_txgraph_changeset::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn indexer_is_persisted() { + persist_indexer_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn local_chain_is_persisted() { + persist_local_chain_changeset::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txouts_are_persisted() { + persist_txouts::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn txs_are_persisted() { + persist_txs::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn anchors_are_persisted() { + persist_anchors::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_seen_is_persisted() { + persist_last_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_evicted_is_persisted() { + persist_last_evicted::>, _, _, _>( + 
"wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn first_seen_is_persisted() { + persist_first_seen::>, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn last_revealed_is_persisted() { + persist_last_revealed::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } + + #[test] + fn spk_cache_is_persisted() { + persist_spk_cache::, _, _, _>( + "wallet.db", + |path| Ok(Store::create(&TEST_MAGIC_BYTES, path)?), + |db| Ok(db.dump().map(Option::unwrap_or_default)?), + |db, changeset| Ok(db.append(changeset)?), + ); + } } diff --git a/crates/testenv/Cargo.toml b/crates/testenv/Cargo.toml index eff58a41c..ba9466506 100644 --- a/crates/testenv/Cargo.toml +++ b/crates/testenv/Cargo.toml @@ -16,17 +16,18 @@ readme = "README.md" workspace = true [dependencies] -bdk_chain = { path = "../chain", version = "0.23.1", default-features = false } +bdk_chain = { path = "../chain", version = "0.23.1", default-features = false} electrsd = { version = "0.28.0", features = [ "legacy" ], default-features = false } +anyhow = "1.0.98" +tempfile = "3.20.0" [dev-dependencies] bdk_testenv = { path = "." 
} [features] -default = ["std", "download"] +default = ["bdk_chain/default"] download = ["electrsd/bitcoind_25_0", "electrsd/esplora_a33e97e1"] std = ["bdk_chain/std"] serde = ["bdk_chain/serde"] - [package.metadata.docs.rs] no-default-features = true diff --git a/crates/testenv/src/lib.rs b/crates/testenv/src/lib.rs index 9faf43bf2..d6be50b05 100644 --- a/crates/testenv/src/lib.rs +++ b/crates/testenv/src/lib.rs @@ -1,5 +1,7 @@ #![cfg_attr(coverage_nightly, feature(coverage_attribute))] +#[cfg(feature = "default")] +pub mod persist_test_utils; pub mod utils; use bdk_chain::{ diff --git a/crates/testenv/src/persist_test_utils.rs b/crates/testenv/src/persist_test_utils.rs new file mode 100644 index 000000000..9df5fc73d --- /dev/null +++ b/crates/testenv/src/persist_test_utils.rs @@ -0,0 +1,877 @@ +//! This module provides utility functions for testing custom persistence backends. +use crate::{block_id, hash}; +use bdk_chain::{ + bitcoin::ScriptBuf, indexer::keychain_txout, DescriptorExt, DescriptorId, SpkIterator, +}; +use bdk_chain::{ + bitcoin::{self, OutPoint, Transaction, TxOut, Txid}, + local_chain, tx_graph, ConfirmationBlockTime, Merge, +}; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; +use std::sync::Arc; + +use crate::utils::{create_test_tx, create_txout}; + +use crate::utils::{parse_descriptor, spk_at_index}; + +const ADDRS: [&str; 2] = [ + "bcrt1q3qtze4ys45tgdvguj66zrk4fu6hq3a3v9pfly5", + "bcrt1q8an5jfmpq8w2hr648nn34ecf9zdtxk0qyqtrfl", +]; + +/// tests if [`TxGraph`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. 
+pub fn persist_txgraph_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create the store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + block_id: block_id!(910425, "Rust"), + confirmation_time: 1755416660, + }; + + let mut tx_graph_changeset1 = ChangeSet:: { + txs: [tx1.clone()].into(), + txouts: [ + (OutPoint::new(hash!("BDK"), 0), create_txout(1300, ADDRS[1])), + ( + OutPoint::new(hash!("Bitcoin_fixes_things"), 0), + create_txout(1400, ADDRS[1]), + ), + ] + .into(), + anchors: [(conf_anchor, tx1.compute_txid())].into(), + last_seen: [(tx1.compute_txid(), 1755416650)].into(), + first_seen: [(tx1.compute_txid(), 1755416655)].into(), + last_evicted: [(tx1.compute_txid(), 1755416660)].into(), + }; + + // persist and load + persist(&mut store, &tx_graph_changeset1).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset, tx_graph_changeset1); + + // create another changeset + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + + let conf_anchor: ConfirmationBlockTime = ConfirmationBlockTime { + block_id: block_id!(910426, "BOSS"), + confirmation_time: 1755416700, + }; + 
+ let tx_graph_changeset2 = ChangeSet:: { + txs: [tx2.clone()].into(), + txouts: [( + OutPoint::new(hash!("Magical_Bitcoin"), 0), + create_txout(10000, ADDRS[1]), + )] + .into(), + anchors: [(conf_anchor, tx2.compute_txid())].into(), + last_seen: [(tx2.compute_txid(), 1755416700)].into(), + first_seen: [(tx2.compute_txid(), 1755416670)].into(), + last_evicted: [(tx2.compute_txid(), 1755416760)].into(), + }; + + // persist, load and check if same as merged + persist(&mut store, &tx_graph_changeset2).expect("should persist changeset"); + + let changeset = initialize(&mut store).expect("should load persisted changeset"); + + tx_graph_changeset1.merge(tx_graph_changeset2); + + assert_eq!(tx_graph_changeset1, changeset); +} + +/// tests if [`KeychainTxOutIndex`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`], persist it and check if loaded `ChangeSet` +/// matches the persisted one. We then create another such dummy `ChangeSet`, persist it and load it +/// to check if merged `ChangeSet` is returned. 
+pub fn persist_indexer_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use crate::utils::DESCRIPTORS; + use keychain_txout::ChangeSet; + + // create the store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + // create changeset + let descriptor_ids = DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); + let descs = DESCRIPTORS.map(|desc| parse_descriptor(desc).0); + + let mut changeset = ChangeSet { + last_revealed: [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(), + spk_cache: [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=26).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[1], 0..=125).collect(), + ), + ] + .into(), + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist keychain_txout"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + assert_eq!(changeset_read, changeset); + + // create another changeset + let changeset_new = ChangeSet { + last_revealed: [(descriptor_ids[0], 2)].into(), + spk_cache: [( + descriptor_ids[0], + [(27, spk_at_index(&descs[0], 27))].into(), + )] + .into(), + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset_new).expect("should persist second changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load merged changesets"); + changeset.merge(changeset_new); + + assert_eq!(changeset_read_new, 
changeset); +} + +/// tests if [`LocalChain`] is being persisted correctly +/// +/// We create a dummy [`local_chain::ChangeSet`], persist it and check if loaded `ChangeSet` matches +/// the persisted one. We then create another such dummy `ChangeSet`, persist it and load it to +/// check if merged `ChangeSet` is returned. +pub fn persist_local_chain_changeset( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &local_chain::ChangeSet) -> anyhow::Result<()>, +{ + use local_chain::ChangeSet; + // create the store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + // create changeset + let changeset = ChangeSet { + blocks: [(910425, Some(hash!("B"))), (910426, Some(hash!("D")))].into(), + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read, changeset); + + // create another changeset + let changeset_new = ChangeSet { + blocks: [(910427, Some(hash!("K")))].into(), + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset_new).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + + let changeset = ChangeSet { + blocks: [ + (910425, Some(hash!("B"))), + (910426, Some(hash!("D"))), + (910427, Some(hash!("K"))), + ] + .into(), + }; + + assert_eq!(changeset, changeset_read_new); +} + +/// tests if `last_seen` field of [`tx_graph::ChangeSet`] 
is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `last_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. +pub fn persist_last_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); + + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut last_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416700), + (tx2.compute_txid(), 1755416800), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + last_seen: last_seen.clone(), + ..ChangeSet::::default() + }; + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_seen, last_seen); + + // create another changeset + let txs_new: BTreeSet> = 
[tx3.clone()].into(); + let last_seen_new: BTreeMap = [(tx3.compute_txid(), 1755417800)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + last_seen: last_seen_new.clone(), + ..ChangeSet::::default() + }; + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_seen.merge(last_seen_new); + assert_eq!(changeset_read_new.last_seen, last_seen); +} + +/// tests if `last_evicted` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `last_evicted` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. +pub fn persist_last_evicted( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("BDK")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); + + // 
try persisting and reading last_evicted + let mut last_evicted: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416060), + ] + .into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted.clone(), + ..ChangeSet::::default() + }; + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_evicted, last_evicted); + + // create another changeset + let last_evicted_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + last_evicted: last_evicted_new.clone(), + ..ChangeSet::::default() + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_evicted.merge(last_evicted_new); + assert_eq!(changeset_read_new.last_evicted, last_evicted); +} + +/// tests if `first_seen` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `first_seen` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create +/// another such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is +/// returned. 
+pub fn persist_first_seen( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = + initialize(&mut store).expect("store should initialize and we should get empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); + + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut first_seen: BTreeMap = [ + (tx1.compute_txid(), 1755416600), + (tx2.compute_txid(), 1755416600), + ] + .into(); + + let changeset = ChangeSet:: { + txs, + first_seen: first_seen.clone(), + ..ChangeSet::::default() + }; + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.first_seen, first_seen); + + // create another changeset + let txs_new: BTreeSet> = [tx3.clone()].into(); + let first_seen_new: BTreeMap = [(tx3.compute_txid(), 1755416700)].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + first_seen: first_seen_new.clone(), + ..ChangeSet::::default() + }; + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + + let 
changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + first_seen.merge(first_seen_new); + assert_eq!(changeset_read_new.first_seen, first_seen); +} + +/// tests if `txouts` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txouts` field populated, persist it and +/// check if loaded `ChangeSet` matches the persisted one. We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. +pub fn persist_txouts( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // initialize store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // create changeset + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + let mut txouts: BTreeMap = [ + (OutPoint::new(hash!("B"), 0), create_txout(1300, ADDRS[1])), + (OutPoint::new(hash!("D"), 0), create_txout(1400, ADDRS[1])), + ] + .into(); + + let changeset = ChangeSet:: { + txouts: txouts.clone(), + ..ChangeSet::::default() + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read = initialize(&mut store).expect("should load changeset"); + assert_eq!(changeset_read.txouts, txouts); + + // create another changeset + let txouts_new: BTreeMap = + [(OutPoint::new(hash!("K"), 0), create_txout(10000, ADDRS[1]))].into(); + + let changeset = ChangeSet:: { + txouts: txouts_new.clone(), + ..ChangeSet::::default() + }; + + // persist, load 
and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + + let changeset_read_new = initialize(&mut store).expect("should load changeset"); + txouts.merge(txouts_new); + assert_eq!(changeset_read_new.txouts, txouts); +} + +/// tests if `txs` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `txs` field populated, persist it and check +/// if loaded `ChangeSet` matches the persisted one. We then create another such dummy `ChangeSet`, +/// persist it and load it to check if merged `ChangeSet` is returned. +pub fn persist_txs( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("BTC")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); + + let mut txs: BTreeSet> = [tx1, tx2.clone()].into(); + + let changeset = ChangeSet:: { + txs: txs.clone(), + ..ChangeSet::::default() + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted 
changeset"); + assert_eq!(changeset_read.txs, txs); + + let txs_new: BTreeSet> = [tx3].into(); + + // create another changeset + let changeset = ChangeSet:: { + txs: txs_new.clone(), + ..ChangeSet::::default() + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + txs.merge(txs_new); + assert_eq!(changeset_read_new.txs, txs); +} + +/// tests if `anchors` field of [`tx_graph::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`tx_graph::ChangeSet`] with only `anchors` and `txs` fields populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. +pub fn persist_anchors( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result>, + Persist: Fn(&mut Store, &tx_graph::ChangeSet) -> anyhow::Result<()>, +{ + use tx_graph::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::::default()); + + // create changeset + let tx1 = Arc::new(create_test_tx( + [hash!("Running_Bitcoin")], + [0], + [30_000], + [ADDRS[0]], + 1, + 0, + )); + let tx2 = Arc::new(create_test_tx( + [tx1.compute_txid()], + [0], + [20_000], + [ADDRS[0]], + 1, + 0, + )); + let tx3 = Arc::new(create_test_tx( + [tx2.compute_txid()], + [0], + [19_000], + [ADDRS[0]], + 1, + 0, + )); + let anchor1 = ConfirmationBlockTime { + block_id: 
block_id!(23, "BTC"), + confirmation_time: 1756838400, + }; + + let anchor2 = ConfirmationBlockTime { + block_id: block_id!(25, "BDK"), + confirmation_time: 1756839600, + }; + + let txs: BTreeSet> = [tx1.clone(), tx2.clone()].into(); + let mut anchors: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor1, tx1.compute_txid()), (anchor2, tx2.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs, + anchors: anchors.clone(), + ..ChangeSet::::default() + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.anchors, anchors); + + // create another changeset + let txs_new: BTreeSet> = [tx3.clone()].into(); + let anchors_new: BTreeSet<(ConfirmationBlockTime, Txid)> = + [(anchor2, tx3.compute_txid())].into(); + + let changeset = ChangeSet:: { + txs: txs_new, + anchors: anchors_new.clone(), + ..ChangeSet::::default() + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + + anchors.merge(anchors_new); + assert_eq!(changeset_read.anchors, anchors); +} + +/// tests if `last_revealed` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `last_revealed` field populated, +/// persist it and check if loaded `ChangeSet` matches the persisted one. We then create another +/// such dummy `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. 
+pub fn persist_last_revealed( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + // create changeset + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); + + let mut last_revealed: BTreeMap = + [(descriptor_ids[0], 1), (descriptor_ids[1], 100)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed.clone(), + ..ChangeSet::default() + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.last_revealed, last_revealed); + + // create another changeset + let last_revealed_new: BTreeMap = [(descriptor_ids[0], 2)].into(); + + let changeset = ChangeSet { + last_revealed: last_revealed_new.clone(), + ..ChangeSet::default() + }; + + // persist, load and check if same as merged + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read_new = initialize(&mut store).expect("should load persisted changeset"); + last_revealed.merge(last_revealed_new); + assert_eq!(changeset_read_new.last_revealed, last_revealed); +} + +/// tests if `spk_cache` field of [`keychain_txout::ChangeSet`] is being persisted correctly +/// +/// We create a dummy [`keychain_txout::ChangeSet`] with only `spk_cache` field 
populated, persist +/// it and check if loaded `ChangeSet` matches the persisted one. We then create another such dummy +/// `ChangeSet`, persist it and load it to check if merged `ChangeSet` is returned. +pub fn persist_spk_cache( + file_name: &str, + create_store: CreateStore, + initialize: Initialize, + persist: Persist, +) where + CreateStore: Fn(&Path) -> anyhow::Result, + Initialize: Fn(&mut Store) -> anyhow::Result, + Persist: Fn(&mut Store, &keychain_txout::ChangeSet) -> anyhow::Result<()>, +{ + use keychain_txout::ChangeSet; + // create store + let temp_dir = tempfile::tempdir().expect("must create tempdir"); + let file_path = temp_dir.path().join(file_name); + let mut store = create_store(&file_path).expect("store should get created"); + + // initialize store + let changeset = initialize(&mut store).expect("should initialize and load empty changeset"); + assert_eq!(changeset, ChangeSet::default()); + + // create changeset + let descriptor_ids = crate::utils::DESCRIPTORS.map(|d| parse_descriptor(d).0.descriptor_id()); + let descs = crate::utils::DESCRIPTORS.map(|desc| parse_descriptor(desc).0); + + let spk_cache: BTreeMap> = [ + ( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 0..=125).collect(), + ), + ( + descriptor_ids[1], + SpkIterator::new_with_range(&descs[0], 0..=25).collect(), + ), + ] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache.clone(), + ..ChangeSet::default() + }; + + // persist and load + persist(&mut store, &changeset).expect("should persist changeset"); + let changeset_read = initialize(&mut store).expect("should load persisted changeset"); + assert_eq!(changeset_read.spk_cache, spk_cache); + + // create another changeset + let spk_cache_new: BTreeMap> = [( + descriptor_ids[0], + SpkIterator::new_with_range(&descs[0], 126..=150).collect(), + )] + .into(); + + let changeset = ChangeSet { + spk_cache: spk_cache_new, + ..ChangeSet::default() + }; + + // persist, load and check if same as merged + persist(&mut 
store, &changeset).expect("should persist changeset");
+    let changeset_read_new = initialize(&mut store).expect("should load persisted changeset");
+    let spk_cache: BTreeMap<DescriptorId, BTreeMap<u32, ScriptBuf>> = [
+        (
+            descriptor_ids[0],
+            SpkIterator::new_with_range(&descs[0], 0..=150).collect(),
+        ),
+        (
+            descriptor_ids[1],
+            SpkIterator::new_with_range(&descs[0], 0..=25).collect(),
+        ),
+    ]
+    .into();
+    assert_eq!(changeset_read_new.spk_cache, spk_cache);
+}
diff --git a/crates/testenv/src/utils.rs b/crates/testenv/src/utils.rs
index 93ca1f217..75f1d40d2 100644
--- a/crates/testenv/src/utils.rs
+++ b/crates/testenv/src/utils.rs
@@ -1,4 +1,10 @@
-use bdk_chain::bitcoin;
+use bdk_chain::bitcoin::{
+    self, absolute, transaction, Address, Amount, OutPoint, Transaction, TxIn, TxOut, Txid,
+};
+use core::str::FromStr;
+
+#[cfg(feature = "default")]
+use bdk_chain::miniscript::{descriptor::KeyMap, Descriptor, DescriptorPublicKey};
 
 #[allow(unused_macros)]
 #[macro_export]
@@ -77,6 +83,71 @@ pub fn new_tx(lt: u32) -> bitcoin::Transaction {
     }
 }
 
+/// Utility function to create a [`TxOut`] given amount (in satoshis) and address.
+pub fn create_txout(sats: u64, addr: &str) -> TxOut {
+    TxOut {
+        value: Amount::from_sat(sats),
+        script_pubkey: Address::from_str(addr)
+            .unwrap()
+            .assume_checked()
+            .script_pubkey(),
+    }
+}
+
+/// Utility function to create a transaction given the txids and vouts of its inputs, and the
+/// amounts (in satoshis) and addresses of its outputs.
+///
+/// The locktime should be in the form given to `OP_CHECKLOCKTIMEVERIFY`.
+pub fn create_test_tx( + txids: impl IntoIterator, + vouts: impl IntoIterator, + amounts: impl IntoIterator, + addrs: impl IntoIterator, + version: u32, + locktime: u32, +) -> Transaction { + let input_vec = core::iter::zip(txids, vouts) + .map(|(txid, vout)| TxIn { + previous_output: OutPoint::new(txid, vout), + ..TxIn::default() + }) + .collect(); + let output_vec = core::iter::zip(amounts, addrs) + .map(|(amount, addr)| create_txout(amount, addr)) + .collect(); + let version = transaction::Version::non_standard(version as i32); + assert!(version.is_standard()); + let lock_time = absolute::LockTime::from_consensus(locktime); + assert_eq!(lock_time.to_consensus_u32(), locktime); + Transaction { + version, + lock_time, + input: input_vec, + output: output_vec, + } +} + +/// Generates `script_pubkey` corresponding to `index` on keychain of `descriptor`. +#[cfg(feature = "default")] +pub fn spk_at_index( + descriptor: &Descriptor, + index: u32, +) -> bdk_chain::bitcoin::ScriptBuf { + use bdk_chain::bitcoin::key::Secp256k1; + descriptor + .derived_descriptor(&Secp256k1::verification_only(), index) + .expect("must derive") + .script_pubkey() +} + +/// Parses a descriptor string. +#[cfg(feature = "default")] +pub fn parse_descriptor(descriptor: &str) -> (Descriptor, KeyMap) { + use bdk_chain::bitcoin::key::Secp256k1; + let secp = Secp256k1::signing_only(); + Descriptor::::parse_descriptor(&secp, descriptor).unwrap() +} + #[allow(unused)] pub const DESCRIPTORS: [&str; 7] = [ "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)",