From f8692b8d439d12b70fa6baa00a84da990ace8f0b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 6 Jun 2023 12:22:17 +0200 Subject: [PATCH 01/10] Add previously ommitted `block_in_place` While it's not strictly necessary for us internally, it's nice to add to allow the Node itself to be used in tokio context. --- src/lib.rs | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 3fd358fab..4f5d1c98c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -309,22 +309,25 @@ impl Node { // Block to ensure we update our fee rate cache once on startup let wallet = Arc::clone(&self.wallet); let sync_logger = Arc::clone(&self.logger); - runtime.block_on(async move { - let now = Instant::now(); - match wallet.update_fee_estimates().await { - Ok(()) => { - log_info!( - sync_logger, - "Initial fee rate cache update finished in {}ms.", - now.elapsed().as_millis() - ); - Ok(()) - } - Err(e) => { - log_error!(sync_logger, "Initial fee rate cache update failed: {}", e,); - Err(e) + let runtime_ref = &runtime; + tokio::task::block_in_place(move || { + runtime_ref.block_on(async move { + let now = Instant::now(); + match wallet.update_fee_estimates().await { + Ok(()) => { + log_info!( + sync_logger, + "Initial fee rate cache update finished in {}ms.", + now.elapsed().as_millis() + ); + Ok(()) + } + Err(e) => { + log_error!(sync_logger, "Initial fee rate cache update failed: {}", e,); + Err(e) + } } - } + }) })?; // Setup wallet sync From 879eeee872f1a513a5475cc6b60ed43460e1f066 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 13 Jun 2023 15:30:59 +0200 Subject: [PATCH 02/10] Let `Builder` emit `BuildError` rather than panic --- README.md | 2 +- bindings/ldk_node.udl | 12 ++++ src/builder.rs | 115 ++++++++++++++++++++++++----------- src/lib.rs | 5 +- src/test/functional_tests.rs | 28 ++++----- 5 files changed, 108 insertions(+), 54 deletions(-) diff --git a/README.md b/README.md index 181b70e24..d72c29bae 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ fn main() { builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); builder.set_gossip_source_rgs("https://rapidsync.lightningdevkit.org/testnet/snapshot".to_string()); - let node = builder.build(); + let node = builder.build().unwrap(); node.start().unwrap(); diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index c46bf4ace..05893fa29 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -19,6 +19,7 @@ interface Builder { [Name=from_config] constructor(Config config); void set_entropy_seed_path(string seed_path); + [Throws=BuildError] void set_entropy_seed_bytes(sequence seed_bytes); void set_entropy_bip39_mnemonic(Mnemonic mnemonic, string? 
passphrase); void set_esplora_server(string esplora_server_url); @@ -27,6 +28,7 @@ interface Builder { void set_storage_dir_path(string storage_dir_path); void set_network(Network network); void set_listening_address(NetAddress listening_address); + [Throws=BuildError] LDKNode build(); }; @@ -115,6 +117,16 @@ enum NodeError { "InsufficientFunds", }; +[Error] +enum BuildError { + "InvalidSeedBytes", + "InvalidSystemTime", + "ReadFailed", + "WriteFailed", + "StoragePathAccessFailed", + "WalletSetupFailed", +}; + [Enum] interface Event { PaymentSuccessful( PaymentHash payment_hash ); diff --git a/src/builder.rs b/src/builder.rs index 61ae17233..3cb8d3293 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -44,6 +44,7 @@ use bitcoin::BlockHash; use std::convert::TryInto; use std::default::Default; +use std::fmt; use std::fs; use std::sync::{Arc, Mutex, RwLock}; use std::time::SystemTime; @@ -66,6 +67,42 @@ enum GossipSourceConfig { RapidGossipSync(String), } +/// An error encountered during building a [`Node`]. +/// +/// [`Node`]: crate::Node +#[derive(Debug, Clone)] +pub enum BuildError { + /// The given seed bytes are invalid, e.g, are of invalid length. + InvalidSeedBytes, + /// The current system time is invalid, clocks might have gone backwards. + InvalidSystemTime, + /// We failed to read data from the [`KVStore`]. + ReadFailed, + /// We failed to write data to the [`KVStore`]. + WriteFailed, + /// We failed to access the given `storage_dir_path`. + StoragePathAccessFailed, + /// We failed to setup the onchain wallet. + WalletSetupFailed, +} + +impl fmt::Display for BuildError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Self::InvalidSeedBytes => write!(f, "Given seed bytes are invalid."), + Self::InvalidSystemTime => { + write!(f, "System time is invalid. Clocks might have gone back in time.") + } + Self::ReadFailed => write!(f, "Failed to read from store."), + Self::WriteFailed => write!(f, "Failed to write to store."), + Self::StoragePathAccessFailed => write!(f, "Failed to access the given storage path."), + Self::WalletSetupFailed => write!(f, "Failed to setup onchain wallet."), + } + } +} + +impl std::error::Error for BuildError {} + /// A builder for an [`Node`] instance, allowing to set some configuration and module choices from /// the getgo. /// @@ -112,14 +149,14 @@ impl NodeBuilder { /// Configures the [`Node`] instance to source its wallet entropy from the given 64 seed bytes. /// /// **Note:** Panics if the length of the given `seed_bytes` differs from 64. - pub fn set_entropy_seed_bytes(&mut self, seed_bytes: Vec) -> &mut Self { + pub fn set_entropy_seed_bytes(&mut self, seed_bytes: Vec) -> Result<&mut Self, BuildError> { if seed_bytes.len() != WALLET_KEYS_SEED_LEN { - panic!("Failed to set seed due to invalid length."); + return Err(BuildError::InvalidSeedBytes); } let mut bytes = [0u8; WALLET_KEYS_SEED_LEN]; bytes.copy_from_slice(&seed_bytes); self.entropy_source_config = Some(EntropySourceConfig::SeedBytes(bytes)); - self + Ok(self) } /// Configures the [`Node`] instance to source its wallet entropy from a [BIP 39] mnemonic. @@ -179,18 +216,20 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. 
- pub fn build(&self) -> Node { + pub fn build(&self) -> Result, BuildError> { let storage_dir_path = self.config.storage_dir_path.clone(); - fs::create_dir_all(storage_dir_path.clone()).expect("Failed to create LDK data directory"); + fs::create_dir_all(storage_dir_path.clone()) + .map_err(|_| BuildError::StoragePathAccessFailed)?; let kv_store = Arc::new(SqliteStore::new(storage_dir_path.into())); self.build_with_store(kv_store) } /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Node { + pub fn build_with_fs_store(&self) -> Result, BuildError> { let storage_dir_path = self.config.storage_dir_path.clone(); - fs::create_dir_all(storage_dir_path.clone()).expect("Failed to create LDK data directory"); + fs::create_dir_all(storage_dir_path.clone()) + .map_err(|_| BuildError::StoragePathAccessFailed)?; let kv_store = Arc::new(FilesystemStore::new(storage_dir_path.into())); self.build_with_store(kv_store) } @@ -198,7 +237,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance according to the options previously configured. pub fn build_with_store( &self, kv_store: Arc, - ) -> Node { + ) -> Result, BuildError> { let config = Arc::new(self.config.clone()); let runtime = Arc::new(RwLock::new(None)); @@ -251,8 +290,8 @@ impl ArcedNodeBuilder { /// Configures the [`Node`] instance to source its wallet entropy from the given 64 seed bytes. /// /// **Note:** Panics if the length of the given `seed_bytes` differs from 64. - pub fn set_entropy_seed_bytes(&self, seed_bytes: Vec) { - self.inner.write().unwrap().set_entropy_seed_bytes(seed_bytes); + pub fn set_entropy_seed_bytes(&self, seed_bytes: Vec) -> Result<(), BuildError> { + self.inner.write().unwrap().set_entropy_seed_bytes(seed_bytes).map(|_| ()) } /// Configures the [`Node`] instance to source its wallet entropy from a [BIP 39] mnemonic. @@ -301,21 +340,21 @@ impl ArcedNodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. - pub fn build(&self) -> Arc> { - Arc::new(self.inner.read().unwrap().build()) + pub fn build(&self) -> Result>, BuildError> { + self.inner.read().unwrap().build().map(Arc::new) } /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Arc> { - Arc::new(self.inner.read().unwrap().build_with_fs_store()) + pub fn build_with_fs_store(&self) -> Result>, BuildError> { + self.inner.read().unwrap().build_with_fs_store().map(Arc::new) } /// Builds a [`Node`] instance according to the options previously configured. 
pub fn build_with_store( &self, kv_store: Arc, - ) -> Arc> { - Arc::new(self.inner.read().unwrap().build_with_store(kv_store)) + ) -> Result>, BuildError> { + self.inner.read().unwrap().build_with_store(kv_store).map(Arc::new) } } @@ -325,12 +364,12 @@ fn build_with_store_internal( chain_data_source_config: Option<&ChainDataSourceConfig>, gossip_source_config: Option<&GossipSourceConfig>, kv_store: Arc, runtime: Arc>>, -) -> Node { +) -> Result, BuildError> { let ldk_data_dir = format!("{}/ldk", config.storage_dir_path); - fs::create_dir_all(ldk_data_dir.clone()).expect("Failed to create LDK data directory"); + fs::create_dir_all(ldk_data_dir.clone()).map_err(|_| BuildError::StoragePathAccessFailed)?; let bdk_data_dir = format!("{}/bdk", config.storage_dir_path); - fs::create_dir_all(bdk_data_dir.clone()).expect("Failed to create BDK data directory"); + fs::create_dir_all(bdk_data_dir.clone()).map_err(|_| BuildError::StoragePathAccessFailed)?; // Initialize the Logger let log_file_path = format!( @@ -358,7 +397,7 @@ fn build_with_store_internal( }; let xprv = bitcoin::util::bip32::ExtendedPrivKey::new_master(config.network, &seed_bytes) - .expect("Failed to read wallet master key"); + .map_err(|_| BuildError::InvalidSeedBytes)?; let wallet_name = bdk::wallet::wallet_name_from_descriptor( Bip84(xprv, bdk::KeychainKind::External), @@ -366,7 +405,7 @@ fn build_with_store_internal( config.network, &Secp256k1::new(), ) - .expect("Failed to derive on-chain wallet name"); + .map_err(|_| BuildError::WalletSetupFailed)?; let database_path = format!("{}/bdk_wallet_{}.sqlite", config.storage_dir_path, wallet_name); let database = SqliteDatabase::new(database_path); @@ -377,7 +416,7 @@ fn build_with_store_internal( config.network, database, ) - .expect("Failed to set up on-chain wallet"); + .map_err(|_| BuildError::WalletSetupFailed)?; let (blockchain, tx_sync) = match chain_data_source_config { Some(ChainDataSourceConfig::Esplora(server_url)) => { @@ -413,7 +452,7 @@ fn build_with_store_internal( // Initialize the KeysManager let cur_time = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) - .expect("System time error: Clock may have gone backwards"); + .map_err(|_| BuildError::InvalidSystemTime)?; let ldk_seed_bytes: [u8; 32] = xprv.private_key.secret_bytes(); let keys_manager = Arc::new(KeysManager::new( &ldk_seed_bytes, @@ -430,7 +469,7 @@ fn build_with_store_internal( if e.kind() == std::io::ErrorKind::NotFound { Arc::new(NetworkGraph::new(config.network, Arc::clone(&logger))) } else { - panic!("Failed to read network graph: {}", e.to_string()); + return Err(BuildError::ReadFailed); } } }; @@ -450,7 +489,7 @@ fn build_with_store_internal( Arc::clone(&logger), ))) } else { - panic!("Failed to read scorer: {}", e.to_string()); + return Err(BuildError::ReadFailed); } } }; @@ -474,7 +513,7 @@ fn build_with_store_internal( Vec::new() } else { log_error!(logger, "Failed to read channel monitors: {}", e.to_string()); - panic!("Failed to read channel monitors: {}", e.to_string()); + return Err(BuildError::ReadFailed); } } }; @@ -507,8 +546,10 @@ fn build_with_store_internal( channel_monitor_references, ); let (_hash, channel_manager) = - <(BlockHash, ChannelManager)>::read(&mut reader, read_args) - .expect("Failed to read channel manager from store"); + <(BlockHash, ChannelManager)>::read(&mut reader, read_args).map_err(|e| { + log_error!(logger, "Failed to read channel manager from KVStore: {}", e); + BuildError::ReadFailed + })?; channel_manager } else { // We're starting a fresh node. 
@@ -553,7 +594,7 @@ fn build_with_store_internal( let cur_time = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) - .expect("System time error: Clock may have gone backwards"); + .map_err(|_| BuildError::InvalidSystemTime)?; // Initialize the GossipSource // Use the configured gossip source, if the user set one, otherwise default to P2PNetwork. @@ -570,7 +611,7 @@ fn build_with_store_internal( Arc::clone(&kv_store), Arc::clone(&logger), ) - .expect("Persistence failed"); + .map_err(|_| BuildError::WriteFailed)?; p2p_source } GossipSourceConfig::RapidGossipSync(rgs_server) => { @@ -608,7 +649,7 @@ fn build_with_store_internal( let peer_manager = Arc::new(PeerManager::new( msg_handler, - cur_time.as_secs().try_into().expect("System time error"), + cur_time.as_secs().try_into().map_err(|_| BuildError::InvalidSystemTime)?, &ephemeral_bytes, Arc::clone(&logger), IgnoringMessageHandler {}, @@ -620,8 +661,8 @@ fn build_with_store_internal( Ok(payments) => { Arc::new(PaymentStore::new(payments, Arc::clone(&kv_store), Arc::clone(&logger))) } - Err(e) => { - panic!("Failed to read payment information: {}", e.to_string()); + Err(_) => { + return Err(BuildError::ReadFailed); } }; @@ -632,7 +673,7 @@ fn build_with_store_internal( if e.kind() == std::io::ErrorKind::NotFound { Arc::new(EventQueue::new(Arc::clone(&kv_store), Arc::clone(&logger))) } else { - panic!("Failed to read event queue: {}", e.to_string()); + return Err(BuildError::ReadFailed); } } }; @@ -643,14 +684,14 @@ fn build_with_store_internal( if e.kind() == std::io::ErrorKind::NotFound { Arc::new(PeerStore::new(Arc::clone(&kv_store), Arc::clone(&logger))) } else { - panic!("Failed to read peer store: {}", e.to_string()); + return Err(BuildError::ReadFailed); } } }; let (stop_sender, stop_receiver) = tokio::sync::watch::channel(()); - Node { + Ok(Node { runtime, stop_sender, stop_receiver, @@ -669,5 +710,5 @@ fn build_with_store_internal( scorer, peer_store, payment_store, - } + }) } diff --git a/src/lib.rs b/src/lib.rs index 4f5d1c98c..7e0495772 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -38,7 +38,7 @@ //! builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); //! builder.set_gossip_source_rgs("https://rapidsync.lightningdevkit.org/testnet/snapshot".to_string()); //! -//! let node = builder.build(); +//! let node = builder.build().unwrap(); //! //! node.start().unwrap(); //! 
@@ -113,6 +113,7 @@ use {bip39::Mnemonic, bitcoin::OutPoint, lightning::ln::PaymentSecret, uniffi_ty #[cfg(feature = "uniffi")] pub use builder::ArcedNodeBuilder as Builder; +pub use builder::BuildError; #[cfg(not(feature = "uniffi"))] pub use builder::NodeBuilder as Builder; @@ -1330,7 +1331,7 @@ impl Node { /// # config.network = Network::Regtest; /// # config.storage_dir_path = "/tmp/ldk_node_test/".to_string(); /// # let builder = Builder::from_config(config); - /// # let node = builder.build(); + /// # let node = builder.build().unwrap(); /// node.list_payments_with_filter(|p| p.direction == PaymentDirection::Outbound); /// ``` pub fn list_payments_with_filter bool>( diff --git a/src/test/functional_tests.rs b/src/test/functional_tests.rs index 5828d6114..806f28fd6 100644 --- a/src/test/functional_tests.rs +++ b/src/test/functional_tests.rs @@ -16,14 +16,14 @@ fn channel_full_cycle() { let config_a = random_config(); let mut builder_a = NodeBuilder::from_config(config_a); builder_a.set_esplora_server(esplora_url.clone()); - let node_a = builder_a.build(); + let node_a = builder_a.build().unwrap(); node_a.start().unwrap(); println!("\n== Node B =="); let config_b = random_config(); let mut builder_b = NodeBuilder::from_config(config_b); builder_b.set_esplora_server(esplora_url); - let node_b = builder_b.build(); + let node_b = builder_b.build().unwrap(); node_b.start().unwrap(); do_channel_full_cycle(node_a, node_b, &bitcoind, &electrsd, false); @@ -37,7 +37,7 @@ fn channel_full_cycle_0conf() { let config_a = random_config(); let mut builder_a = NodeBuilder::from_config(config_a); builder_a.set_esplora_server(esplora_url.clone()); - let node_a = builder_a.build(); + let node_a = builder_a.build().unwrap(); node_a.start().unwrap(); println!("\n== Node B =="); @@ -46,7 +46,7 @@ fn channel_full_cycle_0conf() { let mut builder_b = NodeBuilder::from_config(config_b); builder_b.set_esplora_server(esplora_url.clone()); - let node_b = builder_b.build(); + let node_b = builder_b.build().unwrap(); node_b.start().unwrap(); @@ -277,7 +277,7 @@ fn channel_open_fails_when_funds_insufficient() { let config_a = random_config(); let mut builder_a = NodeBuilder::from_config(config_a); builder_a.set_esplora_server(esplora_url.clone()); - let node_a = builder_a.build(); + let node_a = builder_a.build().unwrap(); node_a.start().unwrap(); let addr_a = node_a.new_funding_address().unwrap(); @@ -285,7 +285,7 @@ fn channel_open_fails_when_funds_insufficient() { let config_b = random_config(); let mut builder_b = NodeBuilder::from_config(config_b); builder_b.set_esplora_server(esplora_url); - let node_b = builder_b.build(); + let node_b = builder_b.build().unwrap(); node_b.start().unwrap(); let addr_b = node_b.new_funding_address().unwrap(); @@ -322,7 +322,7 @@ fn connect_to_public_testnet_esplora() { config.network = bitcoin::Network::Testnet; let mut builder = NodeBuilder::from_config(config); builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); - let node = builder.build(); + let node = builder.build().unwrap(); node.start().unwrap(); node.sync_wallets().unwrap(); node.stop().unwrap(); @@ -335,7 +335,7 @@ fn start_stop_reinit() { let config = random_config(); let mut builder = NodeBuilder::from_config(config.clone()); builder.set_esplora_server(esplora_url.clone()); - let node = builder.build(); + let node = builder.build().unwrap(); let expected_node_id = node.node_id(); let funding_address = node.new_funding_address().unwrap(); @@ -365,7 +365,7 @@ fn start_stop_reinit() { let mut 
new_builder = NodeBuilder::from_config(config); new_builder.set_esplora_server(esplora_url); - let reinitialized_node = builder.build(); + let reinitialized_node = builder.build().unwrap(); assert_eq!(reinitialized_node.node_id(), expected_node_id); reinitialized_node.start().unwrap(); @@ -391,7 +391,7 @@ fn start_stop_reinit_fs_store() { let config = random_config(); let mut builder = NodeBuilder::from_config(config.clone()); builder.set_esplora_server(esplora_url.clone()); - let node = builder.build_with_fs_store(); + let node = builder.build_with_fs_store().unwrap(); let expected_node_id = node.node_id(); let funding_address = node.new_funding_address().unwrap(); @@ -418,7 +418,7 @@ fn start_stop_reinit_fs_store() { let mut new_builder = NodeBuilder::from_config(config); new_builder.set_esplora_server(esplora_url); - let reinitialized_node = builder.build_with_fs_store(); + let reinitialized_node = builder.build_with_fs_store().unwrap(); assert_eq!(reinitialized_node.node_id(), expected_node_id); reinitialized_node.start().unwrap(); @@ -445,14 +445,14 @@ fn onchain_spend_receive() { let config_a = random_config(); let mut builder_a = NodeBuilder::from_config(config_a); builder_a.set_esplora_server(esplora_url.clone()); - let node_a = builder_a.build(); + let node_a = builder_a.build().unwrap(); node_a.start().unwrap(); let addr_a = node_a.new_funding_address().unwrap(); let config_b = random_config(); let mut builder_b = NodeBuilder::from_config(config_b); builder_b.set_esplora_server(esplora_url); - let node_b = builder_b.build(); + let node_b = builder_b.build().unwrap(); node_b.start().unwrap(); let addr_b = node_b.new_funding_address().unwrap(); @@ -500,7 +500,7 @@ fn sign_verify_msg() { let config = random_config(); let mut builder = NodeBuilder::from_config(config.clone()); builder.set_esplora_server(esplora_url.clone()); - let node = builder.build(); + let node = builder.build().unwrap(); node.start().unwrap(); From 6912d0b60895c499e02936d1ca2b65544343e554 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 10:45:55 +0200 Subject: [PATCH 03/10] Don't panic on Logger setup failure --- bindings/ldk_node.udl | 1 + src/builder.rs | 8 +++++++- src/logger.rs | 14 +++++--------- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 05893fa29..4bafcf571 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -125,6 +125,7 @@ enum BuildError { "WriteFailed", "StoragePathAccessFailed", "WalletSetupFailed", + "LoggerSetupFailed", }; [Enum] diff --git a/src/builder.rs b/src/builder.rs index 3cb8d3293..259f5eb46 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -84,6 +84,8 @@ pub enum BuildError { StoragePathAccessFailed, /// We failed to setup the onchain wallet. WalletSetupFailed, + /// We failed to setup the logger. 
+ LoggerSetupFailed, } impl fmt::Display for BuildError { @@ -97,6 +99,7 @@ impl fmt::Display for BuildError { Self::WriteFailed => write!(f, "Failed to write to store."), Self::StoragePathAccessFailed => write!(f, "Failed to access the given storage path."), Self::WalletSetupFailed => write!(f, "Failed to setup onchain wallet."), + Self::LoggerSetupFailed => write!(f, "Failed to setup the logger."), } } } @@ -377,7 +380,10 @@ fn build_with_store_internal( config.storage_dir_path, chrono::offset::Local::now().format("%Y_%m_%d") ); - let logger = Arc::new(FilesystemLogger::new(log_file_path.clone(), config.log_level)); + let logger = Arc::new( + FilesystemLogger::new(log_file_path.clone(), config.log_level) + .map_err(|_| BuildError::LoggerSetupFailed)?, + ); // Initialize the on-chain wallet and chain access let seed_bytes = match entropy_source_config { diff --git a/src/logger.rs b/src/logger.rs index 0c4df2198..3da9e7e54 100644 --- a/src/logger.rs +++ b/src/logger.rs @@ -16,7 +16,7 @@ pub(crate) struct FilesystemLogger { } impl FilesystemLogger { - pub(crate) fn new(file_path: String, level: Level) -> Self { + pub(crate) fn new(file_path: String, level: Level) -> Result { if let Some(parent_dir) = Path::new(&file_path).parent() { fs::create_dir_all(parent_dir).expect("Failed to create log parent directory"); @@ -25,21 +25,17 @@ impl FilesystemLogger { .create(true) .append(true) .open(file_path.clone()) - .expect("Failed to open log file"); + .map_err(|_| ())?; // Create a symlink to the current log file, with prior cleanup let log_file_symlink = parent_dir.join("ldk_node_latest.log"); if log_file_symlink.as_path().exists() && log_file_symlink.as_path().is_symlink() { - fs::remove_file(&log_file_symlink) - .expect("Failed to remove an old symlink for the log file"); + fs::remove_file(&log_file_symlink).map_err(|_| ())?; } - symlink(&file_path, &log_file_symlink).expect(&format!( - "Failed to create symlink for the log file: {:?}", - log_file_symlink - )); + symlink(&file_path, &log_file_symlink).map_err(|_| ())?; } - Self { file_path, level } + Ok(Self { file_path, level }) } } impl Logger for FilesystemLogger { From 6f6ddb374a2f81246842b2c78061a08cff0e8095 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 11:49:16 +0200 Subject: [PATCH 04/10] Don't panic on reading invalid seed file --- bindings/ldk_node.udl | 1 + src/builder.rs | 11 ++++++--- src/io/utils.rs | 52 +++++++++++++++++++++++++++++++++---------- 3 files changed, 49 insertions(+), 15 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 4bafcf571..6be0d54c5 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -120,6 +120,7 @@ enum NodeError { [Error] enum BuildError { "InvalidSeedBytes", + "InvalidSeedFile", "InvalidSystemTime", "ReadFailed", "WriteFailed", diff --git a/src/builder.rs b/src/builder.rs index 259f5eb46..e9f9360a1 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -72,8 +72,10 @@ enum GossipSourceConfig { /// [`Node`]: crate::Node #[derive(Debug, Clone)] pub enum BuildError { - /// The given seed bytes are invalid, e.g, are of invalid length. + /// The given seed bytes are invalid, e.g., have invalid length. InvalidSeedBytes, + /// The given seed file is invalid, e.g., has invalid length, or could not be read. + InvalidSeedFile, /// The current system time is invalid, clocks might have gone backwards. InvalidSystemTime, /// We failed to read data from the [`KVStore`]. 
@@ -92,6 +94,7 @@ impl fmt::Display for BuildError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Self::InvalidSeedBytes => write!(f, "Given seed bytes are invalid."), + Self::InvalidSeedFile => write!(f, "Given seed file is invalid or could not be read."), Self::InvalidSystemTime => { write!(f, "System time is invalid. Clocks might have gone back in time.") } @@ -389,7 +392,8 @@ fn build_with_store_internal( let seed_bytes = match entropy_source_config { Some(EntropySourceConfig::SeedBytes(bytes)) => bytes.clone(), Some(EntropySourceConfig::SeedFile(seed_path)) => { - io::utils::read_or_generate_seed_file(seed_path) + io::utils::read_or_generate_seed_file(seed_path, Arc::clone(&logger)) + .map_err(|_| BuildError::InvalidSeedFile)? } Some(EntropySourceConfig::Bip39Mnemonic { mnemonic, passphrase }) => match passphrase { Some(passphrase) => mnemonic.to_seed(passphrase), @@ -398,7 +402,8 @@ fn build_with_store_internal( None => { // Default to read or generate from the default location generate a seed file. let seed_path = format!("{}/keys_seed", config.storage_dir_path); - io::utils::read_or_generate_seed_file(&seed_path) + io::utils::read_or_generate_seed_file(&seed_path, Arc::clone(&logger)) + .map_err(|_| BuildError::InvalidSeedFile)? } }; diff --git a/src/io/utils.rs b/src/io/utils.rs index f141648c3..640df5d0b 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -39,25 +39,53 @@ pub fn generate_entropy_mnemonic() -> Mnemonic { Mnemonic::from_entropy(&entropy).unwrap() } -pub(crate) fn read_or_generate_seed_file(keys_seed_path: &str) -> [u8; WALLET_KEYS_SEED_LEN] { +pub(crate) fn read_or_generate_seed_file( + keys_seed_path: &str, logger: L, +) -> std::io::Result<[u8; WALLET_KEYS_SEED_LEN]> +where + L::Target: Logger, +{ if Path::new(&keys_seed_path).exists() { - let seed = fs::read(keys_seed_path).expect("Failed to read keys seed file"); - assert_eq!( - seed.len(), - WALLET_KEYS_SEED_LEN, - "Failed to read keys seed file: unexpected length" - ); + let seed = fs::read(keys_seed_path).map_err(|e| { + log_error!(logger, "Failed to read keys seed file: {}", keys_seed_path); + e + })?; + + if seed.len() != WALLET_KEYS_SEED_LEN { + log_error!( + logger, + "Failed to read keys seed file due to invalid length: {}", + keys_seed_path + ); + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Failed to read keys seed file due to invalid length", + )); + } + let mut key = [0; WALLET_KEYS_SEED_LEN]; key.copy_from_slice(&seed); - key + Ok(key) } else { let mut key = [0; WALLET_KEYS_SEED_LEN]; thread_rng().fill_bytes(&mut key); - let mut f = fs::File::create(keys_seed_path).expect("Failed to create keys seed file"); - f.write_all(&key).expect("Failed to write node keys seed to disk"); - f.sync_all().expect("Failed to sync node keys seed to disk"); - key + let mut f = fs::File::create(keys_seed_path).map_err(|e| { + log_error!(logger, "Failed to create keys seed file: {}", keys_seed_path); + e + })?; + + f.write_all(&key).map_err(|e| { + log_error!(logger, "Failed to write node keys seed to disk: {}", keys_seed_path); + e + })?; + + f.sync_all().map_err(|e| { + log_error!(logger, "Failed to sync node keys seed to disk: {}", keys_seed_path); + e + })?; + + Ok(key) } } From ff8875eb98f1bd3ca253da7044b57c7361e0ad05 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 11:57:53 +0200 Subject: [PATCH 05/10] Don't panic on failure to resolve listening addr --- src/lib.rs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff 
--git a/src/lib.rs b/src/lib.rs index 7e0495772..99d718c8f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -483,9 +483,23 @@ impl Node { let bind_addr = listening_address .to_socket_addrs() - .expect("Unable to resolve listing address") + .map_err(|_| { + log_error!( + self.logger, + "Unable to resolve listing address: {:?}", + listening_address + ); + Error::InvalidNetAddress + })? .next() - .expect("Unable to resolve listing address"); + .ok_or_else(|| { + log_error!( + self.logger, + "Unable to resolve listing address: {:?}", + listening_address + ); + Error::InvalidNetAddress + })?; runtime.spawn(async move { let listener = From 4d6f7b0b4563c1071bccd52b49223cbd724cad18 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 12:10:06 +0200 Subject: [PATCH 06/10] Improve logging in event handler While we cannot get rid of the panics here, we can make sure the errors are logged before we (explicitly) panic. --- src/event.rs | 72 ++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 53 insertions(+), 19 deletions(-) diff --git a/src/event.rs b/src/event.rs index 78d73d063..b993f7d82 100644 --- a/src/event.rs +++ b/src/event.rs @@ -311,9 +311,12 @@ where &temporary_channel_id, &counterparty_node_id, ) - .expect( - "Failed to force close channel after funding generation failed", - ); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to force close channel after funding generation failed: {:?}", e); + panic!( + "Failed to force close channel after funding generation failed" + ); + }); } } } @@ -341,7 +344,10 @@ where status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_hash) }; - self.payment_store.update(&update).expect("Failed to access payment store"); + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); return; } } @@ -379,7 +385,10 @@ where status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_hash) }; - self.payment_store.update(&update).expect("Failed to access payment store"); + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); } } LdkEvent::PaymentClaimed { @@ -459,15 +468,19 @@ where self.event_queue .add_event(Event::PaymentReceived { payment_hash, amount_msat }) - .expect("Failed to push to event queue"); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }); } LdkEvent::PaymentSent { payment_preimage, payment_hash, fee_paid_msat, .. } => { if let Some(mut payment) = self.payment_store.get(&payment_hash) { payment.preimage = Some(payment_preimage); payment.status = PaymentStatus::Succeeded; - self.payment_store - .insert(payment.clone()) - .expect("Failed to access payment store"); + self.payment_store.insert(payment.clone()).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); log_info!( self.logger, "Successfully sent payment of {}msat{} from \ @@ -484,7 +497,10 @@ where } self.event_queue .add_event(Event::PaymentSuccessful { payment_hash }) - .expect("Failed to push to event queue"); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }); } LdkEvent::PaymentFailed { payment_hash, .. 
} => { log_info!( @@ -497,10 +513,16 @@ where status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_hash) }; - self.payment_store.update(&update).expect("Failed to access payment store"); - self.event_queue - .add_event(Event::PaymentFailed { payment_hash }) - .expect("Failed to push to event queue"); + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + self.event_queue.add_event(Event::PaymentFailed { payment_hash }).unwrap_or_else( + |e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }, + ); } LdkEvent::PaymentPathSuccessful { .. } => {} @@ -526,8 +548,11 @@ where } LdkEvent::SpendableOutputs { outputs } => { // TODO: We should eventually remember the outputs and supply them to the wallet's coin selection, once BDK allows us to do so. - let destination_address = - self.wallet.get_new_address().expect("Failed to get destination address"); + let destination_address = self.wallet.get_new_address().unwrap_or_else(|e| { + log_error!(self.logger, "Failed to get destination address: {}", e); + panic!("Failed to get destination address"); + }); + let output_descriptors = &outputs.iter().collect::>(); let tx_feerate = self.wallet.get_est_sat_per_1000_weight(ConfirmationTarget::Normal); @@ -676,7 +701,10 @@ where counterparty_node_id, funding_txo, }) - .expect("Failed to push to event queue"); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }); } LdkEvent::ChannelReady { channel_id, user_channel_id, counterparty_node_id, .. @@ -692,7 +720,10 @@ where channel_id: ChannelId(channel_id), user_channel_id: UserChannelId(user_channel_id), }) - .expect("Failed to push to event queue"); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }); } LdkEvent::ChannelClosed { channel_id, reason, user_channel_id } => { log_info!( @@ -706,7 +737,10 @@ where channel_id: ChannelId(channel_id), user_channel_id: UserChannelId(user_channel_id), }) - .expect("Failed to push to event queue"); + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to push to event queue: {}", e); + panic!("Failed to push to event queue"); + }); } LdkEvent::DiscardFunding { .. } => {} LdkEvent::HTLCIntercepted { .. } => {} From e0d7360d876ecb6e000f2d35c0495b2bc91fdcab Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 12:23:12 +0200 Subject: [PATCH 07/10] Have `Wallet` take `Deref` Logger Which we'll be using in the next commit. 
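
The `L: Deref` / `L::Target: Logger` bound introduced here is the usual LDK idiom for accepting any smart pointer to a logger. A minimal, self-contained sketch of the shape this gives `Wallet` (the `Logger` trait and `PrintLogger` below are simplified stand-ins, not the crate's actual types):

    use std::ops::Deref;
    use std::sync::Arc;

    // Stand-in for `lightning::util::logger::Logger`.
    trait Logger {
        fn log(&self, msg: &str);
    }

    struct PrintLogger;
    impl Logger for PrintLogger {
        fn log(&self, msg: &str) {
            println!("{}", msg);
        }
    }

    // Generic over any smart pointer whose target implements `Logger`, mirroring
    // the bound this patch puts on `Wallet`.
    struct Wallet<L: Deref>
    where
        L::Target: Logger,
    {
        logger: L,
    }

    impl<L: Deref> Wallet<L>
    where
        L::Target: Logger,
    {
        fn new(logger: L) -> Self {
            Self { logger }
        }

        fn sync(&self) {
            // Method resolution auto-derefs `L` to its `Logger` target.
            self.logger.log("Syncing on-chain wallet...");
        }
    }

    fn main() {
        // `Arc<PrintLogger>` satisfies `Deref<Target = PrintLogger>`, as does `&PrintLogger`.
        let wallet = Wallet::new(Arc::new(PrintLogger));
        wallet.sync();
    }
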
--- src/event.rs | 4 ++-- src/lib.rs | 2 +- src/types.rs | 23 ++++++++++++----------- src/wallet.rs | 39 +++++++++++++++++++++++++-------------- 4 files changed, 40 insertions(+), 28 deletions(-) diff --git a/src/event.rs b/src/event.rs index b993f7d82..9a7f8c609 100644 --- a/src/event.rs +++ b/src/event.rs @@ -223,7 +223,7 @@ pub(crate) struct EventHandler where L::Target: Logger, { - wallet: Arc>, + wallet: Arc>, event_queue: Arc>, channel_manager: Arc>, network_graph: Arc, @@ -239,7 +239,7 @@ where L::Target: Logger, { pub fn new( - wallet: Arc>, event_queue: Arc>, + wallet: Arc>, event_queue: Arc>, channel_manager: Arc>, network_graph: Arc, keys_manager: Arc, payment_store: Arc>, runtime: Arc>>, logger: L, config: Arc, diff --git a/src/lib.rs b/src/lib.rs index 99d718c8f..ce4c1582a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -273,7 +273,7 @@ pub struct Node { stop_sender: tokio::sync::watch::Sender<()>, stop_receiver: tokio::sync::watch::Receiver<()>, config: Arc, - wallet: Arc>, + wallet: Arc>>, tx_sync: Arc>>, event_queue: Arc>>, channel_manager: Arc>, diff --git a/src/types.rs b/src/types.rs index 6e0385106..b7c38b351 100644 --- a/src/types.rs +++ b/src/types.rs @@ -26,8 +26,8 @@ use std::sync::{Arc, Mutex}; pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, Arc>>, - Arc>, - Arc>, + Arc>>, + Arc>>, Arc, Arc, >; @@ -39,21 +39,22 @@ pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< Arc, Arc, IgnoringMessageHandler, - Arc>, + Arc>>, >; pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< Arc>, - Arc>, - Arc>, - Arc>, - Arc>, - Arc>, + Arc>>, + Arc>>, + Arc>>, + Arc>>, + Arc>>, Arc, Arc, >; -pub(crate) type KeysManager = WalletKeysManager; +pub(crate) type KeysManager = + WalletKeysManager>; pub(crate) type Router = DefaultRouter, Arc, Arc>>; @@ -80,8 +81,8 @@ pub(crate) type GossipSync = lightning_background_processor::GossipSync< >; pub(crate) type OnionMessenger = lightning::onion_message::OnionMessenger< - Arc>, - Arc>, + Arc>>, + Arc>>, Arc, IgnoringMessageHandler, >; diff --git a/src/wallet.rs b/src/wallet.rs index c6362486d..f9aa2e1ee 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -1,4 +1,4 @@ -use crate::logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; +use crate::logger::{log_error, log_info, log_trace, Logger}; use crate::Error; @@ -27,12 +27,14 @@ use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, Signing}; use bitcoin::{Script, Transaction, TxOut, Txid}; use std::collections::HashMap; +use std::ops::Deref; use std::sync::{Arc, Condvar, Mutex, RwLock}; use std::time::Duration; -pub struct Wallet +pub struct Wallet where D: BatchDatabase, + L::Target: Logger, { // A BDK blockchain used for wallet sync. 
blockchain: EsploraBlockchain, @@ -42,16 +44,17 @@ where fee_rate_cache: RwLock>, runtime: Arc>>, sync_lock: (Mutex<()>, Condvar), - logger: Arc, + logger: L, } -impl Wallet +impl Wallet where D: BatchDatabase, + L::Target: Logger, { pub(crate) fn new( blockchain: EsploraBlockchain, wallet: bdk::Wallet, - runtime: Arc>>, logger: Arc, + runtime: Arc>>, logger: L, ) -> Self { let inner = Mutex::new(wallet); let fee_rate_cache = RwLock::new(HashMap::new()); @@ -286,9 +289,10 @@ where } } -impl FeeEstimator for Wallet +impl FeeEstimator for Wallet where D: BatchDatabase, + L::Target: Logger, { fn get_est_sat_per_1000_weight(&self, confirmation_target: ConfirmationTarget) -> u32 { (self.estimate_fee_rate(confirmation_target).fee_wu(1000) as u32) @@ -296,9 +300,10 @@ where } } -impl BroadcasterInterface for Wallet +impl BroadcasterInterface for Wallet where D: BatchDatabase, + L::Target: Logger, { fn broadcast_transaction(&self, tx: &Transaction) { let locked_runtime = self.runtime.read().unwrap(); @@ -325,24 +330,27 @@ where /// Similar to [`KeysManager`], but overrides the destination and shutdown scripts so they are /// directly spendable by the BDK wallet. -pub struct WalletKeysManager +pub struct WalletKeysManager where D: BatchDatabase, + L::Target: Logger, { inner: KeysManager, - wallet: Arc>, + wallet: Arc>, } -impl WalletKeysManager +impl WalletKeysManager where D: BatchDatabase, + L::Target: Logger, { /// Constructs a `WalletKeysManager` that overrides the destination and shutdown scripts. /// /// See [`KeysManager::new`] for more information on `seed`, `starting_time_secs`, and /// `starting_time_nanos`. pub fn new( - seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, wallet: Arc>, + seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, + wallet: Arc>, ) -> Self { let inner = KeysManager::new(seed, starting_time_secs, starting_time_nanos); Self { inner, wallet } @@ -378,9 +386,10 @@ where } } -impl NodeSigner for WalletKeysManager +impl NodeSigner for WalletKeysManager where D: BatchDatabase, + L::Target: Logger, { fn get_node_id(&self, recipient: Recipient) -> Result { self.inner.get_node_id(recipient) @@ -407,18 +416,20 @@ where } } -impl EntropySource for WalletKeysManager +impl EntropySource for WalletKeysManager where D: BatchDatabase, + L::Target: Logger, { fn get_secure_random_bytes(&self) -> [u8; 32] { self.inner.get_secure_random_bytes() } } -impl SignerProvider for WalletKeysManager +impl SignerProvider for WalletKeysManager where D: BatchDatabase, + L::Target: Logger, { type Signer = InMemorySigner; From 4172f9363a9f8b0068cb727fc3d53053d2cfd6d9 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 12:29:28 +0200 Subject: [PATCH 08/10] Improve logging in `WalletKeysManager` While we cannot get rid of the panics here, we can make sure the errors are logged before we (explicitly) panic. 
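
The replacement pattern is the same throughout this and the following patch: swap `.expect(..)` for `unwrap_or_else` so the concrete error reaches the log before the explicit panic. A minimal sketch of the pattern, with `get_new_address` and `log_error` as stand-ins for the crate's wallet call and logging macro:

    // Stand-ins; the real code calls `self.wallet.get_new_address()` and `log_error!`.
    fn get_new_address() -> Result<String, String> {
        Err("wallet database unavailable".to_string())
    }

    fn log_error(msg: &str) {
        eprintln!("[ERROR] {}", msg);
    }

    fn get_destination_script() -> String {
        // Before: get_new_address().expect("Failed to retrieve new address from wallet.")
        get_new_address().unwrap_or_else(|e| {
            // Log the underlying error first so it is preserved...
            log_error(&format!("Failed to retrieve new address from wallet: {}", e));
            // ...then panic explicitly, keeping the previous abort-on-failure behavior.
            panic!("Failed to retrieve new address from wallet");
        })
    }

    fn main() {
        let _script = get_destination_script();
    }
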
--- src/builder.rs | 1 + src/wallet.rs | 34 +++++++++++++++++++++++++--------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index e9f9360a1..dddc6d064 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -470,6 +470,7 @@ fn build_with_store_internal( cur_time.as_secs(), cur_time.subsec_nanos(), Arc::clone(&wallet), + Arc::clone(&logger), )); // Initialize the network graph, scorer, and router diff --git a/src/wallet.rs b/src/wallet.rs index f9aa2e1ee..78fa6e567 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -337,6 +337,7 @@ where { inner: KeysManager, wallet: Arc>, + logger: L, } impl WalletKeysManager @@ -350,10 +351,10 @@ where /// `starting_time_nanos`. pub fn new( seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, - wallet: Arc>, + wallet: Arc>, logger: L, ) -> Self { let inner = KeysManager::new(seed, starting_time_secs, starting_time_nanos); - Self { inner, wallet } + Self { inner, wallet, logger } } /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. @@ -450,20 +451,35 @@ where } fn get_destination_script(&self) -> Script { - let address = - self.wallet.get_new_address().expect("Failed to retrieve new address from wallet."); + let address = self.wallet.get_new_address().unwrap_or_else(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + panic!("Failed to retrieve new address from wallet"); + }); address.script_pubkey() } fn get_shutdown_scriptpubkey(&self) -> ShutdownScript { - let address = - self.wallet.get_new_address().expect("Failed to retrieve new address from wallet."); + let address = self.wallet.get_new_address().unwrap_or_else(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + panic!("Failed to retrieve new address from wallet"); + }); + match address.payload { bitcoin::util::address::Payload::WitnessProgram { version, program } => { - return ShutdownScript::new_witness_program(version, &program) - .expect("Invalid shutdown script."); + return ShutdownScript::new_witness_program(version, &program).unwrap_or_else( + |e| { + log_error!(self.logger, "Invalid shutdown script: {:?}", e); + panic!("Invalid shutdown script."); + }, + ); + } + _ => { + log_error!( + self.logger, + "Tried to use a non-witness address. This must never happen." + ); + panic!("Tried to use a non-witness address. This must never happen."); } - _ => panic!("Tried to use a non-witness address. This must not ever happen."), } } } From 0aa4611f1bb188e76b290bb30f2f1d76e5ad559a Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 14 Jun 2023 12:39:00 +0200 Subject: [PATCH 09/10] Improve logging in `Node` While we cannot get rid of the panics here, we can make sure the errors are logged before we (explicitly) panic. 
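
Besides the `unwrap_or_else` conversions, note the `Arc::clone`s of the logger under dedicated names (`listening_logger`, `background_error_logger`): each `move` closure handed to the runtime needs its own reference so `self.logger` stays usable elsewhere. A small sketch of that pattern (stand-in `Logger` type; assumes tokio with the `net`, `macros`, and `rt-multi-thread` features):

    use std::sync::Arc;

    // Stand-in for the crate's `FilesystemLogger`.
    struct Logger;
    impl Logger {
        fn log(&self, msg: &str) {
            eprintln!("{}", msg);
        }
    }

    #[tokio::main]
    async fn main() {
        let logger = Arc::new(Logger);

        // Clone under a dedicated name before the `move` closure, so the spawned
        // task owns its own reference and `logger` remains usable below.
        let listening_logger = Arc::clone(&logger);
        let task = tokio::spawn(async move {
            if let Err(e) = tokio::net::TcpListener::bind("127.0.0.1:0").await {
                listening_logger.log(&format!(
                    "Failed to bind to listen address/port - is something else already listening on it?: {}",
                    e
                ));
            }
        });
        task.await.unwrap();

        logger.log("listener task finished; original logger handle still usable");
    }
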
--- src/lib.rs | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index ce4c1582a..b90869f4d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -461,7 +461,10 @@ impl Node { Arc::clone(&gossip_sync_store), Arc::clone(&gossip_sync_logger), ) - .expect("Persistence failed"); + .unwrap_or_else(|e| { + log_error!(gossip_sync_logger, "Persistence failed: {}", e); + panic!("Persistence failed"); + }); } Err(e) => log_error!( gossip_sync_logger, @@ -480,6 +483,7 @@ impl Node { let peer_manager_connection_handler = Arc::clone(&self.peer_manager); let mut stop_listen = self.stop_receiver.clone(); let listening_address = listening_address.clone(); + let listening_logger = Arc::clone(&self.logger); let bind_addr = listening_address .to_socket_addrs() @@ -503,9 +507,14 @@ impl Node { runtime.spawn(async move { let listener = - tokio::net::TcpListener::bind(bind_addr).await.expect( - "Failed to bind to listen address/port - is something else already listening on it?", - ); + tokio::net::TcpListener::bind(bind_addr).await + .unwrap_or_else(|e| { + log_error!(listening_logger, "Failed to bind to listen address/port - is something else already listening on it?: {}", e); + panic!( + "Failed to bind to listen address/port - is something else already listening on it?", + ); + }); + loop { let peer_mgr = Arc::clone(&peer_manager_connection_handler); tokio::select! { @@ -616,7 +625,10 @@ impl Node { let unix_time_secs = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); io::utils::write_latest_node_ann_bcast_timestamp(unix_time_secs, Arc::clone(&bcast_store), Arc::clone(&bcast_logger)) - .expect("Persistence failed"); + .unwrap_or_else(|e| { + log_error!(bcast_logger, "Persistence failed: {}", e); + panic!("Persistence failed"); + }); } } } @@ -642,6 +654,7 @@ impl Node { let background_gossip_sync = self.gossip_source.as_gossip_sync(); let background_peer_man = Arc::clone(&self.peer_manager); let background_logger = Arc::clone(&self.logger); + let background_error_logger = Arc::clone(&self.logger); let background_scorer = Arc::clone(&self.scorer); let stop_bp = self.stop_receiver.clone(); let sleeper = move |d| { @@ -672,7 +685,10 @@ impl Node { true, ) .await - .expect("Failed to process events"); + .unwrap_or_else(|e| { + log_error!(background_error_logger, "Failed to process events: {}", e); + panic!("Failed to process events"); + }); }); *runtime_lock = Some(runtime); @@ -733,9 +749,14 @@ impl Node { /// /// **Note:** This **MUST** be called after each event has been handled. pub fn event_handled(&self) { - self.event_queue - .event_handled() - .expect("Couldn't mark event handled due to persistence failure"); + self.event_queue.event_handled().unwrap_or_else(|e| { + log_error!( + self.logger, + "Couldn't mark event handled due to persistence failure: {}", + e + ); + panic!("Couldn't mark event handled due to persistence failure"); + }); } /// Returns our own node id From 0e3ce7fba34ceded01604e383668d61599669f7f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 21 Jun 2023 17:58:26 +0200 Subject: [PATCH 10/10] Drop unnecessary `sync_wallets` call in tests As we now block `start()` on updating fees anyways we don't have to call `sync_wallets` manually to check we're successfully connected to the testnet esplora. We here remove this as it can be flaky dependent how many requests the Esplora server is accepting and how often our CI runs from the same machines... 
--- src/test/functional_tests.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/functional_tests.rs b/src/test/functional_tests.rs index 806f28fd6..79b52ccbb 100644 --- a/src/test/functional_tests.rs +++ b/src/test/functional_tests.rs @@ -324,7 +324,6 @@ fn connect_to_public_testnet_esplora() { builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); let node = builder.build().unwrap(); node.start().unwrap(); - node.sync_wallets().unwrap(); node.stop().unwrap(); }
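
Looking back at the first patch in the series, the `block_in_place` wrapper around the startup fee-rate update is what makes `Node::start()` safe to call from inside an application's own tokio runtime: `Runtime::block_on` panics when invoked on a thread that is already driving async tasks, and `block_in_place` lifts that restriction by letting the outer runtime compensate for the blocked worker. A standalone sketch of the mechanism, separate from the crate's code (assumes a multi-threaded tokio runtime; `block_in_place` is not available on the current-thread flavor):

    #[tokio::main(flavor = "multi_thread")]
    async fn main() {
        // A second, internal runtime, standing in for the one the `Node` drives itself.
        let inner_runtime = tokio::runtime::Builder::new_multi_thread()
            .enable_all()
            .build()
            .unwrap();

        // Calling `inner_runtime.block_on(..)` directly here would panic, since this
        // code already runs on a tokio worker thread that must not block. Wrapping
        // the call in `block_in_place` tells the outer runtime this thread will
        // block so it can hand its work to another worker.
        let value = tokio::task::block_in_place(|| inner_runtime.block_on(async { 1 + 1 }));
        assert_eq!(value, 2);

        // Shut the inner runtime down without blocking the async context.
        inner_runtime.shutdown_background();
    }
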