@@ -1478,7 +1478,6 @@ impl BackgroundProcessor {
 		PGS: 'static + Deref<Target = P2PGossipSync<G, UL, L>> + Send,
 		RGS: 'static + Deref<Target = RapidGossipSync<G, L>> + Send,
 		PM: 'static + Deref + Send,
-		LM: 'static + Deref + Send,
 		S: 'static + Deref<Target = SC> + Send + Sync,
 		SC: for<'b> WriteableScore<'b>,
 		D: 'static + Deref,
@@ -1488,7 +1487,7 @@ impl BackgroundProcessor {
 	>(
 		kv_store: K, event_handler: EH, chain_monitor: M, channel_manager: CM,
 		onion_messenger: OM, gossip_sync: GossipSync<PGS, RGS, G, UL, L>, peer_manager: PM,
-		liquidity_manager: Option<LM>, sweeper: Option<OS>, logger: L, scorer: Option<S>,
+		sweeper: Option<OS>, logger: L, scorer: Option<S>,
 	) -> Self
 	where
 		UL::Target: 'static + UtxoLookup,
@@ -1501,7 +1500,6 @@ impl BackgroundProcessor {
 		CM::Target: AChannelManager,
 		OM::Target: AOnionMessenger,
 		PM::Target: APeerManager,
-		LM::Target: ALiquidityManagerSync,
 		D::Target: ChangeDestinationSourceSync,
 		O::Target: 'static + OutputSpender,
 		K::Target: 'static + KVStoreSync,
@@ -1581,24 +1579,13 @@ impl BackgroundProcessor {
 				log_trace!(logger, "Terminating background processor.");
 				break;
 			}
-			let sleeper = match (Some(&onion_messenger), liquidity_manager.as_ref()) {
-				(Some(om), Some(lm)) => Sleeper::from_four_futures(
+			let sleeper = match Some(&onion_messenger) {
+				Some(om) => Sleeper::from_three_futures(
 					&channel_manager.get_cm().get_event_or_persistence_needed_future(),
 					&chain_monitor.get_update_future(),
 					&om.get_om().get_update_future(),
-					&lm.get_lm().get_pending_msgs_or_needs_persist_future(),
 				),
-				(Some(om), None) => Sleeper::from_three_futures(
-					&channel_manager.get_cm().get_event_or_persistence_needed_future(),
-					&chain_monitor.get_update_future(),
-					&om.get_om().get_update_future(),
-				),
-				(None, Some(lm)) => Sleeper::from_three_futures(
-					&channel_manager.get_cm().get_event_or_persistence_needed_future(),
-					&chain_monitor.get_update_future(),
-					&lm.get_lm().get_pending_msgs_or_needs_persist_future(),
-				),
-				(None, None) => Sleeper::from_two_futures(
+				None => Sleeper::from_two_futures(
 					&channel_manager.get_cm().get_event_or_persistence_needed_future(),
 					&chain_monitor.get_update_future(),
 				),
@@ -1630,13 +1617,6 @@ impl BackgroundProcessor {
 				log_trace!(logger, "Done persisting ChannelManager.");
 			}
 
-			if let Some(liquidity_manager) = liquidity_manager.as_ref() {
-				log_trace!(logger, "Persisting LiquidityManager...");
-				let _ = liquidity_manager.get_lm().persist().map_err(|e| {
-					log_error!(logger, "Persisting LiquidityManager failed: {}", e);
-				});
-			}
-
 			// Note that we want to run a graph prune once not long after startup before
 			// falling back to our usual hourly prunes. This avoids short-lived clients never
 			// pruning their network graph. We run once 60 seconds after startup before
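The comment above describes the prune cadence: one early pass roughly 60 seconds after startup, then regular hourly prunes, so short-lived nodes still prune their graph at least once. A hypothetical sketch of such a due-check (the names and constants here are illustrative, not LDK's):

```rust
use std::time::{Duration, Instant};

// Illustrative constants: one early prune shortly after startup, then hourly.
const FIRST_PRUNE_DELAY: Duration = Duration::from_secs(60);
const PRUNE_INTERVAL: Duration = Duration::from_secs(60 * 60);

/// Returns true when a prune is due, given startup time and the last prune (if any).
fn prune_due(started: Instant, last_prune: Option<Instant>) -> bool {
	match last_prune {
		// Never pruned: run once the short startup delay has elapsed.
		None => started.elapsed() >= FIRST_PRUNE_DELAY,
		// Already pruned at least once: fall back to the regular interval.
		Some(at) => at.elapsed() >= PRUNE_INTERVAL,
	}
}

fn main() {
	let started = Instant::now();
	// Immediately after startup neither condition holds yet.
	assert!(!prune_due(started, None));
	println!("no prune due yet; first prune after {:?}", FIRST_PRUNE_DELAY);
}
```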