Untitled Git - bdk/commitdiff
Make bdk and bdk_chain work under 1.57.0
authorSteve Myers <steve@notmandatory.org>
Fri, 3 Mar 2023 04:05:11 +0000 (22:05 -0600)
committerDaniela Brozzoni <danielabrozzoni@protonmail.com>
Fri, 3 Mar 2023 16:44:09 +0000 (17:44 +0100)
- rewrite some parts of the code to deal with older borrow checker
- downgraded hashbrown

16 files changed:
crates/bdk/Cargo.toml
crates/bdk/src/wallet/mod.rs
crates/bdk/src/wallet/persist.rs [deleted file]
crates/bdk/src/wallet/tx_builder.rs
crates/chain/Cargo.toml
crates/chain/src/chain_graph.rs
crates/chain/src/keychain/persist.rs
crates/chain/src/keychain/tracker.rs
crates/chain/src/keychain/txout_index.rs
crates/chain/src/sparse_chain.rs
crates/chain/src/spk_txout_index.rs
crates/chain/src/tx_graph.rs
crates/chain/tests/test_keychain_txout_index.rs
example-crates/keychain_tracker_electrum/src/main.rs
example-crates/keychain_tracker_example_cli/Cargo.toml
example-crates/keychain_tracker_example_cli/src/lib.rs

index c4b0102853236c046d3d7fa781ff9d9cfc6b6c3c..77b577400a9b3a7159979f081e1906091efe74b7 100644 (file)
@@ -9,8 +9,8 @@ keywords = ["bitcoin", "wallet", "descriptor", "psbt"]
 readme = "README.md"
 license = "MIT OR Apache-2.0"
 authors = ["Bitcoin Dev Kit Developers"]
-edition = "2018"
-
+edition = "2021"
+rust-version = "1.57"
 
 [dependencies]
 log = "^0.4"
index e83ae810c7f4de130ea62737bca2037120fbc7c0..ca37de129a6d0507e276a69dfb2df8f3a7a33db0 100644 (file)
@@ -22,7 +22,7 @@ use alloc::{
 pub use bdk_chain::keychain::Balance;
 use bdk_chain::{
     chain_graph,
-    keychain::{KeychainChangeSet, KeychainScan, KeychainTracker},
+    keychain::{persist, KeychainChangeSet, KeychainScan, KeychainTracker},
     sparse_chain, BlockId, ConfirmationTime, IntoOwned,
 };
 use bitcoin::consensus::encode::serialize;
@@ -48,7 +48,6 @@ pub(crate) mod utils;
 #[cfg(feature = "hardware-signer")]
 #[cfg_attr(docsrs, doc(cfg(feature = "hardware-signer")))]
 pub mod hardwaresigner;
-pub mod persist;
 
 pub use utils::IsDust;
 
@@ -85,7 +84,7 @@ pub struct Wallet<D = ()> {
     signers: Arc<SignersContainer>,
     change_signers: Arc<SignersContainer>,
     keychain_tracker: KeychainTracker<KeychainKind, ConfirmationTime>,
-    persist: persist::Persist<D>,
+    persist: persist::Persist<KeychainKind, ConfirmationTime, D>,
     network: Network,
     secp: SecpCtx,
 }
@@ -196,7 +195,7 @@ impl<D> Wallet<D> {
         network: Network,
     ) -> Result<Self, NewError<D::LoadError>>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let secp = Secp256k1::new();
 
@@ -258,7 +257,7 @@ impl<D> Wallet<D> {
     /// (i.e. does not end with /*) then the same address will always be returned for any [`AddressIndex`].
     pub fn get_address(&mut self, address_index: AddressIndex) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self._get_address(address_index, KeychainKind::External)
     }
@@ -272,14 +271,14 @@ impl<D> Wallet<D> {
     /// be returned for any [`AddressIndex`].
     pub fn get_internal_address(&mut self, address_index: AddressIndex) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self._get_address(address_index, KeychainKind::Internal)
     }
 
     fn _get_address(&mut self, address_index: AddressIndex, keychain: KeychainKind) -> AddressInfo
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let keychain = self.map_keychain(keychain);
         let txout_index = &mut self.keychain_tracker.txout_index;
@@ -614,7 +613,7 @@ impl<D> Wallet<D> {
         params: TxParams,
     ) -> Result<(psbt::PartiallySignedTransaction, TransactionDetails), Error>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         let external_descriptor = self
             .keychain_tracker
@@ -1689,7 +1688,7 @@ impl<D> Wallet<D> {
     /// [`commit`]: Self::commit
     pub fn apply_update<Tx>(&mut self, update: Update<Tx>) -> Result<(), UpdateError>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
         Tx: IntoOwned<Transaction> + Clone,
     {
         let changeset = self.keychain_tracker.apply_update(update)?;
@@ -1702,7 +1701,7 @@ impl<D> Wallet<D> {
     /// [`staged`]: Self::staged
     pub fn commit(&mut self) -> Result<(), D::WriteError>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self.persist.commit()
     }
diff --git a/crates/bdk/src/wallet/persist.rs b/crates/bdk/src/wallet/persist.rs
deleted file mode 100644 (file)
index a947ef3..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-//! Persistence for changes made to a [`Wallet`].
-//!
-//! BDK's [`Wallet`] needs somewhere to persist changes it makes during operation.
-//! Operations like giving out a new address are crucial to persist so that next time the
-//! application is loaded it can find transactions related to that address.
-//!
-//! Note that `Wallet` does not read this persisted data during operation since it always has a copy
-//! in memory
-//!
-//! [`Wallet`]: crate::Wallet
-use crate::KeychainKind;
-use bdk_chain::{keychain::KeychainTracker, ConfirmationTime};
-
-/// `Persist` wraps a [`Backend`] to create a convienient staging area for changes before they are
-/// persisted. Not all changes made to the [`Wallet`] need to be written to disk right away so you
-/// can use [`Persist::stage`] to *stage* it first and then [`Persist::commit`] to finally write it
-/// to disk.
-///
-/// [`Wallet`]: crate::Wallet
-#[derive(Debug)]
-pub struct Persist<P> {
-    backend: P,
-    stage: ChangeSet,
-}
-
-impl<P> Persist<P> {
-    /// Create a new `Persist` from a [`Backend`]
-    pub fn new(backend: P) -> Self {
-        Self {
-            backend,
-            stage: Default::default(),
-        }
-    }
-
-    /// Stage a `changeset` to later persistence with [`commit`].
-    ///
-    /// [`commit`]: Self::commit
-    pub fn stage(&mut self, changeset: ChangeSet) {
-        self.stage.append(changeset)
-    }
-
-    /// Get the changes that haven't been commited yet
-    pub fn staged(&self) -> &ChangeSet {
-        &self.stage
-    }
-
-    /// Commit the staged changes to the underlying persistence backend.
-    ///
-    /// Retuns a backend defined error if this fails
-    pub fn commit(&mut self) -> Result<(), P::WriteError>
-    where
-        P: Backend,
-    {
-        self.backend.append_changeset(&self.stage)?;
-        self.stage = Default::default();
-        Ok(())
-    }
-}
-
-/// A persistence backend for [`Wallet`]
-///
-/// [`Wallet`]: crate::Wallet
-pub trait Backend {
-    /// The error the backend returns when it fails to write
-    type WriteError: core::fmt::Debug;
-    /// The error the backend returns when it fails to load
-    type LoadError: core::fmt::Debug;
-    /// Appends a new changeset to the persistance backend.
-    ///
-    /// It is up to the backend what it does with this. It could store every changeset in a list or
-    /// it insert the actual changes to a more structured database. All it needs to guarantee is
-    /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all
-    /// changesets had been applied sequentially.
-    ///
-    /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker
-    fn append_changeset(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError>;
-
-    /// Applies all the changesets the backend has received to `tracker`.
-    fn load_into_keychain_tracker(
-        &mut self,
-        tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-    ) -> Result<(), Self::LoadError>;
-}
-
-#[cfg(feature = "file-store")]
-mod file_store {
-    use super::*;
-    use bdk_chain::file_store::{IterError, KeychainStore};
-
-    type FileStore = KeychainStore<KeychainKind, ConfirmationTime>;
-
-    impl Backend for FileStore {
-        type WriteError = std::io::Error;
-        type LoadError = IterError;
-        fn append_changeset(&mut self, changeset: &ChangeSet) -> Result<(), Self::WriteError> {
-            self.append_changeset(changeset)
-        }
-        fn load_into_keychain_tracker(
-            &mut self,
-            tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-        ) -> Result<(), Self::LoadError> {
-            self.load_into_keychain_tracker(tracker)
-        }
-    }
-}
-
-impl Backend for () {
-    type WriteError = ();
-    type LoadError = ();
-    fn append_changeset(&mut self, _changeset: &ChangeSet) -> Result<(), Self::WriteError> {
-        Ok(())
-    }
-    fn load_into_keychain_tracker(
-        &mut self,
-        _tracker: &mut KeychainTracker<KeychainKind, ConfirmationTime>,
-    ) -> Result<(), Self::LoadError> {
-        Ok(())
-    }
-}
-
-#[cfg(feature = "file-store")]
-pub use file_store::*;
-
-use super::ChangeSet;
index c5492c5052fdb1647e79a74bd60a1c7ecc9f2c2f..dbd4811c182267fd7c1261ff6ee59e3b7847217d 100644 (file)
@@ -39,6 +39,7 @@
 use crate::collections::BTreeMap;
 use crate::collections::HashSet;
 use alloc::{boxed::Box, rc::Rc, string::String, vec::Vec};
+use bdk_chain::ConfirmationTime;
 use core::cell::RefCell;
 use core::marker::PhantomData;
 
@@ -525,7 +526,7 @@ impl<'a, D, Cs: CoinSelectionAlgorithm, Ctx: TxBuilderContext> TxBuilder<'a, D,
     /// [`BIP174`]: https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki
     pub fn finish(self) -> Result<(Psbt, TransactionDetails), Error>
     where
-        D: persist::Backend,
+        D: persist::PersistBackend<KeychainKind, ConfirmationTime>,
     {
         self.wallet
             .borrow_mut()
index a443076a9299dc9c47e6d0e142cc8f3b127b14ff..c3d0d77dc7668380b63113a96b8df8af5ad9491e 100644 (file)
@@ -2,6 +2,7 @@
 name = "bdk_chain"
 version = "0.3.1"
 edition = "2021"
+rust-version = "1.57"
 homepage = "https://bitcoindevkit.org"
 repository = "https://github.com/bitcoindevkit/bdk"
 documentation = "https://docs.rs/bdk_chain"
@@ -14,8 +15,10 @@ readme = "../README.md"
 [dependencies]
 bitcoin = { version = "0.29" }
 serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] }
+
 # Use hashbrown as a feature flag to have HashSet and HashMap from it.
-hashbrown = { version = "0.13.2", optional = true }
+# note: version 0.13 breaks our MSRV.
+hashbrown = { version = "0.12", optional = true,  features = ["serde"] }
 miniscript = { version = "9.0.0", optional = true  }
 
 [dev-dependencies]
@@ -24,4 +27,4 @@ rand = "0.8"
 [features]
 default = ["std", "miniscript"]
 std = []
-serde = ["serde_crate", "bitcoin/serde"]
+serde = ["serde_crate", "bitcoin/serde" ]
index 48679c5579699a9bd3b8c57ba9e97c1be86e6b3a..ab0db6d91c1a0b53ab2060bd2ba259652a1099b5 100644 (file)
@@ -294,7 +294,7 @@ where
         &'a self,
         tx: &'a Transaction,
     ) -> impl Iterator<Item = (&'a P, Txid)> + 'a {
-        self.graph.walk_conflicts(tx, |_, conflict_txid| {
+        self.graph.walk_conflicts(tx, move |_, conflict_txid| {
             self.chain
                 .tx_position(conflict_txid)
                 .map(|conflict_pos| (conflict_pos, conflict_txid))
@@ -309,39 +309,42 @@ where
         &self,
         changeset: &mut ChangeSet<P, T>,
     ) -> Result<(), UnresolvableConflict<P>> {
-        let chain_conflicts = changeset
-            .chain
-            .txids
-            .iter()
-            // we want to find new txid additions by the changeset (all txid entries in the
-            // changeset with Some(position_change))
-            .filter_map(|(&txid, pos_change)| pos_change.as_ref().map(|pos| (txid, pos)))
-            // we don't care about txids that move, only newly added txids
-            .filter(|&(txid, _)| self.chain.tx_position(txid).is_none())
-            // full tx should exist (either in graph, or additions)
-            .filter_map(|(txid, pos)| {
-                let full_tx = self
+        let mut chain_conflicts = vec![];
+
+        for (&txid, pos_change) in &changeset.chain.txids {
+            let pos = match pos_change {
+                Some(pos) => {
+                    // Ignore txs that are still in the chain -- we only care about new ones
+                    if self.chain.tx_position(txid).is_some() {
+                        continue;
+                    }
+                    pos
+                }
+                // Ignore txids that are being deleted by the change (they can't conflict)
+                None => continue,
+            };
+
+            let mut full_tx = self.graph.get_tx(txid);
+
+            if full_tx.is_none() {
+                full_tx = changeset
                     .graph
-                    .get_tx(txid)
-                    .or_else(|| {
-                        changeset
-                            .graph
-                            .tx
-                            .iter()
-                            .find(|tx| tx.as_tx().txid() == txid)
-                    })
-                    .map(|tx| (txid, tx, pos));
-                debug_assert!(full_tx.is_some(), "should have full tx at this point");
-                full_tx
-            })
-            .flat_map(|(new_txid, new_tx, new_pos)| {
-                self.tx_conflicts_in_chain(new_tx.as_tx()).map(
-                    move |(conflict_pos, conflict_txid)| {
-                        (new_pos.clone(), new_txid, conflict_pos, conflict_txid)
-                    },
-                )
-            })
-            .collect::<Vec<_>>();
+                    .tx
+                    .iter()
+                    .find(|tx| tx.as_tx().txid() == txid)
+            }
+
+            debug_assert!(full_tx.is_some(), "should have full tx at this point");
+
+            let full_tx = match full_tx {
+                Some(full_tx) => full_tx,
+                None => continue,
+            };
+
+            for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx.as_tx()) {
+                chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid))
+            }
+        }
 
         for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts {
             // We have found a tx that conflicts with our update txid. Only allow this when the
@@ -411,7 +414,7 @@ where
     pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator<Item = (&P, &T)> {
         self.chain
             .txids()
-            .map(|(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
+            .map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
     }
 
     /// Finds the transaction in the chain that spends `outpoint` given the input/output
index 8bf68cb1d37730fd26b7d9e159651ac68c77e408..94c9faf25ff37fa6279a9e58d058b1cf50a18a00 100644 (file)
@@ -23,7 +23,7 @@ pub struct Persist<K, P, B> {
     stage: keychain::KeychainChangeSet<K, P>,
 }
 
-impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
+impl<K, P, B> Persist<K, P, B> {
     /// Create a new `Persist` from a [`PersistBackend`].
     pub fn new(backend: B) -> Self {
         Self {
@@ -51,7 +51,10 @@ impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
     /// Commit the staged changes to the underlying persistence backend.
     ///
     /// Retuns a backend defined error if this fails
-    pub fn commit(&mut self) -> Result<(), B::WriteError> {
+    pub fn commit(&mut self) -> Result<(), B::WriteError>
+    where
+        B: PersistBackend<K, P>,
+    {
         self.backend.append_changeset(&self.stage)?;
         self.stage = Default::default();
         Ok(())
index c772a8338715f561760ac9b58ff7b24c74b9f9b8..d360ff9718db89cb211fe3335a67254e3e159ea2 100644 (file)
@@ -125,7 +125,7 @@ where
     pub fn full_txouts(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
         self.txout_index
             .txouts()
-            .filter_map(|(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
+            .filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
     }
 
     /// Iterates through [`FullTxOut`]s that are unspent outputs.
index 64580f6867955ac7871e0acf9a6ed084ffe03952..16ee49fd10d1ea160d07e99be4a3bb465f3e6793 100644 (file)
@@ -423,7 +423,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
                         Cow::Owned(descriptor.clone()),
                         next_reveal_index..index + 1,
                     ),
-                    DerivationAdditions([(keychain.clone(), index)].into()),
+                    DerivationAdditions(core::iter::once((keychain.clone(), index)).collect()),
                 )
             }
             None => (
@@ -575,11 +575,17 @@ where
         .take_while(move |&index| has_wildcard || index == 0)
         // we can only iterate over non-hardened indices
         .take_while(|&index| index <= BIP32_MAX_INDEX)
-        // take until failure
-        .map_while(move |index| {
-            descriptor
-                .derived_descriptor(&secp, index)
-                .map(|desc| (index, desc.script_pubkey()))
-                .ok()
-        })
+        .map(
+            move |index| -> Result<_, miniscript::descriptor::ConversionError> {
+                Ok((
+                    index,
+                    descriptor
+                        .at_derivation_index(index)
+                        .derived_descriptor(&secp)?
+                        .script_pubkey(),
+                ))
+            },
+        )
+        .take_while(Result::is_ok)
+        .map(Result::unwrap)
 }
index 833890e48a771f4c263cec80019b6517f0819b67..44af722f5c337ac36ee51eb6229a0e78ceb4cc6c 100644 (file)
@@ -949,7 +949,7 @@ impl<P: ChainPosition> SparseChain<P> {
         changeset
             .txids
             .iter()
-            .filter(|(&txid, pos)| {
+            .filter(move |(&txid, pos)| {
                 pos.is_some() /*it was not a deletion*/ &&
                 self.tx_position(txid).is_none() /* we don't have the txid already */
             })
index 6c04a5f71ffba32ec7ad5686d3e5545fd0347135..f7dffb5fee8f4fead123ee757e67c05d082915e5 100644 (file)
@@ -79,13 +79,12 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
     /// See [`ForEachTxout`] for the types that support this.
     ///
     /// [`ForEachTxout`]: crate::ForEachTxOut
-    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<&I> {
-        // let scanner = &mut SpkTxOutScanner::new(self);
+    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<I> {
         let mut scanned_indices = BTreeSet::new();
 
         txouts.for_each_txout(|(op, txout)| {
             if let Some(spk_i) = scan_txout!(self, op, txout) {
-                scanned_indices.insert(spk_i);
+                scanned_indices.insert(spk_i.clone());
             }
         });
 
@@ -207,7 +206,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
     {
         self.unused
             .range(range)
-            .map(|index| (index, self.spk_at_index(index).expect("must exist")))
+            .map(move |index| (index, self.spk_at_index(index).expect("must exist")))
     }
 
     /// Returns whether the script pubkey at `index` has been used or not.
index 69cef1cc112fa36cb178b96437295a7f4915f30e..1857a8d0ce9d2ebc6df6cdde025e6655d89fdc80 100644 (file)
@@ -419,7 +419,7 @@ impl<T> TxGraph<T> {
         tx.input
             .iter()
             .enumerate()
-            .filter_map(|(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
+            .filter_map(move |(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
             .flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned()))
             .filter(move |(_, conflicting_txid)| *conflicting_txid != txid)
     }
@@ -474,7 +474,7 @@ impl<T> Additions<T> {
                     .output
                     .iter()
                     .enumerate()
-                    .map(|(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
+                    .map(move |(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
             })
             .chain(self.txout.iter().map(|(op, txout)| (*op, txout)))
     }
index 48be994d6ba04f7368b008db657e77678310606b..cfbcd123b57d0b451ac45486ca4b9d6082a55595 100644 (file)
@@ -253,7 +253,7 @@ fn test_wildcard_derivations() {
 
     (0..=15)
         .into_iter()
-        .chain([17, 20, 23].into_iter())
+        .chain(vec![17, 20, 23].into_iter())
         .for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index)));
 
     assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
index 6af8161438db0d7db9c6137a6e2cce46d97fbc81..70c3441eb172809fbf1633534b5da9a7eaf51793 100644 (file)
@@ -62,7 +62,7 @@ fn main() -> anyhow::Result<()> {
 
     let client = electrum_client::Client::from_config(electrum_url, config)?;
 
-    let electrum_cmd = match args.command {
+    let electrum_cmd = match args.command.clone() {
         cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd,
         general_command => {
             return cli::handle_commands(
index e805fd8f478b26ab5dceb760020fbe8f5a904d76..72009b0c562c0e5600dc76fe179d0a9be84b8b61 100644 (file)
@@ -10,7 +10,7 @@ bdk_file_store = { path = "../../crates/file_store" }
 bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
 bdk_coin_select = { path = "../../nursery/coin_select" }
 
-clap = { version = "4", features = ["derive", "env"] }
+clap = { version = "3.2.23", features = ["derive", "env"] }
 anyhow = "1"
 serde = { version = "1", features = ["derive"] }
 serde_json = { version = "^1.0" }
index 5991b0410f53d28b7cd2a242a41204a1cd933479..b18016812642c1a89916d01f2350c6857c1f5b62 100644 (file)
@@ -675,7 +675,7 @@ pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>(
 ) -> impl Iterator<Item = (bdk_tmp_plan::Plan<AK>, FullTxOut<P>)> + 'a {
     tracker
         .full_utxos()
-        .filter_map(|((keychain, derivation_index), full_txout)| {
+        .filter_map(move |((keychain, derivation_index), full_txout)| {
             Some((
                 bdk_tmp_plan::plan_satisfaction(
                     &tracker