bdk/commitdiff
Move everything else over 🎉
author    LLFourn <lloyd.fourn@gmail.com>
Thu, 2 Mar 2023 05:23:06 +0000 (16:23 +1100)
committer Daniela Brozzoni <danielabrozzoni@protonmail.com>
Thu, 2 Mar 2023 09:56:36 +0000 (10:56 +0100)
This completes the move of things from https://github.com/LLFourn/bdk_core_staging

37 files changed:
Cargo.toml
crates/bdk/src/descriptor/policy.rs
crates/bdk/src/wallet/export.rs
crates/electrum/Cargo.toml
crates/esplora/Cargo.toml [new file with mode: 0644]
crates/esplora/README.md [new file with mode: 0644]
crates/esplora/src/lib.rs [new file with mode: 0644]
crates/file_store/Cargo.toml
crates/file_store/tests/test_file_store.rs
example-crates/keychain_tracker_electrum_example/.gitignore [new file with mode: 0644]
example-crates/keychain_tracker_electrum_example/Cargo.toml [new file with mode: 0644]
example-crates/keychain_tracker_electrum_example/README.md [new file with mode: 0644]
example-crates/keychain_tracker_electrum_example/src/main.rs [new file with mode: 0644]
example-crates/keychain_tracker_esplora_example/.gitignore [new file with mode: 0644]
example-crates/keychain_tracker_esplora_example/Cargo.toml [new file with mode: 0644]
example-crates/keychain_tracker_esplora_example/src/main.rs [new file with mode: 0644]
example-crates/keychain_tracker_example_cli/.gitignore [new file with mode: 0644]
example-crates/keychain_tracker_example_cli/Cargo.toml [new file with mode: 0644]
example-crates/keychain_tracker_example_cli/README.md [new file with mode: 0644]
example-crates/keychain_tracker_example_cli/src/lib.rs [new file with mode: 0644]
nursery/README.md [new file with mode: 0644]
nursery/coin_select/Cargo.toml [new file with mode: 0644]
nursery/coin_select/src/bnb.rs [new file with mode: 0644]
nursery/coin_select/src/coin_selector.rs [new file with mode: 0644]
nursery/coin_select/src/lib.rs [new file with mode: 0644]
nursery/tmp_plan/Cargo.toml [new file with mode: 0644]
nursery/tmp_plan/README.md [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/Cargo.toml [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/README.md [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/src/lib.rs [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/src/plan_impls.rs [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/src/requirements.rs [new file with mode: 0644]
nursery/tmp_plan/bdk_tmp_plan/src/template.rs [new file with mode: 0644]
nursery/tmp_plan/src/lib.rs [new file with mode: 0644]
nursery/tmp_plan/src/plan_impls.rs [new file with mode: 0644]
nursery/tmp_plan/src/requirements.rs [new file with mode: 0644]
nursery/tmp_plan/src/template.rs [new file with mode: 0644]

diff --git a/Cargo.toml b/Cargo.toml
index e8a93b6a8473335b89534a9a703748059b8d6d4b..7f97bf6a893df7e0b0011735da4f697184cecd7e 100644 (file)
@@ -1,11 +1,16 @@
 [workspace]
 members = [
     "crates/bdk",
-    "crates/bdk_chain",
-    "crates/bdk_file_store",
-    "crates/bdk_electrum",
+    "crates/chain",
+    "crates/file_store",
+    "crates/electrum",
     "example-crates/esplora-wallet",
     "example-crates/electrum-wallet",
+    "example-crates/keychain_tracker_electrum_example",
+    "example-crates/keychain_tracker_esplora_example",
+    "example-crates/keychain_tracker_example_cli",
+    "nursery/tmp_plan",
+    "nursery/coin_select"
 ]
 
 [workspace.package]
diff --git a/crates/bdk/src/descriptor/policy.rs b/crates/bdk/src/descriptor/policy.rs
index 96889ffa0be9adf95082be7c0246762d989670c3..af3e4a3b8c7fb1877e7ee28dcbb5e4109e7a8110 100644 (file)
@@ -32,7 +32,7 @@
 //!
 //! let signers = Arc::new(SignersContainer::build(key_map, &extended_desc, &secp));
 //! let policy = extended_desc.extract_policy(&signers, BuildSatisfaction::None, &secp)?;
-//! println!("policy: {}", serde_json::to_string(&policy)?);
+//! println!("policy: {}", serde_json::to_string(&policy).unwrap());
 //! # Ok::<(), bdk::Error>(())
 //! ```
 
diff --git a/crates/bdk/src/wallet/export.rs b/crates/bdk/src/wallet/export.rs
index 3ec43d2b9ea5752d38a857b0e977950739974def..905638449e9763759e9824f1c7ad4af7a84a2330 100644 (file)
@@ -34,7 +34,7 @@
 //!     import.change_descriptor().as_ref(),
 //!     Network::Testnet,
 //! )?;
-//! # Ok::<_, bdk::Error>(())
+//! # Ok::<_, Box<dyn std::error::Error>>(())
 //! ```
 //!
 //! ### Export a `Wallet`
 //!     Some("wpkh([c258d2e4/84h/1h/0h]tpubDD3ynpHgJQW8VvWRzQ5WFDCrs4jqVFGHB3vLC3r49XHJSqP8bHKdK4AriuUKLccK68zfzowx7YhmDN8SiSkgCDENUFx9qVw65YyqM78vyVe/1/*)"),
 //!     Network::Testnet,
 //! )?;
-//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true)
-//!     .map_err(ToString::to_string)
-//!     .map_err(bdk::Error::Generic)?;
+//! let export = FullyNodedExport::export_wallet(&wallet, "exported wallet", true).unwrap();
 //!
 //! println!("Exported: {}", export.to_string());
-//! # Ok::<_, bdk::Error>(())
+//! # Ok::<_, Box<dyn std::error::Error>>(())
 //! ```
 
 use core::str::FromStr;
diff --git a/crates/electrum/Cargo.toml b/crates/electrum/Cargo.toml
index 315e861940549a8e874aaf52617ad494ca57b7a5..a0f70c7e43d6c824189a033929f63d3601344155 100644 (file)
@@ -5,12 +5,12 @@ edition = "2021"
 homepage = "https://bitcoindevkit.org"
 repository = "https://github.com/LLFourn/bdk_core_staging"
 documentation = "https://docs.rs/bdk_electrum"
-description = "BDK Electrum client library for updating the keychain tracker."
+description = "Fetch data from electrum in the form BDK accepts"
 license = "MIT OR Apache-2.0"
 readme = "README.md"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-bdk_chain = { path = "../bdk_chain", version = "0.3", features = ["serde", "miniscript"] }
+bdk_chain = { path = "../chain", version = "0.3", features = ["serde", "miniscript"] }
 electrum-client = { version = "0.12" }
diff --git a/crates/esplora/Cargo.toml b/crates/esplora/Cargo.toml
new file mode 100644 (file)
index 0000000..680ed3c
--- /dev/null
@@ -0,0 +1,16 @@
+[package]
+name = "bdk_esplora"
+version = "0.1.0"
+edition = "2021"
+homepage = "https://bitcoindevkit.org"
+repository = "https://github.com/LLFourn/bdk_core_staging"
+documentation = "https://docs.rs/bdk_esplora"
+description = "Fetch data from esplora in the form that accepts"
+license = "MIT OR Apache-2.0"
+readme = "README.md"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+bdk_chain = { path = "../chain", version = "0.3", features = ["serde", "miniscript"] }
+esplora-client = { version = "0.3" }
diff --git a/crates/esplora/README.md b/crates/esplora/README.md
new file mode 100644 (file)
index 0000000..637a7d5
--- /dev/null
@@ -0,0 +1,3 @@
+# BDK Esplora
+
+BDK Esplora client library for updating the `bdk_chain` structures.
diff --git a/crates/esplora/src/lib.rs b/crates/esplora/src/lib.rs
new file mode 100644 (file)
index 0000000..a8eae78
--- /dev/null
@@ -0,0 +1,303 @@
+//! This crate is used for updating structures of [`bdk_chain`] with data from an esplora server.
+//!
+//! The star of the show is the [`EsploraExt::scan`] method, which scans for relevant
+//! blockchain data (via esplora) and outputs a [`KeychainScan`].
+
+use bdk_chain::{
+    bitcoin::{BlockHash, OutPoint, Script, Txid},
+    chain_graph::ChainGraph,
+    keychain::KeychainScan,
+    sparse_chain, BlockId, ConfirmationTime,
+};
+use esplora_client::{OutputStatus, TxStatus};
+use std::collections::BTreeMap;
+
+pub use esplora_client;
+use esplora_client::Error;
+
+/// Trait to extend [`esplora_client::BlockingClient`] functionality.
+///
+/// Refer to [crate-level documentation] for more.
+///
+/// [crate-level documentation]: crate
+pub trait EsploraExt {
+    /// Scan the blockchain (via esplora) for the data specified and returns a [`KeychainScan`].
+    ///
+    /// - `local_chain`: the most recent block hashes present locally
+    /// - `keychain_spks`: keychains that we want to scan transactions for
+    /// - `txids`: transactions that we want updated [`ChainPosition`]s for
+    /// - `outpoints`: transactions associated with these outpoints (residing, spending) that we
+    ///     want included in the update
+    ///
+    /// The scan for each keychain stops after a gap of `stop_gap` script pubkeys with no associated
+    /// transactions. `parallel_requests` specifies the max number of HTTP requests to make in
+    /// parallel.
+    ///
+    /// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
+    fn scan<K: Ord + Clone>(
+        &self,
+        local_chain: &BTreeMap<u32, BlockHash>,
+        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
+        txids: impl IntoIterator<Item = Txid>,
+        outpoints: impl IntoIterator<Item = OutPoint>,
+        stop_gap: usize,
+        parallel_requests: usize,
+    ) -> Result<KeychainScan<K, ConfirmationTime>, Error>;
+
+    /// Convenience method to call [`scan`] without requiring a keychain.
+    ///
+    /// [`scan`]: EsploraExt::scan
+    fn scan_without_keychain(
+        &self,
+        local_chain: &BTreeMap<u32, BlockHash>,
+        misc_spks: impl IntoIterator<Item = Script>,
+        txids: impl IntoIterator<Item = Txid>,
+        outpoints: impl IntoIterator<Item = OutPoint>,
+        parallel_requests: usize,
+    ) -> Result<ChainGraph<ConfirmationTime>, Error> {
+        let wallet_scan = self.scan(
+            local_chain,
+            [(
+                (),
+                misc_spks
+                    .into_iter()
+                    .enumerate()
+                    .map(|(i, spk)| (i as u32, spk)),
+            )]
+            .into(),
+            txids,
+            outpoints,
+            usize::MAX,
+            parallel_requests,
+        )?;
+
+        Ok(wallet_scan.update)
+    }
+}
+
+impl EsploraExt for esplora_client::BlockingClient {
+    fn scan<K: Ord + Clone>(
+        &self,
+        local_chain: &BTreeMap<u32, BlockHash>,
+        keychain_spks: BTreeMap<K, impl IntoIterator<Item = (u32, Script)>>,
+        txids: impl IntoIterator<Item = Txid>,
+        outpoints: impl IntoIterator<Item = OutPoint>,
+        stop_gap: usize,
+        parallel_requests: usize,
+    ) -> Result<KeychainScan<K, ConfirmationTime>, Error> {
+        let parallel_requests = parallel_requests.max(1);
+        let mut scan = KeychainScan::default();
+        let update = &mut scan.update;
+        let last_active_indices = &mut scan.last_active_indices;
+
+        for (&height, &original_hash) in local_chain.iter().rev() {
+            let update_block_id = BlockId {
+                height,
+                hash: self.get_block_hash(height)?,
+            };
+            let _ = update
+                .insert_checkpoint(update_block_id)
+                .expect("cannot repeat height here");
+            if update_block_id.hash == original_hash {
+                break;
+            }
+        }
+        let tip_at_start = BlockId {
+            height: self.get_height()?,
+            hash: self.get_tip_hash()?,
+        };
+        if let Err(failure) = update.insert_checkpoint(tip_at_start) {
+            match failure {
+                sparse_chain::InsertCheckpointError::HashNotMatching { .. } => {
+                    // there has been a re-org before we started scanning. We haven't consumed any iterators so it's safe to recursively call.
+                    return EsploraExt::scan(
+                        self,
+                        local_chain,
+                        keychain_spks,
+                        txids,
+                        outpoints,
+                        stop_gap,
+                        parallel_requests,
+                    );
+                }
+            }
+        }
+
+        for (keychain, spks) in keychain_spks {
+            let mut spks = spks.into_iter();
+            let mut last_active_index = None;
+            let mut empty_scripts = 0;
+
+            loop {
+                let handles = (0..parallel_requests)
+                    .filter_map(
+                        |_| -> Option<
+                            std::thread::JoinHandle<Result<(u32, Vec<esplora_client::Tx>), _>>,
+                        > {
+                            let (index, script) = spks.next()?;
+                            let client = self.clone();
+                            Some(std::thread::spawn(move || {
+                                let mut related_txs = client.scripthash_txs(&script, None)?;
+
+                                let n_confirmed =
+                                    related_txs.iter().filter(|tx| tx.status.confirmed).count();
+                                // esplora pages on 25 confirmed transactions. If there are 25 or more, we
+                                // keep requesting to see if there's more.
+                                if n_confirmed >= 25 {
+                                    loop {
+                                        let new_related_txs = client.scripthash_txs(
+                                            &script,
+                                            Some(related_txs.last().unwrap().txid),
+                                        )?;
+                                        let n = new_related_txs.len();
+                                        related_txs.extend(new_related_txs);
+                                        // we've reached the end
+                                        if n < 25 {
+                                            break;
+                                        }
+                                    }
+                                }
+
+                                Result::<_, esplora_client::Error>::Ok((index, related_txs))
+                            }))
+                        },
+                    )
+                    .collect::<Vec<_>>();
+
+                let n_handles = handles.len();
+
+                for handle in handles {
+                    let (index, related_txs) = handle.join().unwrap()?; // TODO: don't unwrap
+                    if related_txs.is_empty() {
+                        empty_scripts += 1;
+                    } else {
+                        last_active_index = Some(index);
+                        empty_scripts = 0;
+                    }
+                    for tx in related_txs {
+                        let confirmation_time =
+                            map_confirmation_time(&tx.status, tip_at_start.height);
+
+                        if let Err(failure) = update.insert_tx(tx.to_tx(), confirmation_time) {
+                            use bdk_chain::{
+                                chain_graph::InsertTxError, sparse_chain::InsertTxError::*,
+                            };
+                            match failure {
+                                InsertTxError::Chain(TxTooHigh { .. }) => {
+                                    unreachable!("chain position already checked earlier")
+                                }
+                                InsertTxError::Chain(TxMovedUnexpectedly { .. })
+                                | InsertTxError::UnresolvableConflict(_) => {
+                                    /* implies reorg during scan. We deal with that below */
+                                }
+                            }
+                        }
+                    }
+                }
+
+                if n_handles == 0 || empty_scripts >= stop_gap {
+                    break;
+                }
+            }
+
+            if let Some(last_active_index) = last_active_index {
+                last_active_indices.insert(keychain, last_active_index);
+            }
+        }
+
+        for txid in txids.into_iter() {
+            let (tx, tx_status) = match (self.get_tx(&txid)?, self.get_tx_status(&txid)?) {
+                (Some(tx), Some(tx_status)) => (tx, tx_status),
+                _ => continue,
+            };
+
+            let confirmation_time = map_confirmation_time(&tx_status, tip_at_start.height);
+
+            if let Err(failure) = update.insert_tx(tx, confirmation_time) {
+                use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
+                match failure {
+                    InsertTxError::Chain(TxTooHigh { .. }) => {
+                        unreachable!("chain position already checked earlier")
+                    }
+                    InsertTxError::Chain(TxMovedUnexpectedly { .. })
+                    | InsertTxError::UnresolvableConflict(_) => {
+                        /* implies reorg during scan. We deal with that below */
+                    }
+                }
+            }
+        }
+
+        for op in outpoints.into_iter() {
+            let mut op_txs = Vec::with_capacity(2);
+            if let (Some(tx), Some(tx_status)) =
+                (self.get_tx(&op.txid)?, self.get_tx_status(&op.txid)?)
+            {
+                op_txs.push((tx, tx_status));
+                if let Some(OutputStatus {
+                    txid: Some(txid),
+                    status: Some(spend_status),
+                    ..
+                }) = self.get_output_status(&op.txid, op.vout as _)?
+                {
+                    if let Some(spend_tx) = self.get_tx(&txid)? {
+                        op_txs.push((spend_tx, spend_status));
+                    }
+                }
+            }
+
+            for (tx, status) in op_txs {
+                let confirmation_time = map_confirmation_time(&status, tip_at_start.height);
+
+                if let Err(failure) = update.insert_tx(tx, confirmation_time) {
+                    use bdk_chain::{chain_graph::InsertTxError, sparse_chain::InsertTxError::*};
+                    match failure {
+                        InsertTxError::Chain(TxTooHigh { .. }) => {
+                            unreachable!("chain position already checked earlier")
+                        }
+                        InsertTxError::Chain(TxMovedUnexpectedly { .. })
+                        | InsertTxError::UnresolvableConflict(_) => {
+                            /* implies reorg during scan. We deal with that below */
+                        }
+                    }
+                }
+            }
+        }
+
+        let reorg_occurred = {
+            if let Some(checkpoint) = update.chain().latest_checkpoint() {
+                self.get_block_hash(checkpoint.height)? != checkpoint.hash
+            } else {
+                false
+            }
+        };
+
+        if reorg_occurred {
+            // A reorg occurred so let's find out where all the txids we found are in the chain now.
+            // XXX: collect required because of weird type naming issues
+            let txids_found = update
+                .chain()
+                .txids()
+                .map(|(_, txid)| *txid)
+                .collect::<Vec<_>>();
+            scan.update = EsploraExt::scan_without_keychain(
+                self,
+                local_chain,
+                [],
+                txids_found,
+                [],
+                parallel_requests,
+            )?;
+        }
+
+        Ok(scan)
+    }
+}
+
+fn map_confirmation_time(tx_status: &TxStatus, height_at_start: u32) -> ConfirmationTime {
+    match (tx_status.block_time, tx_status.block_height) {
+        (Some(time), Some(height)) if height <= height_at_start => {
+            ConfirmationTime::Confirmed { height, time }
+        }
+        _ => ConfirmationTime::Unconfirmed,
+    }
+}
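As a usage sketch (not part of this commit): a consumer crate that depends on both bdk_esplora and bdk_chain could drive the trait above roughly as follows. The testnet endpoint, the empty script/txid/outpoint inputs, and the choice of 5 parallel requests are placeholder assumptions; error handling is reduced to `?`.

use std::collections::BTreeMap;

use bdk_chain::bitcoin::{BlockHash, Script};
use bdk_esplora::{esplora_client, EsploraExt};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Blocking client; the endpoint mirrors the esplora example binary added below.
    let client =
        esplora_client::Builder::new("https://mempool.space/testnet/api").build_blocking()?;

    // Script pubkeys to look up; a real wallet would take these from its keychain tracker.
    let spks: Vec<Script> = vec![];

    // No checkpoints known locally yet, so start from an empty local chain.
    let local_chain = BTreeMap::<u32, BlockHash>::new();

    // Fetch a ChainGraph update without keeping track of keychain indices.
    let chain_graph = client.scan_without_keychain(
        &local_chain,
        spks,
        core::iter::empty(), // txids whose chain position we want re-checked
        core::iter::empty(), // outpoints whose spend status we want
        5,                   // parallel_requests
    )?;

    println!("update holds {} txids", chain_graph.chain().txids().count());
    Ok(())
}

The keychain-aware `scan` works the same way but takes a BTreeMap of keychains to script pubkey iterators plus a stop_gap, and returns a KeychainScan carrying the last active index per keychain, as the esplora example further below shows.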
diff --git a/crates/file_store/Cargo.toml b/crates/file_store/Cargo.toml
index 1670cc09d2a42ef0c132c3a677c1df1816f0cb83..2a5358253ff749eb13ec342b6892a2e6c74f5a58 100644 (file)
@@ -5,6 +5,9 @@ edition = "2021"
 license = "MIT OR Apache-2.0"
 
 [dependencies]
-bdk_chain = { path = "../bdk_chain", version = "0.3", features = [ "serde", "miniscript" ] }
+bdk_chain = { path = "../chain", version = "0.3", features = [ "serde", "miniscript" ] }
 bincode = { version = "2.0.0-rc.2", features = [ "serde" ] }
 serde = { version = "1", features = ["derive"] }
+
+[dev-dependencies]
+tempfile = "3"
diff --git a/crates/file_store/tests/test_file_store.rs b/crates/file_store/tests/test_file_store.rs
index 5230c097f1fdf9f47360cd2c7db1a7ee7f2dd163..5842c3fd85a2563328f9001fc5499978260a72e2 100644 (file)
@@ -89,16 +89,16 @@ fn new_fails_if_file_is_too_short() {
 
 #[test]
 fn new_fails_if_magic_bytes_are_invalid() {
-    let invalid_magic_mnemonic = "ldkfs0000000";
+    let invalid_magic_bytes = "ldkfs0000000";
 
     let path = TempPath::new();
     path.open()
-        .write_all(invalid_magic_mnemonic.as_bytes())
+        .write_all(invalid_magic_bytes.as_bytes())
         .expect("should write");
 
     match KeychainStore::<TestKeychain, TxHeight, Transaction>::new(path.open()) {
         Err(FileError::InvalidMagicBytes(b)) => {
-            assert_eq!(b, invalid_magic_mnemonic.as_bytes())
+            assert_eq!(b, invalid_magic_bytes.as_bytes())
         }
         unexpected => panic!("unexpected result: {:?}", unexpected),
     };
diff --git a/example-crates/keychain_tracker_electrum_example/.gitignore b/example-crates/keychain_tracker_electrum_example/.gitignore
new file mode 100644 (file)
index 0000000..ea8c4bf
--- /dev/null
@@ -0,0 +1 @@
+/target
diff --git a/example-crates/keychain_tracker_electrum_example/Cargo.toml b/example-crates/keychain_tracker_electrum_example/Cargo.toml
new file mode 100644 (file)
index 0000000..4eceaa7
--- /dev/null
@@ -0,0 +1,9 @@
+[package]
+name = "keychain_tracker_electrum_example"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde"] }
+bdk_electrum = { path = "../../crates/electrum" }
+keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli"}
diff --git a/example-crates/keychain_tracker_electrum_example/README.md b/example-crates/keychain_tracker_electrum_example/README.md
new file mode 100644 (file)
index 0000000..b8bdea2
--- /dev/null
@@ -0,0 +1,6 @@
+# Keychain Tracker with electrum
+
+This example shows how you use the `KeychainTracker` from `bdk_chain` to create a simple command
+line wallet.
+
+
diff --git a/example-crates/keychain_tracker_electrum_example/src/main.rs b/example-crates/keychain_tracker_electrum_example/src/main.rs
new file mode 100644 (file)
index 0000000..0fe27fd
--- /dev/null
@@ -0,0 +1,248 @@
+use bdk_chain::bitcoin::{Address, OutPoint, Txid};
+use bdk_electrum::bdk_chain::{self, bitcoin::Network, TxHeight};
+use bdk_electrum::{
+    electrum_client::{self, ElectrumApi},
+    ElectrumExt, ElectrumUpdate,
+};
+use keychain_tracker_example_cli::{
+    self as cli,
+    anyhow::{self, Context},
+    clap::{self, Parser, Subcommand},
+};
+use std::{collections::BTreeMap, fmt::Debug, io, io::Write};
+
+#[derive(Subcommand, Debug, Clone)]
+enum ElectrumCommands {
+    /// Scans the addresses in the wallet using the electrum API.
+    Scan {
+        /// When a gap this large has been found for a keychain it will stop.
+        #[clap(long, default_value = "5")]
+        stop_gap: usize,
+        #[clap(flatten)]
+        scan_options: ScanOptions,
+    },
+    /// Scans particular addresses using the electrum API
+    Sync {
+        /// Scan all the unused addresses
+        #[clap(long)]
+        unused_spks: bool,
+        /// Scan every address that you have derived
+        #[clap(long)]
+        all_spks: bool,
+        /// Scan unspent outpoints for spends or changes to confirmation status of residing tx
+        #[clap(long)]
+        utxos: bool,
+        /// Scan unconfirmed transactions for updates
+        #[clap(long)]
+        unconfirmed: bool,
+        #[clap(flatten)]
+        scan_options: ScanOptions,
+    },
+}
+
+#[derive(Parser, Debug, Clone, PartialEq)]
+pub struct ScanOptions {
+    /// Set batch size for each script_history call to electrum client
+    #[clap(long, default_value = "25")]
+    pub batch_size: usize,
+}
+
+fn main() -> anyhow::Result<()> {
+    let (args, keymap, mut tracker, mut db) = cli::init::<ElectrumCommands, _>()?;
+
+    let electrum_url = match args.network {
+        Network::Bitcoin => "ssl://electrum.blockstream.info:50002",
+        Network::Testnet => "ssl://electrum.blockstream.info:60002",
+        Network::Regtest => "tcp://localhost:60401",
+        Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001",
+    };
+    let config = electrum_client::Config::builder()
+        .validate_domain(match args.network {
+            Network::Bitcoin => true,
+            _ => false,
+        })
+        .build();
+
+    let client = electrum_client::Client::from_config(electrum_url, config)?;
+
+    let electrum_cmd = match args.command {
+        cli::Commands::ChainSpecific(electrum_cmd) => electrum_cmd,
+        general_command => {
+            return cli::handle_commands(
+                general_command,
+                |transaction| {
+                    let _txid = client.transaction_broadcast(transaction)?;
+                    Ok(())
+                },
+                &mut tracker,
+                &mut db,
+                args.network,
+                &keymap,
+            )
+        }
+    };
+
+    let response = match electrum_cmd {
+        ElectrumCommands::Scan {
+            stop_gap,
+            scan_options: scan_option,
+        } => {
+            let (spk_iterators, local_chain) = {
+                // Get a short lock on the tracker to get the spks iterators
+                // and local chain state
+                let tracker = &*tracker.lock().unwrap();
+                let spk_iterators = tracker
+                    .txout_index
+                    .spks_of_all_keychains()
+                    .into_iter()
+                    .map(|(keychain, iter)| {
+                        let mut first = true;
+                        let spk_iter = iter.inspect(move |(i, _)| {
+                            if first {
+                                eprint!("\nscanning {}: ", keychain);
+                                first = false;
+                            }
+
+                            eprint!("{} ", i);
+                            let _ = io::stdout().flush();
+                        });
+                        (keychain, spk_iter)
+                    })
+                    .collect::<BTreeMap<_, _>>();
+                let local_chain = tracker.chain().checkpoints().clone();
+                (spk_iterators, local_chain)
+            };
+
+            // we scan the spks **without** a lock on the tracker
+            client.scan(
+                &local_chain,
+                spk_iterators,
+                core::iter::empty(),
+                core::iter::empty(),
+                stop_gap,
+                scan_option.batch_size,
+            )?
+        }
+        ElectrumCommands::Sync {
+            mut unused_spks,
+            mut utxos,
+            mut unconfirmed,
+            all_spks,
+            scan_options,
+        } => {
+            // Get a short lock on the tracker to get the spks we're interested in
+            let tracker = tracker.lock().unwrap();
+
+            if !(all_spks || unused_spks || utxos || unconfirmed) {
+                unused_spks = true;
+                unconfirmed = true;
+                utxos = true;
+            } else if all_spks {
+                unused_spks = false;
+            }
+
+            let mut spks: Box<dyn Iterator<Item = bdk_chain::bitcoin::Script>> =
+                Box::new(core::iter::empty());
+            if all_spks {
+                let all_spks = tracker
+                    .txout_index
+                    .all_spks()
+                    .iter()
+                    .map(|(k, v)| (k.clone(), v.clone()))
+                    .collect::<Vec<_>>();
+                spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
+                    eprintln!("scanning {:?}", index);
+                    script
+                })));
+            }
+            if unused_spks {
+                let unused_spks = tracker
+                    .txout_index
+                    .unused_spks(..)
+                    .map(|(k, v)| (k.clone(), v.clone()))
+                    .collect::<Vec<_>>();
+                spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
+                    eprintln!(
+                        "Checking if address {} {:?} has been used",
+                        Address::from_script(&script, args.network).unwrap(),
+                        index
+                    );
+
+                    script
+                })));
+            }
+
+            let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
+
+            if utxos {
+                let utxos = tracker
+                    .full_utxos()
+                    .map(|(_, utxo)| utxo)
+                    .collect::<Vec<_>>();
+                outpoints = Box::new(
+                    utxos
+                        .into_iter()
+                        .inspect(|utxo| {
+                            eprintln!(
+                                "Checking if outpoint {} (value: {}) has been spent",
+                                utxo.outpoint, utxo.txout.value
+                            );
+                        })
+                        .map(|utxo| utxo.outpoint),
+                );
+            };
+
+            let mut txids: Box<dyn Iterator<Item = Txid>> = Box::new(core::iter::empty());
+
+            if unconfirmed {
+                let unconfirmed_txids = tracker
+                    .chain()
+                    .range_txids_by_height(TxHeight::Unconfirmed..)
+                    .map(|(_, txid)| *txid)
+                    .collect::<Vec<_>>();
+
+                txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| {
+                    eprintln!("Checking if {} is confirmed yet", txid);
+                }));
+            }
+
+            let local_chain = tracker.chain().checkpoints().clone();
+            // drop lock on tracker
+            drop(tracker);
+
+            // we scan the spks **without** a lock on the tracker
+            ElectrumUpdate {
+                chain_update: client
+                    .scan_without_keychain(
+                        &local_chain,
+                        spks,
+                        txids,
+                        outpoints,
+                        scan_options.batch_size,
+                    )
+                    .context("scanning the blockchain")?,
+                ..Default::default()
+            }
+        }
+    };
+
+    let missing_txids = response.missing_full_txs(&*tracker.lock().unwrap());
+
+    // fetch the missing full transactions **without** a lock on the tracker
+    let new_txs = client
+        .batch_transaction_get(missing_txids)
+        .context("fetching full transactions")?;
+
+    {
+        // Get a final short lock to apply the changes
+        let mut tracker = tracker.lock().unwrap();
+        let changeset = {
+            let scan = response.into_keychain_scan(new_txs, &*tracker)?;
+            tracker.determine_changeset(&scan)?
+        };
+        db.lock().unwrap().append_changeset(&changeset)?;
+        tracker.apply_changeset(changeset);
+    };
+
+    Ok(())
+}
diff --git a/example-crates/keychain_tracker_esplora_example/.gitignore b/example-crates/keychain_tracker_esplora_example/.gitignore
new file mode 100644 (file)
index 0000000..8359723
--- /dev/null
@@ -0,0 +1,3 @@
+/target
+Cargo.lock
+.bdk_example_db
diff --git a/example-crates/keychain_tracker_esplora_example/Cargo.toml b/example-crates/keychain_tracker_esplora_example/Cargo.toml
new file mode 100644 (file)
index 0000000..57e9d9c
--- /dev/null
@@ -0,0 +1,11 @@
+[package]
+name = "keychain_tracker_esplora_example"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde", "miniscript"] }
+bdk_esplora = { path = "../../crates/esplora" }
+keychain_tracker_example_cli = { path = "../keychain_tracker_example_cli" }
diff --git a/example-crates/keychain_tracker_esplora_example/src/main.rs b/example-crates/keychain_tracker_esplora_example/src/main.rs
new file mode 100644 (file)
index 0000000..fba4a3b
--- /dev/null
@@ -0,0 +1,241 @@
+use bdk_chain::bitcoin::{Address, OutPoint, Txid};
+use bdk_chain::{bitcoin::Network, TxHeight};
+use bdk_esplora::esplora_client;
+use bdk_esplora::EsploraExt;
+
+use std::io::{self, Write};
+
+use keychain_tracker_example_cli::{
+    self as cli,
+    anyhow::{self, Context},
+    clap::{self, Parser, Subcommand},
+};
+
+#[derive(Subcommand, Debug, Clone)]
+enum EsploraCommands {
+    /// Scans the addresses in the wallet using esplora API.
+    Scan {
+        /// When a gap this large has been found for a keychain it will stop.
+        #[clap(long, default_value = "5")]
+        stop_gap: usize,
+
+        #[clap(flatten)]
+        scan_options: ScanOptions,
+    },
+    /// Scans particular addresses using esplora API
+    Sync {
+        /// Scan all the unused addresses
+        #[clap(long)]
+        unused_spks: bool,
+        /// Scan every address that you have derived
+        #[clap(long)]
+        all_spks: bool,
+        /// Scan unspent outpoints for spends or changes to confirmation status of residing tx
+        #[clap(long)]
+        utxos: bool,
+        /// Scan unconfirmed transactions for updates
+        #[clap(long)]
+        unconfirmed: bool,
+
+        #[clap(flatten)]
+        scan_options: ScanOptions,
+    },
+}
+
+#[derive(Parser, Debug, Clone, PartialEq)]
+pub struct ScanOptions {
+    #[clap(long, default_value = "5")]
+    pub parallel_requests: usize,
+}
+
+fn main() -> anyhow::Result<()> {
+    let (args, keymap, keychain_tracker, db) = cli::init::<EsploraCommands, _>()?;
+    let esplora_url = match args.network {
+        Network::Bitcoin => "https://mempool.space/api",
+        Network::Testnet => "https://mempool.space/testnet/api",
+        Network::Regtest => "http://localhost:3002",
+        Network::Signet => "https://mempool.space/signet/api",
+    };
+
+    let client = esplora_client::Builder::new(esplora_url).build_blocking()?;
+
+    let esplora_cmd = match args.command {
+        cli::Commands::ChainSpecific(esplora_cmd) => esplora_cmd,
+        general_command => {
+            return cli::handle_commands(
+                general_command,
+                |transaction| Ok(client.broadcast(transaction)?),
+                &keychain_tracker,
+                &db,
+                args.network,
+                &keymap,
+            )
+        }
+    };
+
+    match esplora_cmd {
+        EsploraCommands::Scan {
+            stop_gap,
+            scan_options,
+        } => {
+            let (spk_iterators, local_chain) = {
+                // Get a short lock on the tracker to get the spks iterators
+                // and local chain state
+                let tracker = &*keychain_tracker.lock().unwrap();
+                let spk_iterators = tracker
+                    .txout_index
+                    .spks_of_all_keychains()
+                    .into_iter()
+                    .map(|(keychain, iter)| {
+                        let mut first = true;
+                        (
+                            keychain,
+                            iter.inspect(move |(i, _)| {
+                                if first {
+                                    eprint!("\nscanning {}: ", keychain);
+                                    first = false;
+                                }
+
+                                eprint!("{} ", i);
+                                let _ = io::stdout().flush();
+                            }),
+                        )
+                    })
+                    .collect();
+
+                let local_chain = tracker.chain().checkpoints().clone();
+                (spk_iterators, local_chain)
+            };
+
+            // we scan the iterators **without** a lock on the tracker
+            let wallet_scan = client
+                .scan(
+                    &local_chain,
+                    spk_iterators,
+                    core::iter::empty(),
+                    core::iter::empty(),
+                    stop_gap,
+                    scan_options.parallel_requests,
+                )
+                .context("scanning the blockchain")?;
+            eprintln!();
+
+            {
+                // we take a short lock to apply results to tracker and db
+                let tracker = &mut *keychain_tracker.lock().unwrap();
+                let db = &mut *db.lock().unwrap();
+                let changeset = tracker.apply_update(wallet_scan)?;
+                db.append_changeset(&changeset)?;
+            }
+        }
+        EsploraCommands::Sync {
+            mut unused_spks,
+            mut utxos,
+            mut unconfirmed,
+            all_spks,
+            scan_options,
+        } => {
+            // Get a short lock on the tracker to get the spks we're interested in
+            let tracker = keychain_tracker.lock().unwrap();
+
+            if !(all_spks || unused_spks || utxos || unconfirmed) {
+                unused_spks = true;
+                unconfirmed = true;
+                utxos = true;
+            } else if all_spks {
+                unused_spks = false;
+            }
+
+            let mut spks: Box<dyn Iterator<Item = bdk_chain::bitcoin::Script>> =
+                Box::new(core::iter::empty());
+            if all_spks {
+                let all_spks = tracker
+                    .txout_index
+                    .all_spks()
+                    .iter()
+                    .map(|(k, v)| (k.clone(), v.clone()))
+                    .collect::<Vec<_>>();
+                spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
+                    eprintln!("scanning {:?}", index);
+                    script
+                })));
+            }
+            if unused_spks {
+                let unused_spks = tracker
+                    .txout_index
+                    .unused_spks(..)
+                    .map(|(k, v)| (k.clone(), v.clone()))
+                    .collect::<Vec<_>>();
+                spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
+                    eprintln!(
+                        "Checking if address {} {:?} has been used",
+                        Address::from_script(&script, args.network).unwrap(),
+                        index
+                    );
+
+                    script
+                })));
+            }
+
+            let mut outpoints: Box<dyn Iterator<Item = OutPoint>> = Box::new(core::iter::empty());
+
+            if utxos {
+                let utxos = tracker
+                    .full_utxos()
+                    .map(|(_, utxo)| utxo)
+                    .collect::<Vec<_>>();
+                outpoints = Box::new(
+                    utxos
+                        .into_iter()
+                        .inspect(|utxo| {
+                            eprintln!(
+                                "Checking if outpoint {} (value: {}) has been spent",
+                                utxo.outpoint, utxo.txout.value
+                            );
+                        })
+                        .map(|utxo| utxo.outpoint),
+                );
+            };
+
+            let mut txids: Box<dyn Iterator<Item = Txid>> = Box::new(core::iter::empty());
+
+            if unconfirmed {
+                let unconfirmed_txids = tracker
+                    .chain()
+                    .range_txids_by_height(TxHeight::Unconfirmed..)
+                    .map(|(_, txid)| *txid)
+                    .collect::<Vec<_>>();
+
+                txids = Box::new(unconfirmed_txids.into_iter().inspect(|txid| {
+                    eprintln!("Checking if {} is confirmed yet", txid);
+                }));
+            }
+
+            let local_chain = tracker.chain().checkpoints().clone();
+
+            // drop lock on tracker
+            drop(tracker);
+
+            // we scan the desired spks **without** a lock on the tracker
+            let scan = client
+                .scan_without_keychain(
+                    &local_chain,
+                    spks,
+                    txids,
+                    outpoints,
+                    scan_options.parallel_requests,
+                )
+                .context("scanning the blockchain")?;
+
+            {
+                // we take a short lock to apply the results to the tracker and db
+                let tracker = &mut *keychain_tracker.lock().unwrap();
+                let changeset = tracker.apply_update(scan.into())?;
+                let db = &mut *db.lock().unwrap();
+                db.append_changeset(&changeset)?;
+            }
+        }
+    }
+
+    Ok(())
+}
diff --git a/example-crates/keychain_tracker_example_cli/.gitignore b/example-crates/keychain_tracker_example_cli/.gitignore
new file mode 100644 (file)
index 0000000..ea8c4bf
--- /dev/null
@@ -0,0 +1 @@
+/target
diff --git a/example-crates/keychain_tracker_example_cli/Cargo.toml b/example-crates/keychain_tracker_example_cli/Cargo.toml
new file mode 100644 (file)
index 0000000..e2565e4
--- /dev/null
@@ -0,0 +1,16 @@
+[package]
+name = "keychain_tracker_example_cli"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+[dependencies]
+bdk_chain = { path = "../../crates/chain", version = "0.3", features = ["serde", "miniscript"]}
+bdk_file_store = { path = "../../crates/file_store" }
+bdk_tmp_plan = { path = "../../nursery/tmp_plan" }
+bdk_coin_select = { path = "../../nursery/coin_select" }
+
+clap = { version = "4", features = ["derive", "env"] }
+anyhow = "1"
+serde = { version = "1", features = ["derive"] }
+serde_json = { version = "^1.0" }
diff --git a/example-crates/keychain_tracker_example_cli/README.md b/example-crates/keychain_tracker_example_cli/README.md
new file mode 100644 (file)
index 0000000..1d9370d
--- /dev/null
@@ -0,0 +1 @@
+Provides common command line processing logic between examples using the `KeychainTracker`
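As a wiring sketch (not from this commit): each chain-source example is expected to build on this crate roughly as below. It assumes a bdk_chain dependency alongside keychain_tracker_example_cli, assumes the second type parameter of `cli::init` selects the chain-position type (here `TxHeight`), and stubs out broadcasting, so treat it as illustrative rather than definitive.

use keychain_tracker_example_cli::{
    self as cli,
    anyhow,
    clap::{self, Subcommand},
};

// Stand-in for the chain-source-specific subcommands each example crate defines.
#[derive(Subcommand, Debug, Clone)]
enum CustomCommands {
    /// Placeholder; a real example puts its scan/sync commands here.
    Noop,
}

fn main() -> anyhow::Result<()> {
    // Parse CLI args and load the descriptor keymap, keychain tracker, and file-store db.
    // Assumption: the second type parameter picks the chain-position type.
    let (args, keymap, tracker, db) = cli::init::<CustomCommands, bdk_chain::TxHeight>()?;

    match args.command {
        cli::Commands::ChainSpecific(CustomCommands::Noop) => Ok(()),
        general_command => cli::handle_commands(
            general_command,
            // A real example hands the transaction to its chain source here.
            |_transaction| Ok(()),
            &tracker,
            &db,
            args.network,
            &keymap,
        ),
    }
}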
diff --git a/example-crates/keychain_tracker_example_cli/src/lib.rs b/example-crates/keychain_tracker_example_cli/src/lib.rs
new file mode 100644 (file)
index 0000000..019abc8
--- /dev/null
@@ -0,0 +1,688 @@
+pub extern crate anyhow;
+use anyhow::{anyhow, Context, Result};
+use bdk_chain::{
+    bitcoin::{
+        secp256k1::Secp256k1,
+        util::sighash::{Prevouts, SighashCache},
+        Address, LockTime, Network, Sequence, Transaction, TxIn, TxOut,
+    },
+    chain_graph::InsertTxError,
+    keychain::{DerivationAdditions, KeychainChangeSet, KeychainTracker},
+    miniscript::{
+        descriptor::{DescriptorSecretKey, KeyMap},
+        Descriptor, DescriptorPublicKey,
+    },
+    sparse_chain::{self, ChainPosition},
+    DescriptorExt, FullTxOut,
+};
+use bdk_coin_select::{coin_select_bnb, CoinSelector, CoinSelectorOpt, WeightedValue};
+use bdk_file_store::KeychainStore;
+use clap::{Parser, Subcommand};
+use std::{
+    cmp::Reverse, collections::HashMap, fmt::Debug, path::PathBuf, sync::Mutex, time::Duration,
+};
+
+pub use bdk_file_store;
+pub use clap;
+
+#[derive(Parser)]
+#[clap(author, version, about, long_about = None)]
+#[clap(propagate_version = true)]
+pub struct Args<C: clap::Subcommand> {
+    #[clap(env = "DESCRIPTOR")]
+    pub descriptor: String,
+    #[clap(env = "CHANGE_DESCRIPTOR")]
+    pub change_descriptor: Option<String>,
+
+    #[clap(env = "BITCOIN_NETWORK", long, default_value = "signet")]
+    pub network: Network,
+
+    #[clap(env = "BDK_DB_PATH", long, default_value = ".bdk_example_db")]
+    pub db_path: PathBuf,
+
+    #[clap(env = "BDK_CP_LIMIT", long, default_value = "20")]
+    pub cp_limit: usize,
+
+    #[clap(subcommand)]
+    pub command: Commands<C>,
+}
+
+#[derive(Subcommand, Debug, Clone)]
+pub enum Commands<C: clap::Subcommand> {
+    #[clap(flatten)]
+    ChainSpecific(C),
+    /// Address generation and inspection
+    Address {
+        #[clap(subcommand)]
+        addr_cmd: AddressCmd,
+    },
+    /// Get the wallet balance
+    Balance,
+    /// TxOut related commands
+    #[clap(name = "txout")]
+    TxOut {
+        #[clap(subcommand)]
+        txout_cmd: TxOutCmd,
+    },
+    /// Send coins to an address
+    Send {
+        value: u64,
+        address: Address,
+        #[clap(short, default_value = "largest-first")]
+        coin_select: CoinSelectionAlgo,
+    },
+}
+
+#[derive(Clone, Debug)]
+pub enum CoinSelectionAlgo {
+    LargestFirst,
+    SmallestFirst,
+    OldestFirst,
+    NewestFirst,
+    BranchAndBound,
+}
+
+impl Default for CoinSelectionAlgo {
+    fn default() -> Self {
+        Self::LargestFirst
+    }
+}
+
+impl core::str::FromStr for CoinSelectionAlgo {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        use CoinSelectionAlgo::*;
+        Ok(match s {
+            "largest-first" => LargestFirst,
+            "smallest-first" => SmallestFirst,
+            "oldest-first" => OldestFirst,
+            "newest-first" => NewestFirst,
+            "bnb" => BranchAndBound,
+            unknown => return Err(anyhow!("unknown coin selection algorithm '{}'", unknown)),
+        })
+    }
+}
+
+impl core::fmt::Display for CoinSelectionAlgo {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        use CoinSelectionAlgo::*;
+        write!(
+            f,
+            "{}",
+            match self {
+                LargestFirst => "largest-first",
+                SmallestFirst => "smallest-first",
+                OldestFirst => "oldest-first",
+                NewestFirst => "newest-first",
+                BranchAndBound => "bnb",
+            }
+        )
+    }
+}
+
+#[derive(Subcommand, Debug, Clone)]
+pub enum AddressCmd {
+    /// Get the next unused address
+    Next,
+    /// Get a new address regardless of whether the existing ones have been used
+    New,
+    /// List all addresses
+    List {
+        #[clap(long)]
+        change: bool,
+    },
+    Index,
+}
+
+#[derive(Subcommand, Debug, Clone)]
+pub enum TxOutCmd {
+    List {
+        /// Return only spent outputs
+        #[clap(short, long)]
+        spent: bool,
+        /// Return only unspent outputs
+        #[clap(short, long)]
+        unspent: bool,
+        /// Return only confirmed outputs
+        #[clap(long)]
+        confirmed: bool,
+        /// Return only unconfirmed outputs
+        #[clap(long)]
+        unconfirmed: bool,
+    },
+}
+
+#[derive(
+    Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, serde::Deserialize, serde::Serialize,
+)]
+pub enum Keychain {
+    External,
+    Internal,
+}
+
+impl core::fmt::Display for Keychain {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Keychain::External => write!(f, "external"),
+            Keychain::Internal => write!(f, "internal"),
+        }
+    }
+}
+
+/// A structure defining the output of an AddressCmd execution.
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct AddrsOutput {
+    keychain: String,
+    index: u32,
+    addrs: Address,
+    used: bool,
+}
+
+pub fn run_address_cmd<P>(
+    tracker: &Mutex<KeychainTracker<Keychain, P>>,
+    db: &Mutex<KeychainStore<Keychain, P>>,
+    addr_cmd: AddressCmd,
+    network: Network,
+) -> Result<()>
+where
+    P: bdk_chain::sparse_chain::ChainPosition,
+    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
+{
+    let mut tracker = tracker.lock().unwrap();
+    let txout_index = &mut tracker.txout_index;
+
+    let addr_cmmd_output = match addr_cmd {
+        AddressCmd::Next => Some(txout_index.next_unused_spk(&Keychain::External)),
+        AddressCmd::New => Some(txout_index.reveal_next_spk(&Keychain::External)),
+        _ => None,
+    };
+
+    if let Some(((index, spk), additions)) = addr_cmmd_output {
+        let mut db = db.lock().unwrap();
+        // update database since we're about to give out a new address
+        db.append_changeset(&additions.into())?;
+
+        let spk = spk.clone();
+        let address =
+            Address::from_script(&spk, network).expect("should always be able to derive address");
+        eprintln!("This is the address at index {}", index);
+        println!("{}", address);
+    }
+
+    match addr_cmd {
+        AddressCmd::Next | AddressCmd::New => {
+            /* covered */
+            Ok(())
+        }
+        AddressCmd::Index => {
+            for (keychain, derivation_index) in txout_index.last_revealed_indices() {
+                println!("{:?}: {}", keychain, derivation_index);
+            }
+            Ok(())
+        }
+        AddressCmd::List { change } => {
+            let target_keychain = match change {
+                true => Keychain::Internal,
+                false => Keychain::External,
+            };
+            for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) {
+                let address = Address::from_script(&spk, network)
+                    .expect("should always be able to derive address");
+                println!(
+                    "{:?} {} used:{}",
+                    index,
+                    address,
+                    txout_index.is_used(&(target_keychain, index))
+                );
+            }
+            Ok(())
+        }
+    }
+}
+
+pub fn run_balance_cmd<P: ChainPosition>(tracker: &Mutex<KeychainTracker<Keychain, P>>) {
+    let tracker = tracker.lock().unwrap();
+    let (confirmed, unconfirmed) =
+        tracker
+            .full_utxos()
+            .fold((0, 0), |(confirmed, unconfirmed), (_, utxo)| {
+                if utxo.chain_position.height().is_confirmed() {
+                    (confirmed + utxo.txout.value, unconfirmed)
+                } else {
+                    (confirmed, unconfirmed + utxo.txout.value)
+                }
+            });
+
+    println!("confirmed: {}", confirmed);
+    println!("unconfirmed: {}", unconfirmed);
+}
+
+pub fn run_txo_cmd<K: Debug + Clone + Ord, P: ChainPosition>(
+    txout_cmd: TxOutCmd,
+    tracker: &Mutex<KeychainTracker<K, P>>,
+    network: Network,
+) {
+    match txout_cmd {
+        TxOutCmd::List {
+            unspent,
+            spent,
+            confirmed,
+            unconfirmed,
+        } => {
+            let tracker = tracker.lock().unwrap();
+            let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent)
+            {
+                (true, false) => Box::new(tracker.full_utxos()),
+                (false, true) => Box::new(
+                    tracker
+                        .full_txouts()
+                        .filter(|(_, txout)| txout.spent_by.is_some()),
+                ),
+                _ => Box::new(tracker.full_txouts()),
+            };
+
+            let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> =
+                match (confirmed, unconfirmed) {
+                    (true, false) => Box::new(
+                        txouts.filter(|(_, txout)| txout.chain_position.height().is_confirmed()),
+                    ),
+                    (false, true) => Box::new(
+                        txouts.filter(|(_, txout)| !txout.chain_position.height().is_confirmed()),
+                    ),
+                    _ => txouts,
+                };
+
+            for (spk_index, full_txout) in txouts {
+                let address =
+                    Address::from_script(&full_txout.txout.script_pubkey, network).unwrap();
+
+                println!(
+                    "{:?} {} {} {} spent:{:?}",
+                    spk_index,
+                    full_txout.txout.value,
+                    full_txout.outpoint,
+                    address,
+                    full_txout.spent_by
+                )
+            }
+        }
+    }
+}
+
+pub fn create_tx<P: ChainPosition>(
+    value: u64,
+    address: Address,
+    coin_select: CoinSelectionAlgo,
+    keychain_tracker: &mut KeychainTracker<Keychain, P>,
+    keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
+) -> Result<(
+    Transaction,
+    Option<(DerivationAdditions<Keychain>, (Keychain, u32))>,
+)> {
+    let mut additions = DerivationAdditions::default();
+
+    let assets = bdk_tmp_plan::Assets {
+        keys: keymap.iter().map(|(pk, _)| pk.clone()).collect(),
+        ..Default::default()
+    };
+
+    // TODO use planning module
+    let mut candidates = planned_utxos(keychain_tracker, &assets).collect::<Vec<_>>();
+
+    // apply coin selection algorithm
+    match coin_select {
+        CoinSelectionAlgo::LargestFirst => {
+            candidates.sort_by_key(|(_, utxo)| Reverse(utxo.txout.value))
+        }
+        CoinSelectionAlgo::SmallestFirst => candidates.sort_by_key(|(_, utxo)| utxo.txout.value),
+        CoinSelectionAlgo::OldestFirst => {
+            candidates.sort_by_key(|(_, utxo)| utxo.chain_position.clone())
+        }
+        CoinSelectionAlgo::NewestFirst => {
+            candidates.sort_by_key(|(_, utxo)| Reverse(utxo.chain_position.clone()))
+        }
+        CoinSelectionAlgo::BranchAndBound => {}
+    }
+
+    // turn the txos we chose into a weight and value
+    let wv_candidates = candidates
+        .iter()
+        .map(|(plan, utxo)| {
+            WeightedValue::new(
+                utxo.txout.value,
+                plan.expected_weight() as _,
+                plan.witness_version().is_some(),
+            )
+        })
+        .collect();
+
+    let mut outputs = vec![TxOut {
+        value,
+        script_pubkey: address.script_pubkey(),
+    }];
+
+    let internal_keychain = if keychain_tracker
+        .txout_index
+        .keychains()
+        .get(&Keychain::Internal)
+        .is_some()
+    {
+        Keychain::Internal
+    } else {
+        Keychain::External
+    };
+
+    let ((change_index, change_script), change_additions) = keychain_tracker
+        .txout_index
+        .next_unused_spk(&internal_keychain);
+    additions.append(change_additions);
+
+    // Clone to drop the immutable reference.
+    let change_script = change_script.clone();
+
+    let change_plan = bdk_tmp_plan::plan_satisfaction(
+        &keychain_tracker
+            .txout_index
+            .keychains()
+            .get(&internal_keychain)
+            .expect("must exist")
+            .at_derivation_index(change_index),
+        &assets,
+    )
+    .expect("failed to obtain change plan");
+
+    let mut change_output = TxOut {
+        value: 0,
+        script_pubkey: change_script,
+    };
+
+    let cs_opts = CoinSelectorOpt {
+        target_feerate: 0.5,
+        min_drain_value: keychain_tracker
+            .txout_index
+            .keychains()
+            .get(&internal_keychain)
+            .expect("must exist")
+            .dust_value(),
+        ..CoinSelectorOpt::fund_outputs(
+            &outputs,
+            &change_output,
+            change_plan.expected_weight() as u32,
+        )
+    };
+
+    // TODO: How can we make it easy to shuffle in order of inputs and outputs here?
+    // apply coin selection by saying we need to fund these outputs
+    let mut coin_selector = CoinSelector::new(&wv_candidates, &cs_opts);
+
+    // just select coins in the order provided until we have enough
+    // only use first result (least waste)
+    let selection = match coin_select {
+        CoinSelectionAlgo::BranchAndBound => {
+            coin_select_bnb(Duration::from_secs(10), coin_selector.clone())
+                .map_or_else(|| coin_selector.select_until_finished(), |cs| cs.finish())?
+        }
+        _ => coin_selector.select_until_finished()?,
+    };
+    let (_, selection_meta) = selection.best_strategy();
+
+    // get the selected utxos
+    let selected_txos = selection.apply_selection(&candidates).collect::<Vec<_>>();
+
+    if let Some(drain_value) = selection_meta.drain_value {
+        change_output.value = drain_value;
+        // if the selection tells us to use change and the change value is sufficient we add it as an output
+        outputs.push(change_output)
+    }
+
+    let mut transaction = Transaction {
+        version: 0x02,
+        lock_time: keychain_tracker
+            .chain()
+            .latest_checkpoint()
+            .and_then(|block_id| LockTime::from_height(block_id.height).ok())
+            .unwrap_or(LockTime::ZERO)
+            .into(),
+        input: selected_txos
+            .iter()
+            .map(|(_, utxo)| TxIn {
+                previous_output: utxo.outpoint,
+                sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
+                ..Default::default()
+            })
+            .collect(),
+        output: outputs,
+    };
+
+    let prevouts = selected_txos
+        .iter()
+        .map(|(_, utxo)| utxo.txout.clone())
+        .collect::<Vec<_>>();
+    let sighash_prevouts = Prevouts::All(&prevouts);
+
+    // first, set any tx values required by the plans (e.g. sequence numbers) so that we don't change them while signing
+    for (i, (plan, _)) in selected_txos.iter().enumerate() {
+        if let Some(sequence) = plan.required_sequence() {
+            transaction.input[i].sequence = sequence
+        }
+    }
+
+    // create a short-lived clone of the transaction for computing sighashes
+    let _sighash_tx = transaction.clone();
+    let mut sighash_cache = SighashCache::new(&_sighash_tx);
+
+    for (i, (plan, _)) in selected_txos.iter().enumerate() {
+        let requirements = plan.requirements();
+        let mut auth_data = bdk_tmp_plan::SatisfactionMaterial::default();
+        assert!(
+            !requirements.requires_hash_preimages(),
+            "can't have hash pre-images since we didn't provide any"
+        );
+        assert!(
+            requirements.signatures.sign_with_keymap(
+                i,
+                &keymap,
+                &sighash_prevouts,
+                None,
+                None,
+                &mut sighash_cache,
+                &mut auth_data,
+                &Secp256k1::default(),
+            )?,
+            "we should have signed with this input"
+        );
+
+        match plan.try_complete(&auth_data) {
+            bdk_tmp_plan::PlanState::Complete {
+                final_script_sig,
+                final_script_witness,
+            } => {
+                if let Some(witness) = final_script_witness {
+                    transaction.input[i].witness = witness;
+                }
+
+                if let Some(script_sig) = final_script_sig {
+                    transaction.input[i].script_sig = script_sig;
+                }
+            }
+            bdk_tmp_plan::PlanState::Incomplete(_) => {
+                return Err(anyhow!(
+                    "we weren't able to complete the plan with our keys"
+                ));
+            }
+        }
+    }
+
+    let change_info = if selection_meta.drain_value.is_some() {
+        Some((additions, (internal_keychain, change_index)))
+    } else {
+        None
+    };
+
+    Ok((transaction, change_info))
+}
+
+pub fn handle_commands<C: clap::Subcommand, P>(
+    command: Commands<C>,
+    broadcast: impl FnOnce(&Transaction) -> Result<()>,
+    // We use Mutexes around these not because we need them for a simple CLI app, but to demonstrate
+    // how all the stuff we're doing can be thread safe and also to avoid holding locks across IO-bound work.
+    tracker: &Mutex<KeychainTracker<Keychain, P>>,
+    store: &Mutex<KeychainStore<Keychain, P>>,
+    network: Network,
+    keymap: &HashMap<DescriptorPublicKey, DescriptorSecretKey>,
+) -> Result<()>
+where
+    P: ChainPosition,
+    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
+{
+    match command {
+        // TODO: Make these functions return stuff
+        Commands::Address { addr_cmd } => run_address_cmd(&tracker, &store, addr_cmd, network),
+        Commands::Balance => {
+            run_balance_cmd(&tracker);
+            Ok(())
+        }
+        Commands::TxOut { txout_cmd } => {
+            run_txo_cmd(txout_cmd, &tracker, network);
+            Ok(())
+        }
+        Commands::Send {
+            value,
+            address,
+            coin_select,
+        } => {
+            let (transaction, change_index) = {
+                // take mutable ref to construct tx -- it is only open for a short time while building it.
+                let tracker = &mut *tracker.lock().unwrap();
+                let (transaction, change_info) =
+                    create_tx(value, address, coin_select, tracker, &keymap)?;
+
+                if let Some((change_derivation_changes, (change_keychain, index))) = change_info {
+                    // We must first persist to disk the fact that we've got a new address from the
+                    // change keychain so future scans will find the tx we're about to broadcast.
+                    // If we're unable to persist this then we don't want to broadcast.
+                    let store = &mut *store.lock().unwrap();
+                    store.append_changeset(&change_derivation_changes.into())?;
+
+                    // We don't want other callers/threads to use this address while we're using it
+                    // but we also don't want to scan the tx we just created because it's not
+                    // technically in the blockchain yet.
+                    tracker.txout_index.mark_used(&change_keychain, index);
+                    (transaction, Some((change_keychain, index)))
+                } else {
+                    (transaction, None)
+                }
+            };
+
+            match (broadcast)(&transaction) {
+                Ok(_) => {
+                    println!("Broadcasted Tx : {}", transaction.txid());
+                    let mut tracker = tracker.lock().unwrap();
+                    match tracker.insert_tx(transaction.clone(), P::unconfirmed()) {
+                        Ok(changeset) => {
+                            let store = &mut *store.lock().unwrap();
+                            // We know the tx is at least unconfirmed now. Note that if persisting here
+                            // fails it's not a big deal, since we can always find it again from the
+                            // blockchain.
+                            store.append_changeset(&changeset)?;
+                            Ok(())
+                        }
+                        Err(e) => match e {
+                            InsertTxError::Chain(e) => match e {
+                                // TODO: add insert_unconfirmed_tx to chain graph and sparse chain
+                                sparse_chain::InsertTxError::TxTooHigh { .. } => unreachable!("we are inserting at unconfirmed position"),
+                                sparse_chain::InsertTxError::TxMovedUnexpectedly { txid, original_pos, ..} => Err(anyhow!("the tx we created {} has already been confirmed at block {:?}", txid, original_pos)),
+                            },
+                            InsertTxError::UnresolvableConflict(e) => Err(e).context("another tx that conflicts with the one we tried to create has been confirmed"),
+                        }
+                    }
+                }
+                Err(e) => {
+                    let tracker = &mut *tracker.lock().unwrap();
+                    if let Some((keychain, index)) = change_index {
+                        // We failed to broadcast so allow our change address to be used in the future
+                        tracker.txout_index.unmark_used(&keychain, index);
+                    }
+                    Err(e.into())
+                }
+            }
+        }
+        Commands::ChainSpecific(_) => {
+            todo!("example code is meant to handle this!")
+        }
+    }
+}
+
+pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
+    Args<C>,
+    KeyMap,
+    // These don't need to have mutexes around them, but we want the CLI example code to make it obvious how
+    // they are thread safe, so this forces the example developer to show where they would lock and unlock things.
+    Mutex<KeychainTracker<Keychain, P>>,
+    Mutex<KeychainStore<Keychain, P>>,
+)>
+where
+    P: sparse_chain::ChainPosition,
+    KeychainChangeSet<Keychain, P>: serde::Serialize + serde::de::DeserializeOwned,
+{
+    let args = Args::<C>::parse();
+    let secp = Secp256k1::default();
+    let (descriptor, mut keymap) =
+        Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &args.descriptor)?;
+
+    let mut tracker = KeychainTracker::default();
+    tracker.set_checkpoint_limit(Some(args.cp_limit));
+
+    tracker
+        .txout_index
+        .add_keychain(Keychain::External, descriptor);
+
+    let internal = args
+        .change_descriptor
+        .clone()
+        .map(|descriptor| Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, &descriptor))
+        .transpose()?;
+    if let Some((internal_descriptor, internal_keymap)) = internal {
+        keymap.extend(internal_keymap);
+        tracker
+            .txout_index
+            .add_keychain(Keychain::Internal, internal_descriptor);
+    };
+
+    let mut db = KeychainStore::<Keychain, P>::new_from_path(args.db_path.as_path())?;
+
+    if let Err(e) = db.load_into_keychain_tracker(&mut tracker) {
+        match tracker.chain().latest_checkpoint()  {
+            Some(checkpoint) => eprintln!("Failed to load all changesets from {}. Last checkpoint was at height {}. Error: {}", args.db_path.display(), checkpoint.height, e),
+            None => eprintln!("Failed to load any checkpoints from {}: {}", args.db_path.display(), e),
+
+        }
+        eprintln!("âš  Consider running a rescan of chain data.");
+    }
+
+    Ok((args, keymap, Mutex::new(tracker), Mutex::new(db)))
+}
+
+pub fn planned_utxos<'a, AK: bdk_tmp_plan::CanDerive + Clone, P: ChainPosition>(
+    tracker: &'a KeychainTracker<Keychain, P>,
+    assets: &'a bdk_tmp_plan::Assets<AK>,
+) -> impl Iterator<Item = (bdk_tmp_plan::Plan<AK>, FullTxOut<P>)> + 'a {
+    tracker
+        .full_utxos()
+        .filter_map(|((keychain, derivation_index), full_txout)| {
+            Some((
+                bdk_tmp_plan::plan_satisfaction(
+                    &tracker
+                        .txout_index
+                        .keychains()
+                        .get(keychain)
+                        .expect("must exist since we have a utxo for it")
+                        .at_derivation_index(*derivation_index),
+                    assets,
+                )?,
+                full_txout,
+            ))
+        })
+}
diff --git a/nursery/README.md b/nursery/README.md
new file mode 100644 (file)
index 0000000..e136a20
--- /dev/null
@@ -0,0 +1,5 @@
+# Bitcoin Dev Kit Nursery
+
+This is a directory for crates that are experimental and have not been released yet.
+Keep in mind that they may never be released.
+Things in `/example-crates` may use them to demonstrate how things might look in the future.
diff --git a/nursery/coin_select/Cargo.toml b/nursery/coin_select/Cargo.toml
new file mode 100644 (file)
index 0000000..b135075
--- /dev/null
@@ -0,0 +1,11 @@
+[package]
+name = "bdk_coin_select"
+version = "0.0.1"
+authors = [ "LLFourn <lloyd.fourn@gmail.com>" ]
+
+[dependencies]
+bdk_chain = { version = "0.3", path = "../../crates/chain" }
+
+[features]
+default = ["std"]
+std = []
diff --git a/nursery/coin_select/src/bnb.rs b/nursery/coin_select/src/bnb.rs
new file mode 100644 (file)
index 0000000..f9a956c
--- /dev/null
@@ -0,0 +1,651 @@
+use super::*;
+
+/// Strategy in which we should branch.
+pub enum BranchStrategy {
+    /// We continue exploring subtrees of this node, starting with the inclusion branch.
+    Continue,
+    /// We continue exploring ONLY the omission branch of this node, skipping the inclusion branch.
+    SkipInclusion,
+    /// We skip both the inclusion and omission branches of this node.
+    SkipBoth,
+}
+
+impl BranchStrategy {
+    pub fn will_continue(&self) -> bool {
+        match self {
+            Self::Continue | Self::SkipInclusion => true,
+            _ => false,
+        }
+    }
+}
+
+/// Closure to decide the branching strategy, alongside a score (if the current selection is a
+/// candidate solution).
+pub type DecideStrategy<'c, S> = dyn Fn(&Bnb<'c, S>) -> (BranchStrategy, Option<S>);
+
+/// [`Bnb`] represents the current state of the BnB algorithm.
+pub struct Bnb<'c, S> {
+    pub pool: Vec<(usize, &'c WeightedValue)>,
+    pub pool_pos: usize,
+    pub best_score: S,
+
+    pub selection: CoinSelector<'c>,
+    pub rem_abs: u64,
+    pub rem_eff: i64,
+}
+
+impl<'c, S: Ord> Bnb<'c, S> {
+    /// Creates a new [`Bnb`].
+    pub fn new(selector: CoinSelector<'c>, pool: Vec<(usize, &'c WeightedValue)>, max: S) -> Self {
+        let (rem_abs, rem_eff) = pool.iter().fold((0, 0), |(abs, eff), (_, c)| {
+            (
+                abs + c.value,
+                eff + c.effective_value(selector.opts.target_feerate),
+            )
+        });
+
+        Self {
+            pool,
+            pool_pos: 0,
+            best_score: max,
+            selection: selector,
+            rem_abs,
+            rem_eff,
+        }
+    }
+
+    /// Turns our [`Bnb`] state into an iterator.
+    ///
+    /// `strategy` should assess our current selection/node and determine the branching strategy and
+    /// whether this selection is a candidate solution (if so, return the score of the selection).
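+    ///
+    /// A minimal shape sketch of a `strategy` closure (illustrative only; assumes `selector` and
+    /// `pool` are already prepared, and the real scoring and bounding logic lives in
+    /// [`coin_select_bnb`]):
+    ///
+    /// ```ignore
+    /// let bnb = Bnb::new(selector, pool, i64::MAX);
+    /// let strategy = |bnb: &Bnb<i64>| -> (BranchStrategy, Option<i64>) {
+    ///     if bnb.pool_pos < bnb.pool.len() {
+    ///         // keep exploring, starting with the inclusion branch
+    ///         (BranchStrategy::Continue, None)
+    ///     } else {
+    ///         // no candidate left to include: propose this selection's waste as a score
+    ///         (BranchStrategy::SkipBoth, Some(bnb.selection.selected_waste()))
+    ///     }
+    /// };
+    /// // each `Some(_)` item is a new best selection; the last one is the best found overall
+    /// let best = bnb.into_iter(&strategy).flatten().last();
+    /// ```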
+    pub fn into_iter<'f>(self, strategy: &'f DecideStrategy<'c, S>) -> BnbIter<'c, 'f, S> {
+        BnbIter {
+            state: self,
+            done: false,
+            strategy,
+        }
+    }
+
+    /// Attempts to backtrack to the previously selected node's omission branch; returns `false`
+    /// if there is nothing to backtrack to (no more solutions).
+    pub fn backtrack(&mut self) -> bool {
+        (0..self.pool_pos)
+            .rev()
+            .find(|&pos| {
+                let (index, candidate) = self.pool[pos];
+
+                if self.selection.is_selected(index) {
+                    // deselect last `pos`, so next round will check omission branch
+                    self.pool_pos = pos;
+                    self.selection.deselect(index);
+                    return true;
+                } else {
+                    self.rem_abs += candidate.value;
+                    self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
+                    return false;
+                }
+            })
+            .is_some()
+    }
+
+    /// Continue down this branch, skip inclusion branch if specified.
+    pub fn forward(&mut self, skip: bool) {
+        let (index, candidate) = self.pool[self.pool_pos];
+        self.rem_abs -= candidate.value;
+        self.rem_eff -= candidate.effective_value(self.selection.opts.target_feerate);
+
+        if !skip {
+            self.selection.select(index);
+        }
+    }
+
+    /// Compare advertised score with current best. New best will be the smaller value. Return true
+    /// if best is replaced.
+    pub fn advertise_new_score(&mut self, score: S) -> bool {
+        if score <= self.best_score {
+            self.best_score = score;
+            return true;
+        }
+        return false;
+    }
+}
+
+pub struct BnbIter<'c, 'f, S> {
+    state: Bnb<'c, S>,
+    done: bool,
+
+    /// Checks our current selection (node) and returns the branching strategy, alongside a score
+    /// (if the current selection is a candidate solution).
+    strategy: &'f DecideStrategy<'c, S>,
+}
+
+impl<'c, 'f, S: Ord + Copy + Display> Iterator for BnbIter<'c, 'f, S> {
+    type Item = Option<CoinSelector<'c>>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.done {
+            return None;
+        }
+
+        let (strategy, score) = (self.strategy)(&self.state);
+
+        let mut found_best = Option::<CoinSelector>::None;
+
+        if let Some(score) = score {
+            if self.state.advertise_new_score(score) {
+                found_best = Some(self.state.selection.clone());
+            }
+        }
+
+        debug_assert!(
+            !strategy.will_continue() || self.state.pool_pos < self.state.pool.len(),
+            "Faulty strategy implementation! Strategy suggested that we continue traversing, however we have already reached the end of the candidates pool! pool_len={}, pool_pos={}",
+            self.state.pool.len(), self.state.pool_pos,
+        );
+
+        match strategy {
+            BranchStrategy::Continue => {
+                self.state.forward(false);
+            }
+            BranchStrategy::SkipInclusion => {
+                self.state.forward(true);
+            }
+            BranchStrategy::SkipBoth => {
+                if !self.state.backtrack() {
+                    self.done = true;
+                }
+            }
+        };
+
+        // increment selection pool position for next round
+        self.state.pool_pos += 1;
+
+        if found_best.is_some() || !self.done {
+            Some(found_best)
+        } else {
+            // we have traversed all branches
+            None
+        }
+    }
+}
+
+/// Determines how we should limit rounds of branch and bound.
+pub enum BnbLimit {
+    Rounds(usize),
+    #[cfg(feature = "std")]
+    Duration(core::time::Duration),
+}
+
+impl From<usize> for BnbLimit {
+    fn from(v: usize) -> Self {
+        Self::Rounds(v)
+    }
+}
+
+#[cfg(feature = "std")]
+impl From<core::time::Duration> for BnbLimit {
+    fn from(v: core::time::Duration) -> Self {
+        Self::Duration(v)
+    }
+}
+
+/// This is a variation of the Branch and Bound Coin Selection algorithm designed by Murch (as seen
+/// in Bitcoin Core).
+///
+/// The differences are as follows:
+/// * In addition to working with effective values, we also work with absolute values.
+///   This way, we can use bounds of absolute values to enforce `min_absolute_fee` (which is used by
+///   RBF), and `max_extra_target` (which can be used to increase the possible solution set, given
+///   that the sender is okay with sending extra to the receiver).
+///
+/// Murch's Master Thesis: <https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf>
+/// Bitcoin Core Implementation: <https://github.com/bitcoin/bitcoin/blob/23.x/src/wallet/coinselection.cpp#L65>
+///
+/// TODO: Another optimization we could do is figure out the candidate with the smallest waste, and
+/// if we find a result with waste equal to this, we can just break.
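+///
+/// A rough usage sketch (illustrative; assumes `candidates` and `opts` have already been built,
+/// e.g. via [`CoinSelectorOpt::fund_outputs`]):
+///
+/// ```ignore
+/// let selector = CoinSelector::new(&candidates, &opts);
+/// // bound the search by rounds (`usize`), or by a `Duration` when the `std` feature is enabled
+/// if let Some(selected) = coin_select_bnb(100_000_usize, selector) {
+///     let selection = selected.finish()?;
+///     let (_kind, strategy) = selection.best_strategy();
+/// }
+/// ```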
+pub fn coin_select_bnb<L>(limit: L, selector: CoinSelector) -> Option<CoinSelector>
+where
+    L: Into<BnbLimit>,
+{
+    let opts = selector.opts;
+
+    // prepare pool of candidates to select from:
+    // * filter out candidates with negative/zero effective values
+    // * sort candidates by descending effective value
+    let pool = {
+        let mut pool = selector
+            .unselected()
+            .filter(|(_, c)| c.effective_value(opts.target_feerate) > 0)
+            .collect::<Vec<_>>();
+        pool.sort_unstable_by(|(_, a), (_, b)| {
+            let a = a.effective_value(opts.target_feerate);
+            let b = b.effective_value(opts.target_feerate);
+            b.cmp(&a)
+        });
+        pool
+    };
+
+    let feerate_decreases = opts.target_feerate > opts.long_term_feerate();
+
+    let target_abs = opts.target_value.unwrap_or(0) + opts.min_absolute_fee;
+    let target_eff = selector.effective_target();
+
+    let upper_bound_abs = target_abs + (opts.drain_weight as f32 * opts.target_feerate) as u64;
+    let upper_bound_eff = target_eff + opts.drain_waste();
+
+    let strategy = move |bnb: &Bnb<i64>| -> (BranchStrategy, Option<i64>) {
+        let selected_abs = bnb.selection.selected_absolute_value();
+        let selected_eff = bnb.selection.selected_effective_value();
+
+        // backtrack if remaining value is not enough to reach target
+        if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff {
+            return (BranchStrategy::SkipBoth, None);
+        }
+
+        // backtrack if selected value already surpassed upper bounds
+        if selected_abs > upper_bound_abs && selected_eff > upper_bound_eff {
+            return (BranchStrategy::SkipBoth, None);
+        }
+
+        let selected_waste = bnb.selection.selected_waste();
+
+        // when the feerate decreases, waste without excess is guaranteed to increase with each
+        // selection, so if we have already surpassed the best score, we can backtrack.
+        if feerate_decreases && selected_waste > bnb.best_score {
+            return (BranchStrategy::SkipBoth, None);
+        }
+
+        // solution?
+        if selected_abs >= target_abs && selected_eff >= target_eff {
+            let waste = selected_waste + bnb.selection.current_excess();
+            return (BranchStrategy::SkipBoth, Some(waste));
+        }
+
+        // early bailout optimization:
+        // If the candidate at the previous position is NOT selected and has the same weight and
+        // value as the current candidate, we can skip selecting the current candidate.
+        if bnb.pool_pos > 0 && !bnb.selection.is_empty() {
+            let (_, candidate) = bnb.pool[bnb.pool_pos];
+            let (prev_index, prev_candidate) = bnb.pool[bnb.pool_pos - 1];
+
+            if !bnb.selection.is_selected(prev_index)
+                && candidate.value == prev_candidate.value
+                && candidate.weight == prev_candidate.weight
+            {
+                return (BranchStrategy::SkipInclusion, None);
+            }
+        }
+
+        // check out inclusion branch first
+        return (BranchStrategy::Continue, None);
+    };
+
+    // determine sum of absolute and effective values for current selection
+    let (selected_abs, selected_eff) = selector.selected().fold((0, 0), |(abs, eff), (_, c)| {
+        (
+            abs + c.value,
+            eff + c.effective_value(selector.opts.target_feerate),
+        )
+    });
+
+    let bnb = Bnb::new(selector, pool, i64::MAX);
+
+    // not enough to select anyway
+    if selected_abs + bnb.rem_abs < target_abs || selected_eff + bnb.rem_eff < target_eff {
+        return None;
+    }
+
+    match limit.into() {
+        BnbLimit::Rounds(rounds) => {
+            bnb.into_iter(&strategy)
+                .take(rounds)
+                .reduce(|b, c| if c.is_some() { c } else { b })
+        }
+        #[cfg(feature = "std")]
+        BnbLimit::Duration(duration) => {
+            let start = std::time::SystemTime::now();
+            bnb.into_iter(&strategy)
+                .take_while(|_| start.elapsed().expect("failed to get system time") <= duration)
+                .reduce(|b, c| if c.is_some() { c } else { b })
+        }
+    }?
+}
+
+#[cfg(all(test, feature = "miniscript"))]
+mod test {
+    use bitcoin::secp256k1::Secp256k1;
+
+    use crate::coin_select::{evaluate_cs::evaluate, ExcessStrategyKind};
+
+    use super::{
+        coin_select_bnb,
+        evaluate_cs::{Evaluation, EvaluationError},
+        tester::Tester,
+        CoinSelector, CoinSelectorOpt, Vec, WeightedValue,
+    };
+
+    fn tester() -> Tester {
+        const DESC_STR: &str = "tr(xprv9uBuvtdjghkz8D1qzsSXS9Vs64mqrUnXqzNccj2xcvnCHPpXKYE1U2Gbh9CDHk8UPyF2VuXpVkDA7fk5ZP4Hd9KnhUmTscKmhee9Dp5sBMK)";
+        Tester::new(&Secp256k1::default(), DESC_STR)
+    }
+
+    fn evaluate_bnb(
+        initial_selector: CoinSelector,
+        max_tries: usize,
+    ) -> Result<Evaluation, EvaluationError> {
+        evaluate(initial_selector, |cs| {
+            coin_select_bnb(max_tries, cs.clone()).map_or(false, |new_cs| {
+                *cs = new_cs;
+                true
+            })
+        })
+    }
+
+    #[test]
+    fn not_enough_coins() {
+        let t = tester();
+        let candidates: Vec<WeightedValue> = vec![
+            t.gen_candidate(0, 100_000).into(),
+            t.gen_candidate(1, 100_000).into(),
+        ];
+        let opts = t.gen_opts(200_000);
+        let selector = CoinSelector::new(&candidates, &opts);
+        assert!(!coin_select_bnb(10_000, selector).is_some());
+    }
+
+    #[test]
+    fn exactly_enough_coins_preselected() {
+        let t = tester();
+        let candidates: Vec<WeightedValue> = vec![
+            t.gen_candidate(0, 100_000).into(), // to preselect
+            t.gen_candidate(1, 100_000).into(), // to preselect
+            t.gen_candidate(2, 100_000).into(),
+        ];
+        let opts = CoinSelectorOpt {
+            target_feerate: 0.0,
+            ..t.gen_opts(200_000)
+        };
+        let selector = {
+            let mut selector = CoinSelector::new(&candidates, &opts);
+            selector.select(0); // preselect
+            selector.select(1); // preselect
+            selector
+        };
+
+        let evaluation = evaluate_bnb(selector, 10_000).expect("eval failed");
+        println!("{}", evaluation);
+        assert_eq!(evaluation.solution.selected, (0..=1).collect());
+        assert_eq!(evaluation.solution.excess_strategies.len(), 1);
+        assert_eq!(
+            evaluation.feerate_offset(ExcessStrategyKind::ToFee).floor(),
+            0.0
+        );
+    }
+
+    /// `cost_of_change` acts as the upper bound in BnB; we check whether these boundaries are
+    /// enforced in code
+    #[test]
+    fn cost_of_change() {
+        let t = tester();
+        let candidates: Vec<WeightedValue> = vec![
+            t.gen_candidate(0, 200_000).into(),
+            t.gen_candidate(1, 200_000).into(),
+            t.gen_candidate(2, 200_000).into(),
+        ];
+
+        // lowest and highest possible `recipient_value` opts for derived `drain_waste`, assuming
+        // that we want 2 candidates selected
+        let (lowest_opts, highest_opts) = {
+            let opts = t.gen_opts(0);
+
+            let fee_from_inputs =
+                (candidates[0].weight as f32 * opts.target_feerate).ceil() as u64 * 2;
+            let fee_from_template =
+                ((opts.base_weight + 2) as f32 * opts.target_feerate).ceil() as u64;
+
+            let lowest_opts = CoinSelectorOpt {
+                target_value: Some(
+                    400_000 - fee_from_inputs - fee_from_template - opts.drain_waste() as u64,
+                ),
+                ..opts
+            };
+
+            let highest_opts = CoinSelectorOpt {
+                target_value: Some(400_000 - fee_from_inputs - fee_from_template),
+                ..opts
+            };
+
+            (lowest_opts, highest_opts)
+        };
+
+        // test lowest possible target we are able to select
+        let lowest_eval = evaluate_bnb(CoinSelector::new(&candidates, &lowest_opts), 10_000);
+        assert!(lowest_eval.is_ok());
+        let lowest_eval = lowest_eval.unwrap();
+        println!("LB {}", lowest_eval);
+        assert_eq!(lowest_eval.solution.selected.len(), 2);
+        assert_eq!(lowest_eval.solution.excess_strategies.len(), 1);
+        assert_eq!(
+            lowest_eval
+                .feerate_offset(ExcessStrategyKind::ToFee)
+                .floor(),
+            0.0
+        );
+
+        // test highest possible target we are able to select
+        let highest_eval = evaluate_bnb(CoinSelector::new(&candidates, &highest_opts), 10_000);
+        assert!(highest_eval.is_ok());
+        let highest_eval = highest_eval.unwrap();
+        println!("UB {}", highest_eval);
+        assert_eq!(highest_eval.solution.selected.len(), 2);
+        assert_eq!(highest_eval.solution.excess_strategies.len(), 1);
+        assert_eq!(
+            highest_eval
+                .feerate_offset(ExcessStrategyKind::ToFee)
+                .floor(),
+            0.0
+        );
+
+        // test lower out of bounds
+        let loob_opts = CoinSelectorOpt {
+            target_value: lowest_opts.target_value.map(|v| v - 1),
+            ..lowest_opts
+        };
+        let loob_eval = evaluate_bnb(CoinSelector::new(&candidates, &loob_opts), 10_000);
+        assert!(loob_eval.is_err());
+        println!("Lower OOB: {}", loob_eval.unwrap_err());
+
+        // test upper out of bounds
+        let uoob_opts = CoinSelectorOpt {
+            target_value: highest_opts.target_value.map(|v| v + 1),
+            ..highest_opts
+        };
+        let uoob_eval = evaluate_bnb(CoinSelector::new(&candidates, &uoob_opts), 10_000);
+        assert!(uoob_eval.is_err());
+        println!("Upper OOB: {}", uoob_eval.unwrap_err());
+    }
+
+    #[test]
+    fn try_select() {
+        let t = tester();
+        let candidates: Vec<WeightedValue> = vec![
+            t.gen_candidate(0, 300_000).into(),
+            t.gen_candidate(1, 300_000).into(),
+            t.gen_candidate(2, 300_000).into(),
+            t.gen_candidate(3, 200_000).into(),
+            t.gen_candidate(4, 200_000).into(),
+        ];
+        let make_opts = |v: u64| -> CoinSelectorOpt {
+            CoinSelectorOpt {
+                target_feerate: 0.0,
+                ..t.gen_opts(v)
+            }
+        };
+
+        let test_cases = vec![
+            (make_opts(100_000), false, 0),
+            (make_opts(200_000), true, 1),
+            (make_opts(300_000), true, 1),
+            (make_opts(500_000), true, 2),
+            (make_opts(1_000_000), true, 4),
+            (make_opts(1_200_000), false, 0),
+            (make_opts(1_300_000), true, 5),
+            (make_opts(1_400_000), false, 0),
+        ];
+
+        for (opts, expect_solution, expect_selected) in test_cases {
+            let res = evaluate_bnb(CoinSelector::new(&candidates, &opts), 10_000);
+            assert_eq!(res.is_ok(), expect_solution);
+
+            match res {
+                Ok(eval) => {
+                    println!("{}", eval);
+                    assert_eq!(eval.feerate_offset(ExcessStrategyKind::ToFee), 0.0);
+                    assert_eq!(eval.solution.selected.len(), expect_selected as _);
+                }
+                Err(err) => println!("expected failure: {}", err),
+            }
+        }
+    }
+
+    #[test]
+    fn early_bailout_optimization() {
+        let t = tester();
+
+        // target: 300_000
+        // candidates: 2x of 125_000, 1000x of 100_000, 1x of 50_000
+        // expected solution: 2x 125_000, 1x 50_000
+        // set bnb max tries: 1100, should succeed
+        let candidates = {
+            let mut candidates: Vec<WeightedValue> = vec![
+                t.gen_candidate(0, 125_000).into(),
+                t.gen_candidate(1, 125_000).into(),
+                t.gen_candidate(2, 50_000).into(),
+            ];
+            (3..3 + 1000_u32)
+                .for_each(|index| candidates.push(t.gen_candidate(index, 100_000).into()));
+            candidates
+        };
+        let opts = CoinSelectorOpt {
+            target_feerate: 0.0,
+            ..t.gen_opts(300_000)
+        };
+
+        let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 1100);
+        assert!(result.is_ok());
+
+        let eval = result.unwrap();
+        println!("{}", eval);
+        assert_eq!(eval.solution.selected, (0..=2).collect());
+    }
+
+    #[test]
+    fn should_exhaust_iteration() {
+        static MAX_TRIES: usize = 1000;
+        let t = tester();
+        let candidates = (0..MAX_TRIES + 1)
+            .map(|index| t.gen_candidate(index as _, 10_000).into())
+            .collect::<Vec<WeightedValue>>();
+        let opts = t.gen_opts(10_001 * MAX_TRIES as u64);
+        let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), MAX_TRIES);
+        assert!(result.is_err());
+        println!("error as expected: {}", result.unwrap_err());
+    }
+
+    /// Solution should have fee >= min_absolute_fee (or no solution at all)
+    #[test]
+    fn min_absolute_fee() {
+        let t = tester();
+        let candidates = {
+            let mut candidates = Vec::new();
+            t.gen_weighted_values(&mut candidates, 5, 10_000);
+            t.gen_weighted_values(&mut candidates, 5, 20_000);
+            t.gen_weighted_values(&mut candidates, 5, 30_000);
+            t.gen_weighted_values(&mut candidates, 10, 10_300);
+            t.gen_weighted_values(&mut candidates, 10, 10_500);
+            t.gen_weighted_values(&mut candidates, 10, 10_700);
+            t.gen_weighted_values(&mut candidates, 10, 10_900);
+            t.gen_weighted_values(&mut candidates, 10, 11_000);
+            t.gen_weighted_values(&mut candidates, 10, 12_000);
+            t.gen_weighted_values(&mut candidates, 10, 13_000);
+            candidates
+        };
+        let mut opts = CoinSelectorOpt {
+            min_absolute_fee: 1,
+            ..t.gen_opts(100_000)
+        };
+
+        (1..=120_u64).for_each(|fee_factor| {
+            opts.min_absolute_fee = fee_factor * 31;
+
+            let result = evaluate_bnb(CoinSelector::new(&candidates, &opts), 21_000);
+            match result {
+                Ok(result) => {
+                    println!("Solution {}", result);
+                    let fee = result.solution.excess_strategies[&ExcessStrategyKind::ToFee].fee;
+                    assert!(fee >= opts.min_absolute_fee);
+                    assert_eq!(result.solution.excess_strategies.len(), 1);
+                }
+                Err(err) => {
+                    println!("No Solution: {}", err);
+                }
+            }
+        });
+    }
+
+    /// For a decreasing feerate (long-term feerate is lower than the effective feerate), we should
+    /// select less. For an increasing feerate (long-term feerate is higher than the effective
+    /// feerate), we should select more.
+    #[test]
+    fn feerate_difference() {
+        let t = tester();
+        let candidates = {
+            let mut candidates = Vec::new();
+            t.gen_weighted_values(&mut candidates, 10, 2_000);
+            t.gen_weighted_values(&mut candidates, 10, 5_000);
+            t.gen_weighted_values(&mut candidates, 10, 20_000);
+            candidates
+        };
+
+        let decreasing_feerate_opts = CoinSelectorOpt {
+            target_feerate: 1.25,
+            long_term_feerate: Some(0.25),
+            ..t.gen_opts(100_000)
+        };
+
+        let increasing_feerate_opts = CoinSelectorOpt {
+            target_feerate: 0.25,
+            long_term_feerate: Some(1.25),
+            ..t.gen_opts(100_000)
+        };
+
+        let decreasing_res = evaluate_bnb(
+            CoinSelector::new(&candidates, &decreasing_feerate_opts),
+            21_000,
+        )
+        .expect("no result");
+        let decreasing_len = decreasing_res.solution.selected.len();
+
+        let increasing_res = evaluate_bnb(
+            CoinSelector::new(&candidates, &increasing_feerate_opts),
+            21_000,
+        )
+        .expect("no result");
+        let increasing_len = increasing_res.solution.selected.len();
+
+        println!("decreasing_len: {}", decreasing_len);
+        println!("increasing_len: {}", increasing_len);
+        assert!(decreasing_len < increasing_len);
+    }
+
+    /// TODO: UNIMPLEMENTED TESTS:
+    /// * Excess strategies:
+    ///     * We should always have `ExcessStrategy::ToFee`.
+    ///     * We should only have `ExcessStrategy::ToRecipient` when `max_extra_target > 0`.
+    ///     * We should only have `ExcessStrategy::ToDrain` when `drain_value >= min_drain_value`.
+    /// * Fuzz
+    ///     * Solution feerate should never be lower than target feerate
+    ///     * Solution fee should never be lower than `min_absolute_fee`
+    ///     * Preselected should always remain selected
+    fn _todo() {}
+}
diff --git a/nursery/coin_select/src/coin_selector.rs b/nursery/coin_select/src/coin_selector.rs
new file mode 100644 (file)
index 0000000..b6bde0d
--- /dev/null
@@ -0,0 +1,617 @@
+use super::*;
+
+/// A [`WeightedValue`] represents an input candidate for [`CoinSelector`]. This can either be a
+/// single UTXO, or a group of UTXOs that should be spent together.
+#[derive(Debug, Clone, Copy)]
+pub struct WeightedValue {
+    /// Total value of the UTXO(s) that this [`WeightedValue`] represents.
+    pub value: u64,
+    /// Total weight of including this/these UTXO(s).
+    /// `txin` fields: `prevout`, `nSequence`, `scriptSigLen`, `scriptSig`, `scriptWitnessLen`,
+    /// `scriptWitness` should all be included.
+    pub weight: u32,
+    /// Total number of inputs; so we can calculate extra `varint` weight due to `vin` len changes.
+    pub input_count: usize,
+    /// Whether this [`WeightedValue`] contains at least one segwit spend.
+    pub is_segwit: bool,
+}
+
+impl WeightedValue {
+    /// Create a new [`WeightedValue`] that represents a single input.
+    ///
+    /// `satisfaction_weight` is the weight of `scriptSigLen + scriptSig + scriptWitnessLen +
+    /// scriptWitness`.
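+    ///
+    /// A short sketch mirroring how the example CLI builds candidates from planned UTXOs
+    /// (illustrative; `utxo` and `plan` are assumed to exist):
+    ///
+    /// ```ignore
+    /// let candidate = WeightedValue::new(
+    ///     utxo.txout.value,
+    ///     plan.expected_weight() as u32,
+    ///     plan.witness_version().is_some(),
+    /// );
+    /// ```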
+    pub fn new(value: u64, satisfaction_weight: u32, is_segwit: bool) -> WeightedValue {
+        let weight = TXIN_BASE_WEIGHT + satisfaction_weight;
+        WeightedValue {
+            value,
+            weight,
+            input_count: 1,
+            is_segwit,
+        }
+    }
+
+    /// Effective value of this input candidate: `actual_value - input_weight * feerate (sats/wu)`.
+    pub fn effective_value(&self, effective_feerate: f32) -> i64 {
+        // We prefer undershooting the candidate's effective value (so we overestimate the fee of a
+        // candidate). If we overshoot the candidate's effective value, it may be possible to find a
+        // solution which does not meet the target feerate.
+        self.value as i64 - (self.weight as f32 * effective_feerate).ceil() as i64
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct CoinSelectorOpt {
+    /// The value we need to select.
+    /// If the value is `None` then the selection will be complete if it can pay for the drain
+    /// output and satisfy the other constraints (e.g. minimum fees).
+    pub target_value: Option<u64>,
+    /// Additional leeway for the target value.
+    pub max_extra_target: u64, // TODO: Maybe out of scope here?
+
+    /// The feerate we should try and achieve in sats per weight unit.
+    pub target_feerate: f32,
+    /// The long-term feerate, used for waste calculations; falls back to `target_feerate` when `None`.
+    pub long_term_feerate: Option<f32>, // TODO: Maybe out of scope? (waste)
+    /// The minimum absolute fee. I.e. needed for RBF.
+    pub min_absolute_fee: u64,
+
+    /// The weight of the template transaction including fixed fields and outputs.
+    pub base_weight: u32,
+    /// Additional weight if we include the drain (change) output.
+    pub drain_weight: u32,
+    /// Weight of spending the drain (change) output in the future.
+    pub spend_drain_weight: u32, // TODO: Maybe out of scope? (waste)
+
+    /// Minimum value allowed for a drain (change) output.
+    pub min_drain_value: u64,
+}
+
+impl CoinSelectorOpt {
+    fn from_weights(base_weight: u32, drain_weight: u32, spend_drain_weight: u32) -> Self {
+        // 0.25 sats/wu == 1 sat/vb
+        let target_feerate = 0.25_f32;
+
+        // set `min_drain_value` to dust limit
+        let min_drain_value =
+            3 * ((drain_weight + spend_drain_weight) as f32 * target_feerate) as u64;
+
+        Self {
+            target_value: None,
+            max_extra_target: 0,
+            target_feerate,
+            long_term_feerate: None,
+            min_absolute_fee: 0,
+            base_weight,
+            drain_weight,
+            spend_drain_weight,
+            min_drain_value,
+        }
+    }
+
+    pub fn fund_outputs(
+        txouts: &[TxOut],
+        drain_output: &TxOut,
+        drain_satisfaction_weight: u32,
+    ) -> Self {
+        let mut tx = Transaction {
+            input: vec![],
+            version: 1,
+            lock_time: LockTime::ZERO.into(),
+            output: txouts.to_vec(),
+        };
+        let base_weight = tx.weight();
+        // this awkward calculation is necessary since `TxOut` doesn't have a `weight()` method
+        let drain_weight = {
+            tx.output.push(drain_output.clone());
+            tx.weight() - base_weight
+        };
+        Self {
+            target_value: if txouts.is_empty() {
+                None
+            } else {
+                Some(txouts.iter().map(|txout| txout.value).sum())
+            },
+            ..Self::from_weights(
+                base_weight as u32,
+                drain_weight as u32,
+                TXIN_BASE_WEIGHT + drain_satisfaction_weight,
+            )
+        }
+    }
+
+    pub fn long_term_feerate(&self) -> f32 {
+        self.long_term_feerate.unwrap_or(self.target_feerate)
+    }
+
+    pub fn drain_waste(&self) -> i64 {
+        (self.drain_weight as f32 * self.target_feerate
+            + self.spend_drain_weight as f32 * self.long_term_feerate()) as i64
+    }
+}
+
+/// [`CoinSelector`] is responsible for selecting and deselecting from a set of candidates.
+#[derive(Debug, Clone)]
+pub struct CoinSelector<'a> {
+    pub opts: &'a CoinSelectorOpt,
+    pub candidates: &'a Vec<WeightedValue>,
+    selected: BTreeSet<usize>,
+}
+
+impl<'a> CoinSelector<'a> {
+    pub fn candidate(&self, index: usize) -> &WeightedValue {
+        &self.candidates[index]
+    }
+
+    pub fn new(candidates: &'a Vec<WeightedValue>, opts: &'a CoinSelectorOpt) -> Self {
+        Self {
+            candidates,
+            selected: Default::default(),
+            opts,
+        }
+    }
+
+    pub fn select(&mut self, index: usize) -> bool {
+        assert!(index < self.candidates.len());
+        self.selected.insert(index)
+    }
+
+    pub fn deselect(&mut self, index: usize) -> bool {
+        self.selected.remove(&index)
+    }
+
+    pub fn is_selected(&self, index: usize) -> bool {
+        self.selected.contains(&index)
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.selected.is_empty()
+    }
+
+    /// Weight sum of all selected inputs.
+    pub fn selected_weight(&self) -> u32 {
+        self.selected
+            .iter()
+            .map(|&index| self.candidates[index].weight)
+            .sum()
+    }
+
+    /// Effective value sum of all selected inputs.
+    pub fn selected_effective_value(&self) -> i64 {
+        self.selected
+            .iter()
+            .map(|&index| self.candidates[index].effective_value(self.opts.target_feerate))
+            .sum()
+    }
+
+    /// Absolute value sum of all selected inputs.
+    pub fn selected_absolute_value(&self) -> u64 {
+        self.selected
+            .iter()
+            .map(|&index| self.candidates[index].value)
+            .sum()
+    }
+
+    /// Waste sum of all selected inputs.
+    pub fn selected_waste(&self) -> i64 {
+        (self.selected_weight() as f32 * (self.opts.target_feerate - self.opts.long_term_feerate()))
+            as i64
+    }
+
+    /// Current weight of template tx + selected inputs.
+    pub fn current_weight(&self) -> u32 {
+        let witness_header_extra_weight = self
+            .selected()
+            .find(|(_, wv)| wv.is_segwit)
+            .map(|_| 2)
+            .unwrap_or(0);
+        let vin_count_varint_extra_weight = {
+            let input_count = self.selected().map(|(_, wv)| wv.input_count).sum::<usize>();
+            (varint_size(input_count) - 1) * 4
+        };
+        self.opts.base_weight
+            + self.selected_weight()
+            + witness_header_extra_weight
+            + vin_count_varint_extra_weight
+    }
+
+    /// Current excess.
+    pub fn current_excess(&self) -> i64 {
+        self.selected_effective_value() - self.effective_target()
+    }
+
+    /// This is the effective target value.
+    pub fn effective_target(&self) -> i64 {
+        let (has_segwit, max_input_count) = self
+            .candidates
+            .iter()
+            .fold((false, 0_usize), |(is_segwit, input_count), c| {
+                (is_segwit || c.is_segwit, input_count + c.input_count)
+            });
+
+        let effective_base_weight = self.opts.base_weight
+            + if has_segwit { 2_u32 } else { 0_u32 }
+            + (varint_size(max_input_count) - 1) * 4;
+
+        self.opts.target_value.unwrap_or(0) as i64
+            + (effective_base_weight as f32 * self.opts.target_feerate).ceil() as i64
+    }
+
+    pub fn selected_count(&self) -> usize {
+        self.selected.len()
+    }
+
+    pub fn selected(&self) -> impl Iterator<Item = (usize, &'a WeightedValue)> + '_ {
+        self.selected
+            .iter()
+            .map(move |&index| (index, &self.candidates[index]))
+    }
+
+    pub fn unselected(&self) -> impl Iterator<Item = (usize, &'a WeightedValue)> + '_ {
+        self.candidates
+            .iter()
+            .enumerate()
+            .filter(move |(index, _)| !self.selected.contains(index))
+    }
+
+    pub fn selected_indexes(&self) -> impl Iterator<Item = usize> + '_ {
+        self.selected.iter().cloned()
+    }
+
+    pub fn unselected_indexes(&self) -> impl Iterator<Item = usize> + '_ {
+        (0..self.candidates.len()).filter(move |index| !self.selected.contains(index))
+    }
+
+    pub fn all_selected(&self) -> bool {
+        self.selected.len() == self.candidates.len()
+    }
+
+    pub fn select_all(&mut self) {
+        self.selected = (0..self.candidates.len()).collect();
+    }
+
+    pub fn select_until_finished(&mut self) -> Result<Selection, SelectionError> {
+        let mut selection = self.finish();
+
+        if selection.is_ok() {
+            return selection;
+        }
+
+        let unselected = self.unselected_indexes().collect::<Vec<_>>();
+
+        for index in unselected {
+            self.select(index);
+            selection = self.finish();
+
+            if selection.is_ok() {
+                break;
+            }
+        }
+
+        selection
+    }
+
+    pub fn finish(&self) -> Result<Selection, SelectionError> {
+        let weight_without_drain = self.current_weight();
+        let weight_with_drain = weight_without_drain + self.opts.drain_weight;
+
+        let fee_without_drain =
+            (weight_without_drain as f32 * self.opts.target_feerate).ceil() as u64;
+        let fee_with_drain = (weight_with_drain as f32 * self.opts.target_feerate).ceil() as u64;
+
+        let inputs_minus_outputs = {
+            let target_value = self.opts.target_value.unwrap_or(0);
+            let selected = self.selected_absolute_value();
+
+            // find the largest unsatisfied constraint (if any) and return an error for that constraint;
+            // `selected` should always be greater than or equal to each of these constraint values
+            [
+                (
+                    SelectionConstraint::TargetValue,
+                    target_value.saturating_sub(selected),
+                ),
+                (
+                    SelectionConstraint::TargetFee,
+                    (target_value + fee_without_drain).saturating_sub(selected),
+                ),
+                (
+                    SelectionConstraint::MinAbsoluteFee,
+                    (target_value + self.opts.min_absolute_fee).saturating_sub(selected),
+                ),
+                (
+                    SelectionConstraint::MinDrainValue,
+                    // when we have no target value (hence no recipient txouts), we need to ensure
+                    // the selected amount can satisfy requirements for a drain output (so we at
+                    // least have one txout)
+                    if self.opts.target_value.is_none() {
+                        (fee_with_drain + self.opts.min_drain_value).saturating_sub(selected)
+                    } else {
+                        0
+                    },
+                ),
+            ]
+            .iter()
+            .filter(|&(_, v)| v > &0)
+            .max_by_key(|&(_, v)| v)
+            .map_or(Ok(()), |(constraint, missing)| {
+                Err(SelectionError {
+                    selected,
+                    missing: *missing,
+                    constraint: *constraint,
+                })
+            })?;
+
+            (selected - target_value) as u64
+        };
+
+        let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee);
+        let fee_with_drain = fee_with_drain.max(self.opts.min_absolute_fee);
+
+        let excess_without_drain = inputs_minus_outputs - fee_without_drain;
+        let input_waste = self.selected_waste();
+
+        // begin preparing excess strategies for final selection
+        let mut excess_strategies = HashMap::new();
+
+        // only allow `ToFee` and `ToRecipient` excess strategies when we have a `target_value`,
+        // otherwise we would end up with a transaction that has no txouts, or attempt to add value
+        // to an output that does not exist
+        if self.opts.target_value.is_some() {
+            // no drain, excess to fee
+            excess_strategies.insert(
+                ExcessStrategyKind::ToFee,
+                ExcessStrategy {
+                    recipient_value: self.opts.target_value,
+                    drain_value: None,
+                    fee: fee_without_drain + excess_without_drain,
+                    weight: weight_without_drain,
+                    waste: input_waste + excess_without_drain as i64,
+                },
+            );
+
+            // no drain, excess to recipient
+            // if `excess == 0`, this result will be the same as the previous, so don't consider it
+            // if `max_extra_target == 0`, there is no leeway for this strategy
+            if excess_without_drain > 0 && self.opts.max_extra_target > 0 {
+                let extra_recipient_value =
+                    core::cmp::min(self.opts.max_extra_target, excess_without_drain);
+                let extra_fee = excess_without_drain - extra_recipient_value;
+                excess_strategies.insert(
+                    ExcessStrategyKind::ToRecipient,
+                    ExcessStrategy {
+                        recipient_value: self.opts.target_value.map(|v| v + extra_recipient_value),
+                        drain_value: None,
+                        fee: fee_without_drain + extra_fee,
+                        weight: weight_without_drain,
+                        waste: input_waste + extra_fee as i64,
+                    },
+                );
+            }
+        }
+
+        // with drain
+        if fee_with_drain >= self.opts.min_absolute_fee
+            && inputs_minus_outputs >= fee_with_drain + self.opts.min_drain_value
+        {
+            excess_strategies.insert(
+                ExcessStrategyKind::ToDrain,
+                ExcessStrategy {
+                    recipient_value: self.opts.target_value,
+                    drain_value: Some(inputs_minus_outputs.saturating_sub(fee_with_drain)),
+                    fee: fee_with_drain,
+                    weight: weight_with_drain,
+                    waste: input_waste + self.opts.drain_waste(),
+                },
+            );
+        }
+
+        debug_assert!(
+            !excess_strategies.is_empty(),
+            "should have at least one excess strategy"
+        );
+
+        Ok(Selection {
+            selected: self.selected.clone(),
+            excess: excess_without_drain,
+            excess_strategies,
+        })
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct SelectionError {
+    selected: u64,
+    missing: u64,
+    constraint: SelectionConstraint,
+}
+
+impl core::fmt::Display for SelectionError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            SelectionError {
+                selected,
+                missing,
+                constraint,
+            } => write!(
+                f,
+                "insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}",
+                selected, missing, constraint
+            ),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for SelectionError {}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum SelectionConstraint {
+    /// The target is not met
+    TargetValue,
+    /// The target fee (given the feerate) is not met
+    TargetFee,
+    /// Min absolute fee is not met
+    MinAbsoluteFee,
+    /// Min drain value is not met
+    MinDrainValue,
+}
+
+impl core::fmt::Display for SelectionConstraint {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            SelectionConstraint::TargetValue => core::write!(f, "target_value"),
+            SelectionConstraint::TargetFee => core::write!(f, "target_fee"),
+            SelectionConstraint::MinAbsoluteFee => core::write!(f, "min_absolute_fee"),
+            SelectionConstraint::MinDrainValue => core::write!(f, "min_drain_value"),
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct Selection {
+    pub selected: BTreeSet<usize>,
+    pub excess: u64,
+    pub excess_strategies: HashMap<ExcessStrategyKind, ExcessStrategy>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, core::hash::Hash)]
+pub enum ExcessStrategyKind {
+    ToFee,
+    ToRecipient,
+    ToDrain,
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct ExcessStrategy {
+    pub recipient_value: Option<u64>,
+    pub drain_value: Option<u64>,
+    pub fee: u64,
+    pub weight: u32,
+    pub waste: i64,
+}
+
+impl Selection {
+    pub fn apply_selection<'a, T>(
+        &'a self,
+        candidates: &'a [T],
+    ) -> impl Iterator<Item = &'a T> + 'a {
+        self.selected.iter().map(move |i| &candidates[*i])
+    }
+
+    /// Returns the [`ExcessStrategy`] that results in the least waste.
+    pub fn best_strategy(&self) -> (&ExcessStrategyKind, &ExcessStrategy) {
+        self.excess_strategies
+            .iter()
+            .min_by_key(|&(_, a)| a.waste)
+            .expect("selection has no excess strategy")
+    }
+}
+
+impl core::fmt::Display for ExcessStrategyKind {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            ExcessStrategyKind::ToFee => core::write!(f, "to_fee"),
+            ExcessStrategyKind::ToRecipient => core::write!(f, "to_recipient"),
+            ExcessStrategyKind::ToDrain => core::write!(f, "to_drain"),
+        }
+    }
+}
+
+impl ExcessStrategy {
+    /// Returns feerate in sats/wu.
+    pub fn feerate(&self) -> f32 {
+        self.fee as f32 / self.weight as f32
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use crate::{ExcessStrategyKind, SelectionConstraint};
+
+    use super::{CoinSelector, CoinSelectorOpt, WeightedValue};
+
+    /// Ensure `target_value` is respected. Can't have no disrespect.
+    #[test]
+    fn target_value_respected() {
+        let target_value = 1000_u64;
+
+        let candidates = (500..1500_u64)
+            .map(|value| WeightedValue {
+                value,
+                weight: 100,
+                input_count: 1,
+                is_segwit: false,
+            })
+            .collect::<super::Vec<_>>();
+
+        let opts = CoinSelectorOpt {
+            target_value: Some(target_value),
+            max_extra_target: 0,
+            target_feerate: 0.00,
+            long_term_feerate: None,
+            min_absolute_fee: 0,
+            base_weight: 10,
+            drain_weight: 10,
+            spend_drain_weight: 10,
+            min_drain_value: 10,
+        };
+
+        for (index, v) in candidates.iter().enumerate() {
+            let mut selector = CoinSelector::new(&candidates, &opts);
+            assert!(selector.select(index));
+
+            let res = selector.finish();
+            if v.value < opts.target_value.unwrap_or(0) {
+                let err = res.expect_err("should have failed");
+                assert_eq!(err.selected, v.value);
+                assert_eq!(err.missing, target_value - v.value);
+                assert_eq!(err.constraint, SelectionConstraint::MinAbsoluteFee);
+            } else {
+                let sel = res.expect("should have succeeded");
+                assert_eq!(sel.excess, v.value - opts.target_value.unwrap_or(0));
+            }
+        }
+    }
+
+    #[test]
+    fn drain_all() {
+        let candidates = (0..100)
+            .map(|_| WeightedValue {
+                value: 666,
+                weight: 166,
+                input_count: 1,
+                is_segwit: false,
+            })
+            .collect::<super::Vec<_>>();
+
+        let opts = CoinSelectorOpt {
+            target_value: None,
+            max_extra_target: 0,
+            target_feerate: 0.25,
+            long_term_feerate: None,
+            min_absolute_fee: 0,
+            base_weight: 10,
+            drain_weight: 100,
+            spend_drain_weight: 66,
+            min_drain_value: 1000,
+        };
+
+        let selection = CoinSelector::new(&candidates, &opts)
+            .select_until_finished()
+            .expect("should succeed");
+
+        assert!(selection.selected.len() > 1);
+        assert_eq!(selection.excess_strategies.len(), 1);
+
+        let (kind, strategy) = selection.best_strategy();
+        assert_eq!(*kind, ExcessStrategyKind::ToDrain);
+        assert!(strategy.recipient_value.is_none());
+        assert!(strategy.drain_value.is_some());
+    }
+
+    /// TODO: Tests to add:
+    /// * `finish` should ensure at least `target_value` is selected.
+    /// * actual feerate should be equal or higher than `target_feerate`.
+    /// * actual drain value should be equal or higher than `min_drain_value` (or else no drain).
+    fn _todo() {}
+}
diff --git a/nursery/coin_select/src/lib.rs b/nursery/coin_select/src/lib.rs
new file mode 100644 (file)
index 0000000..ff4d453
--- /dev/null
@@ -0,0 +1,33 @@
+#![no_std]
+
+#[cfg(feature = "std")]
+extern crate std;
+
+#[macro_use]
+extern crate alloc;
+extern crate bdk_chain;
+
+use alloc::vec::Vec;
+use bdk_chain::{
+    bitcoin,
+    collections::{BTreeSet, HashMap},
+};
+use bitcoin::{LockTime, Transaction, TxOut};
+use core::fmt::{Debug, Display};
+
+mod coin_selector;
+pub use coin_selector::*;
+
+mod bnb;
+pub use bnb::*;
+
+/// A `TxIn`'s "base" fields are `outpoint` (32 + 4 bytes) and `nSequence` (4 bytes). This does not
+/// include `scriptSigLen` or `scriptSig`.
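+///
+/// In total this is `(32 + 4 + 4) * 4 = 160` weight units, since non-witness bytes count for 4 WU
+/// each.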
+pub const TXIN_BASE_WEIGHT: u32 = (32 + 4 + 4) * 4;
+
+/// Helper to calculate varint size. `v` is the value the varint represents.
+// Shamelessly copied from
+// https://github.com/rust-bitcoin/rust-miniscript/blob/d5615acda1a7fdc4041a11c1736af139b8c7ebe8/src/util.rs#L8
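+// For reference: a consensus varint takes 1 byte for values below 0xFD, 3 bytes up to 0xFFFF,
+// 5 bytes up to 0xFFFF_FFFF, and 9 bytes otherwise.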
+pub(crate) fn varint_size(v: usize) -> u32 {
+    bitcoin::VarInt(v as u64).len() as u32
+}
diff --git a/nursery/tmp_plan/Cargo.toml b/nursery/tmp_plan/Cargo.toml
new file mode 100644 (file)
index 0000000..67eb57e
--- /dev/null
@@ -0,0 +1,13 @@
+[package]
+name = "bdk_tmp_plan"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+bdk_chain = { version = "0.3", features = ["miniscript"], path = "../../crates/chain" }
+
+[features]
+default = ["std"]
+std = []
diff --git a/nursery/tmp_plan/README.md b/nursery/tmp_plan/README.md
new file mode 100644 (file)
index 0000000..70cc100
--- /dev/null
@@ -0,0 +1,3 @@
+# Temporary planning module
+
+A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is merged and released
diff --git a/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml b/nursery/tmp_plan/bdk_tmp_plan/Cargo.toml
new file mode 100644 (file)
index 0000000..ecbfaad
--- /dev/null
@@ -0,0 +1,13 @@
+[package]
+name = "bdk_tmp_plan"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+bdk_chain = { version = "0.3", features = ["miniscript"] }
+
+[features]
+default = ["std"]
+std = []
diff --git a/nursery/tmp_plan/bdk_tmp_plan/README.md b/nursery/tmp_plan/bdk_tmp_plan/README.md
new file mode 100644 (file)
index 0000000..70cc100
--- /dev/null
@@ -0,0 +1,3 @@
+# Temporary planning module
+
+A temporary place to hold the planning module until https://github.com/rust-bitcoin/rust-miniscript/pull/481 is merged and released
diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs b/nursery/tmp_plan/bdk_tmp_plan/src/lib.rs
new file mode 100644 (file)
index 0000000..a64d449
--- /dev/null
@@ -0,0 +1,436 @@
+#![allow(unused)]
+#![allow(missing_docs)]
+//! A spending plan (or *plan* for short) is a representation of a particular spending path on a
+//! descriptor. This allows us to analyze a choice of spending path without producing any
+//! signatures or other witness data for it.
+//!
+//! To make a plan, you provide the descriptor with "assets": which keys you are able to use, which
+//! hash pre-images you have access to, the current block height, etc.
+//!
+//! Once you have a plan, it can tell you its expected satisfaction weight, which is useful for
+//! coin selection. Furthermore, it tells you which subset of those keys and hash pre-images you
+//! will actually need, as well as what locktime or sequence number you need to set.
+//!
+//! Once you've obtained the signatures, hash pre-images, etc. required by the plan, it can create a
+//! witness/script_sig for the input.
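+//!
+//! A rough sketch of the intended flow (not compiled; `descriptor`, `keys`, and `material` are
+//! assumed to already exist):
+//!
+//! ```ignore
+//! // `keys` are asset keys implementing `CanDerive` (e.g. `DescriptorPublicKey`s we control).
+//! let assets = Assets { keys, ..Default::default() };
+//! let plan = plan_satisfaction(&descriptor, &assets).expect("no spending path available");
+//!
+//! // The expected satisfaction weight can feed into coin selection / fee estimation.
+//! let weight = plan.expected_weight();
+//!
+//! // Find out which signatures and pre-images are still required...
+//! let requirements = plan.requirements();
+//!
+//! // ...gather them into a `SatisfactionMaterial` and try to finalize the input.
+//! match plan.try_complete(&material) {
+//!     PlanState::Complete { final_script_sig, final_script_witness } => { /* set on the input */ }
+//!     PlanState::Incomplete(remaining) => { /* still missing material */ }
+//! }
+//! ```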
+use bdk_chain::{bitcoin, collections::*, miniscript};
+use bitcoin::{
+    blockdata::{locktime::LockTime, transaction::Sequence},
+    hashes::{hash160, ripemd160, sha256},
+    secp256k1::Secp256k1,
+    util::{
+        address::WitnessVersion,
+        bip32::{DerivationPath, Fingerprint, KeySource},
+        taproot::{LeafVersion, TapBranchHash, TapLeafHash},
+    },
+    EcdsaSig, SchnorrSig, Script, TxIn, Witness,
+};
+use miniscript::{
+    descriptor::{InnerXKey, Tr},
+    hash256, DefiniteDescriptorKey, Descriptor, DescriptorPublicKey, ScriptContext, ToPublicKey,
+};
+
+pub(crate) fn varint_len(v: usize) -> usize {
+    bitcoin::VarInt(v as u64).len() as usize
+}
+
+mod plan_impls;
+mod requirements;
+mod template;
+pub use requirements::*;
+pub use template::PlanKey;
+use template::TemplateItem;
+
+#[derive(Clone, Debug)]
+enum TrSpend {
+    KeySpend,
+    LeafSpend {
+        script: Script,
+        leaf_version: LeafVersion,
+    },
+}
+
+#[derive(Clone, Debug)]
+enum Target {
+    Legacy,
+    Segwitv0 {
+        script_code: Script,
+    },
+    Segwitv1 {
+        tr: Tr<DefiniteDescriptorKey>,
+        tr_plan: TrSpend,
+    },
+}
+
+impl Target {}
+
+#[derive(Clone, Debug)]
+/// A plan represents a particular spending path for a descriptor.
+///
+/// See the module level documentation for more info.
+pub struct Plan<AK> {
+    template: Vec<TemplateItem<AK>>,
+    target: Target,
+    set_locktime: Option<LockTime>,
+    set_sequence: Option<Sequence>,
+}
+
+impl Default for Target {
+    fn default() -> Self {
+        Target::Legacy
+    }
+}
+
+#[derive(Clone, Debug, Default)]
+/// Signatures and hash pre-images that can be used to complete a plan.
+pub struct SatisfactionMaterial {
+    /// Schnorr signatures under their keys
+    pub schnorr_sigs: BTreeMap<DefiniteDescriptorKey, SchnorrSig>,
+    /// ECDSA signatures under their keys
+    pub ecdsa_sigs: BTreeMap<DefiniteDescriptorKey, EcdsaSig>,
+    /// SHA256 pre-images under their images
+    pub sha256_preimages: BTreeMap<sha256::Hash, Vec<u8>>,
+    /// hash160 pre-images under their images
+    pub hash160_preimages: BTreeMap<hash160::Hash, Vec<u8>>,
+    /// hash256 pre-images under their images
+    pub hash256_preimages: BTreeMap<hash256::Hash, Vec<u8>>,
+    /// ripemd160 pre-images under their images
+    pub ripemd160_preimages: BTreeMap<ripemd160::Hash, Vec<u8>>,
+}
+
+impl<Ak> Plan<Ak>
+where
+    Ak: Clone,
+{
+    /// The expected satisfaction weight for the plan if it is completed.
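+    ///
+    /// The result is in weight units: script-sig bytes count 4 WU each and witness bytes 1 WU
+    /// each; the legacy (script-sig only) case is currently unimplemented.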
+    pub fn expected_weight(&self) -> usize {
+        let script_sig_size = match self.target {
+            Target::Legacy => unimplemented!(), // self
+            // .template
+            // .iter()
+            // .map(|step| {
+            //     let size = step.expected_size();
+            //     size + push_opcode_size(size)
+            // })
+            // .sum()
+            Target::Segwitv0 { .. } | Target::Segwitv1 { .. } => 1,
+        };
+        let witness_elem_sizes: Option<Vec<usize>> = match &self.target {
+            Target::Legacy => None,
+            Target::Segwitv0 { .. } => Some(
+                self.template
+                    .iter()
+                    .map(|step| step.expected_size())
+                    .collect(),
+            ),
+            Target::Segwitv1 { tr, tr_plan } => {
+                let mut witness_elems = self
+                    .template
+                    .iter()
+                    .map(|step| step.expected_size())
+                    .collect::<Vec<_>>();
+
+                if let TrSpend::LeafSpend {
+                    script,
+                    leaf_version,
+                } = tr_plan
+                {
+                    let control_block = tr
+                        .spend_info()
+                        .control_block(&(script.clone(), *leaf_version))
+                        .expect("must exist");
+                    witness_elems.push(script.len());
+                    witness_elems.push(control_block.size());
+                }
+
+                Some(witness_elems)
+            }
+        };
+
+        let witness_size: usize = match witness_elem_sizes {
+            Some(elems) => {
+                varint_len(elems.len())
+                    + elems
+                        .into_iter()
+                        .map(|elem| varint_len(elem) + elem)
+                        .sum::<usize>()
+            }
+            None => 0,
+        };
+
+        script_sig_size * 4 + witness_size
+    }
+
+    pub fn requirements(&self) -> Requirements<Ak> {
+        match self.try_complete(&SatisfactionMaterial::default()) {
+            PlanState::Complete { .. } => Requirements::default(),
+            PlanState::Incomplete(requirements) => requirements,
+        }
+    }
+
+    pub fn try_complete(&self, auth_data: &SatisfactionMaterial) -> PlanState<Ak> {
+        let unsatisfied_items = self
+            .template
+            .iter()
+            .filter(|step| match step {
+                TemplateItem::Sign(key) => {
+                    !auth_data.schnorr_sigs.contains_key(&key.descriptor_key)
+                }
+                TemplateItem::Hash160(image) => !auth_data.hash160_preimages.contains_key(image),
+                TemplateItem::Hash256(image) => !auth_data.hash256_preimages.contains_key(image),
+                TemplateItem::Sha256(image) => !auth_data.sha256_preimages.contains_key(image),
+                TemplateItem::Ripemd160(image) => {
+                    !auth_data.ripemd160_preimages.contains_key(image)
+                }
+                TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => false,
+            })
+            .collect::<Vec<_>>();
+
+        if unsatisfied_items.is_empty() {
+            let mut witness = self
+                .template
+                .iter()
+                .flat_map(|step| step.to_witness_stack(&auth_data))
+                .collect::<Vec<_>>();
+            match &self.target {
+                Target::Segwitv0 { .. } => todo!(),
+                Target::Legacy => todo!(),
+                Target::Segwitv1 {
+                    tr_plan: TrSpend::KeySpend,
+                    ..
+                } => PlanState::Complete {
+                    final_script_sig: None,
+                    final_script_witness: Some(Witness::from_vec(witness)),
+                },
+                Target::Segwitv1 {
+                    tr,
+                    tr_plan:
+                        TrSpend::LeafSpend {
+                            script,
+                            leaf_version,
+                        },
+                } => {
+                    let spend_info = tr.spend_info();
+                    let control_block = spend_info
+                        .control_block(&(script.clone(), *leaf_version))
+                        .expect("must exist");
+                    witness.push(script.clone().into_bytes());
+                    witness.push(control_block.serialize());
+
+                    PlanState::Complete {
+                        final_script_sig: None,
+                        final_script_witness: Some(Witness::from_vec(witness)),
+                    }
+                }
+            }
+        } else {
+            let mut requirements = Requirements::default();
+
+            match &self.target {
+                Target::Legacy => {
+                    todo!()
+                }
+                Target::Segwitv0 { .. } => {
+                    todo!()
+                }
+                Target::Segwitv1 { tr, tr_plan } => {
+                    let spend_info = tr.spend_info();
+                    match tr_plan {
+                        TrSpend::KeySpend => match &self.template[..] {
+                            [TemplateItem::Sign(ref plan_key)] => {
+                                requirements.signatures = RequiredSignatures::TapKey {
+                                    merkle_root: spend_info.merkle_root(),
+                                    plan_key: plan_key.clone(),
+                                };
+                            }
+                            _ => unreachable!("tapkey spend will always have only one sign step"),
+                        },
+                        TrSpend::LeafSpend {
+                            script,
+                            leaf_version,
+                        } => {
+                            let leaf_hash = TapLeafHash::from_script(&script, *leaf_version);
+                            requirements.signatures = RequiredSignatures::TapScript {
+                                leaf_hash,
+                                plan_keys: vec![],
+                            }
+                        }
+                    }
+                }
+            }
+
+            let required_signatures = match requirements.signatures {
+                RequiredSignatures::Legacy { .. } => todo!(),
+                RequiredSignatures::Segwitv0 { .. } => todo!(),
+                RequiredSignatures::TapKey { .. } => return PlanState::Incomplete(requirements),
+                RequiredSignatures::TapScript {
+                    plan_keys: ref mut keys,
+                    ..
+                } => keys,
+            };
+
+            for step in unsatisfied_items {
+                match step {
+                    TemplateItem::Sign(plan_key) => {
+                        required_signatures.push(plan_key.clone());
+                    }
+                    TemplateItem::Hash160(image) => {
+                        requirements.hash160_images.insert(image.clone());
+                    }
+                    TemplateItem::Hash256(image) => {
+                        requirements.hash256_images.insert(image.clone());
+                    }
+                    TemplateItem::Sha256(image) => {
+                        requirements.sha256_images.insert(image.clone());
+                    }
+                    TemplateItem::Ripemd160(image) => {
+                        requirements.ripemd160_images.insert(image.clone());
+                    }
+                    TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => { /* no requirements */
+                    }
+                }
+            }
+
+            PlanState::Incomplete(requirements)
+        }
+    }
+
+    /// Witness version for the plan
+    pub fn witness_version(&self) -> Option<WitnessVersion> {
+        match self.target {
+            Target::Legacy => None,
+            Target::Segwitv0 { .. } => Some(WitnessVersion::V0),
+            Target::Segwitv1 { .. } => Some(WitnessVersion::V1),
+        }
+    }
+
+    /// The minimum required locktime height or time on the transaction using the plan.
+    pub fn required_locktime(&self) -> Option<LockTime> {
+        self.set_locktime.clone()
+    }
+
+    /// The minimum required sequence (height or time) on the input to satisfy the plan
+    pub fn required_sequence(&self) -> Option<Sequence> {
+        self.set_sequence.clone()
+    }
+
+    /// The minimum transaction version required on the transaction using the plan.
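+    ///
+    /// A relative timelock (`nSequence`) only takes effect from transaction version 2 (BIP 68),
+    /// hence version 2 is required whenever a sequence is set.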
+    pub fn min_version(&self) -> Option<u32> {
+        if let Some(_) = self.set_sequence {
+            Some(2)
+        } else {
+            Some(1)
+        }
+    }
+}
+
+/// The returned value from [`Plan::try_complete`].
+pub enum PlanState<Ak> {
+    /// The plan is complete
+    Complete {
+        /// The script sig that should be set on the input
+        final_script_sig: Option<Script>,
+        /// The witness that should be set on the input
+        final_script_witness: Option<Witness>,
+    },
+    Incomplete(Requirements<Ak>),
+}
+
+#[derive(Clone, Debug)]
+pub struct Assets<K> {
+    pub keys: Vec<K>,
+    pub txo_age: Option<Sequence>,
+    pub max_locktime: Option<LockTime>,
+    pub sha256: Vec<sha256::Hash>,
+    pub hash256: Vec<hash256::Hash>,
+    pub ripemd160: Vec<ripemd160::Hash>,
+    pub hash160: Vec<hash160::Hash>,
+}
+
+impl<K> Default for Assets<K> {
+    fn default() -> Self {
+        Self {
+            keys: Default::default(),
+            txo_age: Default::default(),
+            max_locktime: Default::default(),
+            sha256: Default::default(),
+            hash256: Default::default(),
+            ripemd160: Default::default(),
+            hash160: Default::default(),
+        }
+    }
+}
+
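+/// A key (or key source) that may be able to derive a given `DefiniteDescriptorKey`; `can_derive`
+/// returns the derivation path needed to reach it (or `None` if it cannot).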
+pub trait CanDerive {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath>;
+}
+
+impl CanDerive for KeySource {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath> {
+        match DescriptorPublicKey::from(key.clone()) {
+            DescriptorPublicKey::Single(single_pub) => {
+                path_to_child(self, single_pub.origin.as_ref()?, None)
+            }
+            DescriptorPublicKey::XPub(dxk) => {
+                let origin = dxk.origin.clone().unwrap_or_else(|| {
+                    let secp = Secp256k1::signing_only();
+                    (dxk.xkey.xkey_fingerprint(&secp), DerivationPath::master())
+                });
+
+                path_to_child(self, &origin, Some(&dxk.derivation_path))
+            }
+        }
+    }
+}
+
+impl CanDerive for DescriptorPublicKey {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath> {
+        match (self, DescriptorPublicKey::from(key.clone())) {
+            (parent, child) if parent == &child => Some(DerivationPath::master()),
+            (DescriptorPublicKey::XPub(parent), _) => {
+                let origin = parent.origin.clone().unwrap_or_else(|| {
+                    let secp = Secp256k1::signing_only();
+                    (
+                        parent.xkey.xkey_fingerprint(&secp),
+                        DerivationPath::master(),
+                    )
+                });
+                KeySource::from(origin).can_derive(key)
+            }
+            _ => None,
+        }
+    }
+}
+
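+/// Computes the derivation path that takes `parent` to the child key: the fingerprints must match
+/// and `parent`'s path must be a prefix of the child's origin path; the remaining suffix, extended
+/// by the child's own derivation (if any), is returned.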
+fn path_to_child(
+    parent: &KeySource,
+    child_origin: &(Fingerprint, DerivationPath),
+    child_derivation: Option<&DerivationPath>,
+) -> Option<DerivationPath> {
+    if parent.0 == child_origin.0 {
+        let mut remaining_derivation =
+            DerivationPath::from(child_origin.1[..].strip_prefix(&parent.1[..])?);
+        remaining_derivation =
+            remaining_derivation.extend(child_derivation.unwrap_or(&DerivationPath::master()));
+        Some(remaining_derivation)
+    } else {
+        None
+    }
+}
+
+pub fn plan_satisfaction<Ak>(
+    desc: &Descriptor<DefiniteDescriptorKey>,
+    assets: &Assets<Ak>,
+) -> Option<Plan<Ak>>
+where
+    Ak: CanDerive + Clone,
+{
+    match desc {
+        Descriptor::Bare(_) => todo!(),
+        Descriptor::Pkh(_) => todo!(),
+        Descriptor::Wpkh(_) => todo!(),
+        Descriptor::Sh(_) => todo!(),
+        Descriptor::Wsh(_) => todo!(),
+        Descriptor::Tr(tr) => crate::plan_impls::plan_satisfaction_tr(tr, assets),
+    }
+}
diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/plan_impls.rs b/nursery/tmp_plan/bdk_tmp_plan/src/plan_impls.rs
new file mode 100644 (file)
index 0000000..79f5cf6
--- /dev/null
@@ -0,0 +1,323 @@
+use bdk_chain::{bitcoin, miniscript};
+use bitcoin::locktime::{Height, Time};
+use miniscript::Terminal;
+
+use super::*;
+
+impl<Ak> TermPlan<Ak> {
+    fn combine(self, other: Self) -> Option<Self> {
+        let min_locktime = {
+            match (self.min_locktime, other.min_locktime) {
+                (Some(lhs), Some(rhs)) => {
+                    if lhs.is_same_unit(rhs) {
+                        Some(if lhs.to_consensus_u32() > rhs.to_consensus_u32() {
+                            lhs
+                        } else {
+                            rhs
+                        })
+                    } else {
+                        return None;
+                    }
+                }
+                _ => self.min_locktime.or(other.min_locktime),
+            }
+        };
+
+        let min_sequence = {
+            match (self.min_sequence, other.min_sequence) {
+                (Some(lhs), Some(rhs)) => {
+                    if lhs.is_height_locked() == rhs.is_height_locked() {
+                        Some(if lhs.to_consensus_u32() > rhs.to_consensus_u32() {
+                            lhs
+                        } else {
+                            rhs
+                        })
+                    } else {
+                        return None;
+                    }
+                }
+                _ => self.min_sequence.or(other.min_sequence),
+            }
+        };
+
+        let mut template = self.template;
+        template.extend(other.template);
+
+        Some(Self {
+            min_locktime,
+            min_sequence,
+            template,
+        })
+    }
+
+    pub(crate) fn expected_size(&self) -> usize {
+        self.template.iter().map(|step| step.expected_size()).sum()
+    }
+}
+
+// impl crate::descriptor::Pkh<DefiniteDescriptorKey> {
+//     pub(crate) fn plan_satisfaction<Ak>(&self, assets: &Assets<Ak>) -> Option<Plan<Ak>>
+//     where
+//         Ak: CanDerive + Clone,
+//     {
+//         let (asset_key, derivation_hint) = assets.keys.iter().find_map(|asset_key| {
+//             let derivation_hint = asset_key.can_derive(self.as_inner())?;
+//             Some((asset_key, derivation_hint))
+//         })?;
+
+//         Some(Plan {
+//             template: vec![TemplateItem::Sign(PlanKey {
+//                 asset_key: asset_key.clone(),
+//                 descriptor_key: self.as_inner().clone(),
+//                 derivation_hint,
+//             })],
+//             target: Target::Legacy,
+//             set_locktime: None,
+//             set_sequence: None,
+//         })
+//     }
+// }
+
+// impl crate::descriptor::Wpkh<DefiniteDescriptorKey> {
+//     pub(crate) fn plan_satisfaction<Ak>(&self, assets: &Assets<Ak>) -> Option<Plan<Ak>>
+//     where
+//         Ak: CanDerive + Clone,
+//     {
+//         let (asset_key, derivation_hint) = assets.keys.iter().find_map(|asset_key| {
+//             let derivation_hint = asset_key.can_derive(self.as_inner())?;
+//             Some((asset_key, derivation_hint))
+//         })?;
+
+//         Some(Plan {
+//             template: vec![TemplateItem::Sign(PlanKey {
+//                 asset_key: asset_key.clone(),
+//                 descriptor_key: self.as_inner().clone(),
+//                 derivation_hint,
+//             })],
+//             target: Target::Segwitv0,
+//             set_locktime: None,
+//             set_sequence: None,
+//         })
+//     }
+// }
+
+pub(crate) fn plan_satisfaction_tr<Ak>(
+    tr: &miniscript::descriptor::Tr<DefiniteDescriptorKey>,
+    assets: &Assets<Ak>,
+) -> Option<Plan<Ak>>
+where
+    Ak: CanDerive + Clone,
+{
+    let key_path_spend = assets.keys.iter().find_map(|asset_key| {
+        let derivation_hint = asset_key.can_derive(tr.internal_key())?;
+        Some((asset_key, derivation_hint))
+    });
+
+    if let Some((asset_key, derivation_hint)) = key_path_spend {
+        return Some(Plan {
+            template: vec![TemplateItem::Sign(PlanKey {
+                asset_key: asset_key.clone(),
+                descriptor_key: tr.internal_key().clone(),
+                derivation_hint,
+            })],
+            target: Target::Segwitv1 {
+                tr: tr.clone(),
+                tr_plan: TrSpend::KeySpend,
+            },
+            set_locktime: None,
+            set_sequence: None,
+        });
+    }
+
+    let mut plans = tr
+        .iter_scripts()
+        .filter_map(|(_, ms)| Some((ms, (plan_steps(&ms.node, assets)?))))
+        .collect::<Vec<_>>();
+
+    plans.sort_by_cached_key(|(_, plan)| plan.expected_size());
+
+    let (script, best_plan) = plans.into_iter().next()?;
+
+    Some(Plan {
+        target: Target::Segwitv1 {
+            tr: tr.clone(),
+            tr_plan: TrSpend::LeafSpend {
+                script: script.encode(),
+                leaf_version: LeafVersion::TapScript,
+            },
+        },
+        set_locktime: best_plan.min_locktime.clone(),
+        set_sequence: best_plan.min_sequence.clone(),
+        template: best_plan.template,
+    })
+}
+
+#[derive(Debug)]
+struct TermPlan<Ak> {
+    pub min_locktime: Option<LockTime>,
+    pub min_sequence: Option<Sequence>,
+    pub template: Vec<TemplateItem<Ak>>,
+}
+
+impl<Ak> TermPlan<Ak> {
+    fn new(template: Vec<TemplateItem<Ak>>) -> Self {
+        TermPlan {
+            template,
+            ..Default::default()
+        }
+    }
+}
+
+impl<Ak> Default for TermPlan<Ak> {
+    fn default() -> Self {
+        Self {
+            min_locktime: Default::default(),
+            min_sequence: Default::default(),
+            template: Default::default(),
+        }
+    }
+}
+
+fn plan_steps<Ak: Clone + CanDerive, Ctx: ScriptContext>(
+    term: &Terminal<DefiniteDescriptorKey, Ctx>,
+    assets: &Assets<Ak>,
+) -> Option<TermPlan<Ak>> {
+    match term {
+        Terminal::True => Some(TermPlan::new(vec![])),
+        Terminal::False => return None,
+        Terminal::PkH(key) => {
+            let (asset_key, derivation_hint) = assets
+                .keys
+                .iter()
+                .find_map(|asset_key| Some((asset_key, asset_key.can_derive(key)?)))?;
+            Some(TermPlan::new(vec![
+                TemplateItem::Sign(PlanKey {
+                    asset_key: asset_key.clone(),
+                    derivation_hint,
+                    descriptor_key: key.clone(),
+                }),
+                TemplateItem::Pk { key: key.clone() },
+            ]))
+        }
+        Terminal::PkK(key) => {
+            let (asset_key, derivation_hint) = assets
+                .keys
+                .iter()
+                .find_map(|asset_key| Some((asset_key, asset_key.can_derive(key)?)))?;
+            Some(TermPlan::new(vec![TemplateItem::Sign(PlanKey {
+                asset_key: asset_key.clone(),
+                derivation_hint,
+                descriptor_key: key.clone(),
+            })]))
+        }
+        Terminal::RawPkH(_pk_hash) => {
+            /* TODO */
+            None
+        }
+        Terminal::After(locktime) => {
+            let max_locktime = assets.max_locktime?;
+            let locktime = LockTime::from(locktime);
+            let (height, time) = match max_locktime {
+                LockTime::Blocks(height) => (height, Time::from_consensus(0).unwrap()),
+                LockTime::Seconds(seconds) => (Height::from_consensus(0).unwrap(), seconds),
+            };
+            if max_locktime.is_satisfied_by(height, time) {
+                Some(TermPlan {
+                    min_locktime: Some(locktime),
+                    ..Default::default()
+                })
+            } else {
+                None
+            }
+        }
+        Terminal::Older(older) => {
+            // FIXME: older should be a height or time not a sequence.
+            let max_sequence = assets.txo_age?;
+            //TODO: this whole thing is probably wrong but upstream should provide a way of
+            // doing it properly.
+            if max_sequence.is_height_locked() == older.is_height_locked() {
+                if max_sequence.to_consensus_u32() >= older.to_consensus_u32() {
+                    Some(TermPlan {
+                        min_sequence: Some(*older),
+                        ..Default::default()
+                    })
+                } else {
+                    None
+                }
+            } else {
+                None
+            }
+        }
+        Terminal::Sha256(image) => {
+            if assets.sha256.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Sha256(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Hash256(image) => {
+            if assets.hash256.contains(image) {
+                Some(TermPlan::new(vec![TemplateItem::Hash256(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Ripemd160(image) => {
+            if assets.ripemd160.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Ripemd160(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Hash160(image) => {
+            if assets.hash160.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Hash160(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Alt(ms)
+        | Terminal::Swap(ms)
+        | Terminal::Check(ms)
+        | Terminal::Verify(ms)
+        | Terminal::NonZero(ms)
+        | Terminal::ZeroNotEqual(ms) => plan_steps(&ms.node, assets),
+        Terminal::DupIf(ms) => {
+            let mut plan = plan_steps(&ms.node, assets)?;
+            plan.template.push(TemplateItem::One);
+            Some(plan)
+        }
+        Terminal::AndV(l, r) | Terminal::AndB(l, r) => {
+            let lhs = plan_steps(&l.node, assets)?;
+            let rhs = plan_steps(&r.node, assets)?;
+            lhs.combine(rhs)
+        }
+        Terminal::AndOr(_, _, _) => todo!(),
+        Terminal::OrB(_, _) => todo!(),
+        Terminal::OrD(_, _) => todo!(),
+        Terminal::OrC(_, _) => todo!(),
+        Terminal::OrI(lhs, rhs) => {
+            let lplan = plan_steps(&lhs.node, assets).map(|mut plan| {
+                plan.template.push(TemplateItem::One);
+                plan
+            });
+            let rplan = plan_steps(&rhs.node, assets).map(|mut plan| {
+                plan.template.push(TemplateItem::Zero);
+                plan
+            });
+            match (lplan, rplan) {
+                (Some(lplan), Some(rplan)) => {
+                    if lplan.expected_size() <= rplan.expected_size() {
+                        Some(lplan)
+                    } else {
+                        Some(rplan)
+                    }
+                }
+                (lplan, rplan) => lplan.or(rplan),
+            }
+        }
+        Terminal::Thresh(_, _) => todo!(),
+        Terminal::Multi(_, _) => todo!(),
+        Terminal::MultiA(_, _) => todo!(),
+    }
+}
diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/requirements.rs b/nursery/tmp_plan/bdk_tmp_plan/src/requirements.rs
new file mode 100644 (file)
index 0000000..3aa6ac2
--- /dev/null
@@ -0,0 +1,218 @@
+use bdk_chain::{bitcoin, collections::*, miniscript};
+use core::ops::Deref;
+
+use bitcoin::{
+    hashes::{hash160, ripemd160, sha256},
+    psbt::Prevouts,
+    secp256k1::{KeyPair, Message, PublicKey, Signing, Verification},
+    util::{bip32, sighash, sighash::SighashCache, taproot},
+    EcdsaSighashType, SchnorrSighashType, Transaction, TxOut, XOnlyPublicKey,
+};
+
+use super::*;
+use miniscript::{
+    descriptor::{DescriptorSecretKey, KeyMap},
+    hash256,
+};
+
+#[derive(Clone, Debug)]
+/// Signatures and hash pre-images that must be provided to complete the plan.
+pub struct Requirements<Ak> {
+    /// required signatures
+    pub signatures: RequiredSignatures<Ak>,
+    /// required sha256 pre-images
+    pub sha256_images: HashSet<sha256::Hash>,
+    /// required hash160 pre-images
+    pub hash160_images: HashSet<hash160::Hash>,
+    /// required hash256 pre-images
+    pub hash256_images: HashSet<hash256::Hash>,
+    /// required ripemd160 pre-images
+    pub ripemd160_images: HashSet<ripemd160::Hash>,
+}
+
+impl<Ak> Default for RequiredSignatures<Ak> {
+    fn default() -> Self {
+        RequiredSignatures::Legacy {
+            keys: Default::default(),
+        }
+    }
+}
+
+impl<Ak> Default for Requirements<Ak> {
+    fn default() -> Self {
+        Self {
+            signatures: Default::default(),
+            sha256_images: Default::default(),
+            hash160_images: Default::default(),
+            hash256_images: Default::default(),
+            ripemd160_images: Default::default(),
+        }
+    }
+}
+
+impl<Ak> Requirements<Ak> {
+    /// Whether any hash pre-images are required in the plan
+    pub fn requires_hash_preimages(&self) -> bool {
+        !(self.sha256_images.is_empty()
+            && self.hash160_images.is_empty()
+            && self.hash256_images.is_empty()
+            && self.ripemd160_images.is_empty())
+    }
+}
+
+/// The signatures required to complete the plan
+#[derive(Clone, Debug)]
+pub enum RequiredSignatures<Ak> {
+    /// Legacy ECDSA signatures are required
+    Legacy { keys: Vec<PlanKey<Ak>> },
+    /// Segwitv0 ECDSA signatures are required
+    Segwitv0 { keys: Vec<PlanKey<Ak>> },
+    /// A Taproot key spend signature is required
+    TapKey {
+        /// the internal key
+        plan_key: PlanKey<Ak>,
+        /// The merkle root of the taproot output
+        merkle_root: Option<TapBranchHash>,
+    },
+    /// Taproot script path signatures are required
+    TapScript {
+        /// The leaf hash of the script being used
+        leaf_hash: TapLeafHash,
+        /// The keys in the script that require signatures
+        plan_keys: Vec<PlanKey<Ak>>,
+    },
+}
+
+#[derive(Clone, Debug)]
+pub enum SigningError {
+    SigHashError(sighash::Error),
+    DerivationError(bip32::Error),
+}
+
+impl From<sighash::Error> for SigningError {
+    fn from(e: sighash::Error) -> Self {
+        Self::SigHashError(e)
+    }
+}
+
+impl core::fmt::Display for SigningError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            SigningError::SigHashError(e) => e.fmt(f),
+            SigningError::DerivationError(e) => e.fmt(f),
+        }
+    }
+}
+
+impl From<bip32::Error> for SigningError {
+    fn from(e: bip32::Error) -> Self {
+        Self::DerivationError(e)
+    }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for SigningError {}
+
+impl RequiredSignatures<DescriptorPublicKey> {
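+    /// Sign input `input_index` of the transaction wrapped by `sighash_cache` with whichever plan
+    /// keys can be found in `keymap`, inserting the produced schnorr signatures into `auth_data`.
+    /// Returns `Ok(true)` if at least one signature was added.
+    ///
+    /// A rough call sketch (not compiled; `tx`, `keymap`, `prevouts`, `material`, and `secp` are
+    /// assumed to already exist):
+    ///
+    /// ```ignore
+    /// let mut cache = SighashCache::new(&tx);
+    /// let signed = required_signatures.sign_with_keymap(
+    ///     0,
+    ///     &keymap,
+    ///     &Prevouts::All(&prevouts),
+    ///     None,
+    ///     None,
+    ///     &mut cache,
+    ///     &mut material,
+    ///     &secp,
+    /// )?;
+    /// ```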
+    pub fn sign_with_keymap<T: Deref<Target = Transaction>>(
+        &self,
+        input_index: usize,
+        keymap: &KeyMap,
+        prevouts: &Prevouts<'_, impl core::borrow::Borrow<TxOut>>,
+        schnorr_sighashty: Option<SchnorrSighashType>,
+        _ecdsa_sighashty: Option<EcdsaSighashType>,
+        sighash_cache: &mut SighashCache<T>,
+        auth_data: &mut SatisfactionMaterial,
+        secp: &Secp256k1<impl Signing + Verification>,
+    ) -> Result<bool, SigningError> {
+        match self {
+            RequiredSignatures::Legacy { .. } | RequiredSignatures::Segwitv0 { .. } => todo!(),
+            RequiredSignatures::TapKey {
+                plan_key,
+                merkle_root,
+            } => {
+                let schnorr_sighashty = schnorr_sighashty.unwrap_or(SchnorrSighashType::Default);
+                let sighash = sighash_cache.taproot_key_spend_signature_hash(
+                    input_index,
+                    prevouts,
+                    schnorr_sighashty,
+                )?;
+                let secret_key = match keymap.get(&plan_key.asset_key) {
+                    Some(secret_key) => secret_key,
+                    None => return Ok(false),
+                };
+                let secret_key = match secret_key {
+                    DescriptorSecretKey::Single(single) => single.key.inner,
+                    DescriptorSecretKey::XPrv(xprv) => {
+                        xprv.xkey
+                            .derive_priv(&secp, &plan_key.derivation_hint)?
+                            .private_key
+                    }
+                };
+
+                let pubkey = PublicKey::from_secret_key(&secp, &secret_key);
+                let x_only_pubkey = XOnlyPublicKey::from(pubkey);
+
+                let tweak =
+                    taproot::TapTweakHash::from_key_and_tweak(x_only_pubkey, merkle_root.clone());
+                let keypair = KeyPair::from_secret_key(&secp, &secret_key.clone())
+                    .add_xonly_tweak(&secp, &tweak.to_scalar())
+                    .unwrap();
+
+                let msg = Message::from_slice(sighash.as_ref()).expect("Sighashes are 32 bytes");
+                let sig = secp.sign_schnorr_no_aux_rand(&msg, &keypair);
+
+                let bitcoin_sig = SchnorrSig {
+                    sig,
+                    hash_ty: schnorr_sighashty,
+                };
+
+                auth_data
+                    .schnorr_sigs
+                    .insert(plan_key.descriptor_key.clone(), bitcoin_sig);
+                Ok(true)
+            }
+            RequiredSignatures::TapScript {
+                leaf_hash,
+                plan_keys,
+            } => {
+                let sighash_type = schnorr_sighashty.unwrap_or(SchnorrSighashType::Default);
+                let sighash = sighash_cache.taproot_script_spend_signature_hash(
+                    input_index,
+                    prevouts,
+                    *leaf_hash,
+                    sighash_type,
+                )?;
+
+                let mut modified = false;
+
+                for plan_key in plan_keys {
+                    if let Some(secret_key) = keymap.get(&plan_key.asset_key) {
+                        let secret_key = match secret_key {
+                            DescriptorSecretKey::Single(single) => single.key.inner,
+                            DescriptorSecretKey::XPrv(xprv) => {
+                                xprv.xkey
+                                    .derive_priv(&secp, &plan_key.derivation_hint)?
+                                    .private_key
+                            }
+                        };
+                        let keypair = KeyPair::from_secret_key(&secp, &secret_key.clone());
+                        let msg =
+                            Message::from_slice(sighash.as_ref()).expect("Sighashes are 32 bytes");
+                        let sig = secp.sign_schnorr_no_aux_rand(&msg, &keypair);
+                        let bitcoin_sig = SchnorrSig {
+                            sig,
+                            hash_ty: sighash_type,
+                        };
+
+                        auth_data
+                            .schnorr_sigs
+                            .insert(plan_key.descriptor_key.clone(), bitcoin_sig);
+                        modified = true;
+                    }
+                }
+                Ok(modified)
+            }
+        }
+    }
+}
diff --git a/nursery/tmp_plan/bdk_tmp_plan/src/template.rs b/nursery/tmp_plan/bdk_tmp_plan/src/template.rs
new file mode 100644 (file)
index 0000000..cf94540
--- /dev/null
@@ -0,0 +1,76 @@
+use bdk_chain::{bitcoin, miniscript};
+use bitcoin::{
+    hashes::{hash160, ripemd160, sha256},
+    util::bip32::DerivationPath,
+};
+
+use super::*;
+use crate::{hash256, varint_len, DefiniteDescriptorKey};
+
+#[derive(Clone, Debug)]
+pub(crate) enum TemplateItem<Ak> {
+    Sign(PlanKey<Ak>),
+    Pk { key: DefiniteDescriptorKey },
+    One,
+    Zero,
+    Sha256(sha256::Hash),
+    Hash256(hash256::Hash),
+    Ripemd160(ripemd160::Hash),
+    Hash160(hash160::Hash),
+}
+
+/// A plan key pairs the asset key that was originally provided with the descriptor key it
+/// purports to be able to derive, along with a "hint" on how to derive it.
+#[derive(Clone, Debug)]
+pub struct PlanKey<Ak> {
+    /// The key the planner will sign with
+    pub asset_key: Ak,
+    /// A hint for how to get from the asset key to the concrete key we need to sign with.
+    pub derivation_hint: DerivationPath,
+    /// The key in the descriptor that we are satisfying with the signature from the asset
+    /// key.
+    pub descriptor_key: DefiniteDescriptorKey,
+}
+
+impl<Ak> TemplateItem<Ak> {
+    pub fn expected_size(&self) -> usize {
+        match self {
+            TemplateItem::Sign { .. } => 64, /* size of a signature; TODO: take the sighash flag into consideration */
+            TemplateItem::Pk { .. } => 32,
+            TemplateItem::One => varint_len(1),
+            TemplateItem::Zero => 0, /* zero means an empty witness element */
+            // I'm not sure if it should be 32 here (it's a 20 byte hash) but that's what other
+            // parts of the code were doing.
+            TemplateItem::Hash160(_) | TemplateItem::Ripemd160(_) => 32,
+            TemplateItem::Sha256(_) | TemplateItem::Hash256(_) => 32,
+        }
+    }
+
+    // this can only be called if we are sure that auth_data has what we need
+    pub(super) fn to_witness_stack(&self, auth_data: &SatisfactionMaterial) -> Vec<Vec<u8>> {
+        match self {
+            TemplateItem::Sign(plan_key) => {
+                vec![auth_data
+                    .schnorr_sigs
+                    .get(&plan_key.descriptor_key)
+                    .unwrap()
+                    .to_vec()]
+            }
+            TemplateItem::One => vec![vec![1]],
+            TemplateItem::Zero => vec![vec![]],
+            TemplateItem::Sha256(image) => {
+                vec![auth_data.sha256_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Hash160(image) => {
+                vec![auth_data.hash160_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Ripemd160(image) => {
+                vec![auth_data.ripemd160_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Hash256(image) => {
+                vec![auth_data.hash256_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Pk { key } => vec![key.to_public_key().to_bytes()],
+        }
+    }
+}
diff --git a/nursery/tmp_plan/src/lib.rs b/nursery/tmp_plan/src/lib.rs
new file mode 100644 (file)
index 0000000..a64d449
--- /dev/null
@@ -0,0 +1,436 @@
+#![allow(unused)]
+#![allow(missing_docs)]
+//! A spending plan (or *plan* for short) is a representation of a particular spending path on a
+//! descriptor. This allows us to analyze a choice of spending path without producing any
+//! signatures or other witness data for it.
+//!
+//! To make a plan, you provide the descriptor with "assets": which keys you are able to use, which
+//! hash pre-images you have access to, the current block height, etc.
+//!
+//! Once you have a plan, it can tell you its expected satisfaction weight, which is useful for
+//! coin selection. Furthermore, it tells you which subset of those keys and hash pre-images you
+//! will actually need, as well as what locktime or sequence number you need to set.
+//!
+//! Once you've obtained the signatures, hash pre-images, etc. required by the plan, it can create a
+//! witness/script_sig for the input.
+use bdk_chain::{bitcoin, collections::*, miniscript};
+use bitcoin::{
+    blockdata::{locktime::LockTime, transaction::Sequence},
+    hashes::{hash160, ripemd160, sha256},
+    secp256k1::Secp256k1,
+    util::{
+        address::WitnessVersion,
+        bip32::{DerivationPath, Fingerprint, KeySource},
+        taproot::{LeafVersion, TapBranchHash, TapLeafHash},
+    },
+    EcdsaSig, SchnorrSig, Script, TxIn, Witness,
+};
+use miniscript::{
+    descriptor::{InnerXKey, Tr},
+    hash256, DefiniteDescriptorKey, Descriptor, DescriptorPublicKey, ScriptContext, ToPublicKey,
+};
+
+pub(crate) fn varint_len(v: usize) -> usize {
+    bitcoin::VarInt(v as u64).len() as usize
+}
+
+mod plan_impls;
+mod requirements;
+mod template;
+pub use requirements::*;
+pub use template::PlanKey;
+use template::TemplateItem;
+
+#[derive(Clone, Debug)]
+enum TrSpend {
+    KeySpend,
+    LeafSpend {
+        script: Script,
+        leaf_version: LeafVersion,
+    },
+}
+
+#[derive(Clone, Debug)]
+enum Target {
+    Legacy,
+    Segwitv0 {
+        script_code: Script,
+    },
+    Segwitv1 {
+        tr: Tr<DefiniteDescriptorKey>,
+        tr_plan: TrSpend,
+    },
+}
+
+impl Target {}
+
+#[derive(Clone, Debug)]
+/// A plan represents a particular spending path for a descriptor.
+///
+/// See the module level documentation for more info.
+pub struct Plan<AK> {
+    template: Vec<TemplateItem<AK>>,
+    target: Target,
+    set_locktime: Option<LockTime>,
+    set_sequence: Option<Sequence>,
+}
+
+impl Default for Target {
+    fn default() -> Self {
+        Target::Legacy
+    }
+}
+
+#[derive(Clone, Debug, Default)]
+/// Signatures and hash pre-images that can be used to complete a plan.
+pub struct SatisfactionMaterial {
+    /// Schnorr signatures under their keys
+    pub schnorr_sigs: BTreeMap<DefiniteDescriptorKey, SchnorrSig>,
+    /// ECDSA signatures under their keys
+    pub ecdsa_sigs: BTreeMap<DefiniteDescriptorKey, EcdsaSig>,
+    /// SHA256 pre-images under their images
+    pub sha256_preimages: BTreeMap<sha256::Hash, Vec<u8>>,
+    /// hash160 pre-images under their images
+    pub hash160_preimages: BTreeMap<hash160::Hash, Vec<u8>>,
+    /// hash256 pre-images under their images
+    pub hash256_preimages: BTreeMap<hash256::Hash, Vec<u8>>,
+    /// ripemd160 pre-images under their images
+    pub ripemd160_preimages: BTreeMap<ripemd160::Hash, Vec<u8>>,
+}
+
+impl<Ak> Plan<Ak>
+where
+    Ak: Clone,
+{
+    /// The expected satisfaction weight for the plan if it is completed.
+    pub fn expected_weight(&self) -> usize {
+        let script_sig_size = match self.target {
+            Target::Legacy => unimplemented!(), // self
+            // .template
+            // .iter()
+            // .map(|step| {
+            //     let size = step.expected_size();
+            //     size + push_opcode_size(size)
+            // })
+            // .sum()
+            Target::Segwitv0 { .. } | Target::Segwitv1 { .. } => 1,
+        };
+        let witness_elem_sizes: Option<Vec<usize>> = match &self.target {
+            Target::Legacy => None,
+            Target::Segwitv0 { .. } => Some(
+                self.template
+                    .iter()
+                    .map(|step| step.expected_size())
+                    .collect(),
+            ),
+            Target::Segwitv1 { tr, tr_plan } => {
+                let mut witness_elems = self
+                    .template
+                    .iter()
+                    .map(|step| step.expected_size())
+                    .collect::<Vec<_>>();
+
+                if let TrSpend::LeafSpend {
+                    script,
+                    leaf_version,
+                } = tr_plan
+                {
+                    let control_block = tr
+                        .spend_info()
+                        .control_block(&(script.clone(), *leaf_version))
+                        .expect("must exist");
+                    witness_elems.push(script.len());
+                    witness_elems.push(control_block.size());
+                }
+
+                Some(witness_elems)
+            }
+        };
+
+        let witness_size: usize = match witness_elem_sizes {
+            Some(elems) => {
+                varint_len(elems.len())
+                    + elems
+                        .into_iter()
+                        .map(|elem| varint_len(elem) + elem)
+                        .sum::<usize>()
+            }
+            None => 0,
+        };
+
+        script_sig_size * 4 + witness_size
+    }
+
+    pub fn requirements(&self) -> Requirements<Ak> {
+        match self.try_complete(&SatisfactionMaterial::default()) {
+            PlanState::Complete { .. } => Requirements::default(),
+            PlanState::Incomplete(requirements) => requirements,
+        }
+    }
+
+    pub fn try_complete(&self, auth_data: &SatisfactionMaterial) -> PlanState<Ak> {
+        let unsatisfied_items = self
+            .template
+            .iter()
+            .filter(|step| match step {
+                TemplateItem::Sign(key) => {
+                    !auth_data.schnorr_sigs.contains_key(&key.descriptor_key)
+                }
+                TemplateItem::Hash160(image) => !auth_data.hash160_preimages.contains_key(image),
+                TemplateItem::Hash256(image) => !auth_data.hash256_preimages.contains_key(image),
+                TemplateItem::Sha256(image) => !auth_data.sha256_preimages.contains_key(image),
+                TemplateItem::Ripemd160(image) => {
+                    !auth_data.ripemd160_preimages.contains_key(image)
+                }
+                TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => false,
+            })
+            .collect::<Vec<_>>();
+
+        if unsatisfied_items.is_empty() {
+            let mut witness = self
+                .template
+                .iter()
+                .flat_map(|step| step.to_witness_stack(&auth_data))
+                .collect::<Vec<_>>();
+            match &self.target {
+                Target::Segwitv0 { .. } => todo!(),
+                Target::Legacy => todo!(),
+                Target::Segwitv1 {
+                    tr_plan: TrSpend::KeySpend,
+                    ..
+                } => PlanState::Complete {
+                    final_script_sig: None,
+                    final_script_witness: Some(Witness::from_vec(witness)),
+                },
+                Target::Segwitv1 {
+                    tr,
+                    tr_plan:
+                        TrSpend::LeafSpend {
+                            script,
+                            leaf_version,
+                        },
+                } => {
+                    let spend_info = tr.spend_info();
+                    let control_block = spend_info
+                        .control_block(&(script.clone(), *leaf_version))
+                        .expect("must exist");
+                    witness.push(script.clone().into_bytes());
+                    witness.push(control_block.serialize());
+
+                    PlanState::Complete {
+                        final_script_sig: None,
+                        final_script_witness: Some(Witness::from_vec(witness)),
+                    }
+                }
+            }
+        } else {
+            let mut requirements = Requirements::default();
+
+            match &self.target {
+                Target::Legacy => {
+                    todo!()
+                }
+                Target::Segwitv0 { .. } => {
+                    todo!()
+                }
+                Target::Segwitv1 { tr, tr_plan } => {
+                    let spend_info = tr.spend_info();
+                    match tr_plan {
+                        TrSpend::KeySpend => match &self.template[..] {
+                            [TemplateItem::Sign(ref plan_key)] => {
+                                requirements.signatures = RequiredSignatures::TapKey {
+                                    merkle_root: spend_info.merkle_root(),
+                                    plan_key: plan_key.clone(),
+                                };
+                            }
+                            _ => unreachable!("tapkey spend will always have only one sign step"),
+                        },
+                        TrSpend::LeafSpend {
+                            script,
+                            leaf_version,
+                        } => {
+                            let leaf_hash = TapLeafHash::from_script(&script, *leaf_version);
+                            requirements.signatures = RequiredSignatures::TapScript {
+                                leaf_hash,
+                                plan_keys: vec![],
+                            }
+                        }
+                    }
+                }
+            }
+
+            let required_signatures = match requirements.signatures {
+                RequiredSignatures::Legacy { .. } => todo!(),
+                RequiredSignatures::Segwitv0 { .. } => todo!(),
+                RequiredSignatures::TapKey { .. } => return PlanState::Incomplete(requirements),
+                RequiredSignatures::TapScript {
+                    plan_keys: ref mut keys,
+                    ..
+                } => keys,
+            };
+
+            for step in unsatisfied_items {
+                match step {
+                    TemplateItem::Sign(plan_key) => {
+                        required_signatures.push(plan_key.clone());
+                    }
+                    TemplateItem::Hash160(image) => {
+                        requirements.hash160_images.insert(image.clone());
+                    }
+                    TemplateItem::Hash256(image) => {
+                        requirements.hash256_images.insert(image.clone());
+                    }
+                    TemplateItem::Sha256(image) => {
+                        requirements.sha256_images.insert(image.clone());
+                    }
+                    TemplateItem::Ripemd160(image) => {
+                        requirements.ripemd160_images.insert(image.clone());
+                    }
+                    TemplateItem::Pk { .. } | TemplateItem::One | TemplateItem::Zero => { /* no requirements */
+                    }
+                }
+            }
+
+            PlanState::Incomplete(requirements)
+        }
+    }
+
+    /// Witness version for the plan
+    pub fn witness_version(&self) -> Option<WitnessVersion> {
+        match self.target {
+            Target::Legacy => None,
+            Target::Segwitv0 { .. } => Some(WitnessVersion::V0),
+            Target::Segwitv1 { .. } => Some(WitnessVersion::V1),
+        }
+    }
+
+    /// The minimum required locktime height or time on the transaction using the plan.
+    pub fn required_locktime(&self) -> Option<LockTime> {
+        self.set_locktime.clone()
+    }
+
+    /// The minimum required sequence (height or time) on the input to satisfy the plan
+    pub fn required_sequence(&self) -> Option<Sequence> {
+        self.set_sequence.clone()
+    }
+
+    /// The minimum transaction version required on the transaction using the plan.
+    pub fn min_version(&self) -> Option<u32> {
+        if let Some(_) = self.set_sequence {
+            Some(2)
+        } else {
+            Some(1)
+        }
+    }
+}
+
+/// The returned value from [`Plan::try_complete`].
+pub enum PlanState<Ak> {
+    /// The plan is complete
+    Complete {
+        /// The script sig that should be set on the input
+        final_script_sig: Option<Script>,
+        /// The witness that should be set on the input
+        final_script_witness: Option<Witness>,
+    },
+    Incomplete(Requirements<Ak>),
+}
+
+#[derive(Clone, Debug)]
+pub struct Assets<K> {
+    pub keys: Vec<K>,
+    pub txo_age: Option<Sequence>,
+    pub max_locktime: Option<LockTime>,
+    pub sha256: Vec<sha256::Hash>,
+    pub hash256: Vec<hash256::Hash>,
+    pub ripemd160: Vec<ripemd160::Hash>,
+    pub hash160: Vec<hash160::Hash>,
+}
+
+impl<K> Default for Assets<K> {
+    fn default() -> Self {
+        Self {
+            keys: Default::default(),
+            txo_age: Default::default(),
+            max_locktime: Default::default(),
+            sha256: Default::default(),
+            hash256: Default::default(),
+            ripemd160: Default::default(),
+            hash160: Default::default(),
+        }
+    }
+}
+
+pub trait CanDerive {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath>;
+}
+
+impl CanDerive for KeySource {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath> {
+        match DescriptorPublicKey::from(key.clone()) {
+            DescriptorPublicKey::Single(single_pub) => {
+                path_to_child(self, single_pub.origin.as_ref()?, None)
+            }
+            DescriptorPublicKey::XPub(dxk) => {
+                let origin = dxk.origin.clone().unwrap_or_else(|| {
+                    let secp = Secp256k1::signing_only();
+                    (dxk.xkey.xkey_fingerprint(&secp), DerivationPath::master())
+                });
+
+                path_to_child(self, &origin, Some(&dxk.derivation_path))
+            }
+        }
+    }
+}
+
+impl CanDerive for DescriptorPublicKey {
+    fn can_derive(&self, key: &DefiniteDescriptorKey) -> Option<DerivationPath> {
+        match (self, DescriptorPublicKey::from(key.clone())) {
+            (parent, child) if parent == &child => Some(DerivationPath::master()),
+            (DescriptorPublicKey::XPub(parent), _) => {
+                let origin = parent.origin.clone().unwrap_or_else(|| {
+                    let secp = Secp256k1::signing_only();
+                    (
+                        parent.xkey.xkey_fingerprint(&secp),
+                        DerivationPath::master(),
+                    )
+                });
+                KeySource::from(origin).can_derive(key)
+            }
+            _ => None,
+        }
+    }
+}
+
+fn path_to_child(
+    parent: &KeySource,
+    child_origin: &(Fingerprint, DerivationPath),
+    child_derivation: Option<&DerivationPath>,
+) -> Option<DerivationPath> {
+    if parent.0 == child_origin.0 {
+        let mut remaining_derivation =
+            DerivationPath::from(child_origin.1[..].strip_prefix(&parent.1[..])?);
+        remaining_derivation =
+            remaining_derivation.extend(child_derivation.unwrap_or(&DerivationPath::master()));
+        Some(remaining_derivation)
+    } else {
+        None
+    }
+}
+
+pub fn plan_satisfaction<Ak>(
+    desc: &Descriptor<DefiniteDescriptorKey>,
+    assets: &Assets<Ak>,
+) -> Option<Plan<Ak>>
+where
+    Ak: CanDerive + Clone,
+{
+    match desc {
+        Descriptor::Bare(_) => todo!(),
+        Descriptor::Pkh(_) => todo!(),
+        Descriptor::Wpkh(_) => todo!(),
+        Descriptor::Sh(_) => todo!(),
+        Descriptor::Wsh(_) => todo!(),
+        Descriptor::Tr(tr) => crate::plan_impls::plan_satisfaction_tr(tr, assets),
+    }
+}
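The items above (`Assets`, `CanDerive`, `PlanState`, and `plan_satisfaction`) form the entry point of the planner: the caller lists what it will eventually be able to provide (keys, timelocks, hash pre-images) and asks whether a descriptor can be satisfied from that. A minimal usage sketch follows, assuming the crate is used as `bdk_tmp_plan` and these items are re-exported at its root; the function name is illustrative and not part of this commit. Note that only taproot descriptors are handled at this stage, since the other arms of `plan_satisfaction` are still `todo!()`.

use bdk_chain::miniscript::{descriptor::DefiniteDescriptorKey, Descriptor, DescriptorPublicKey};
use bdk_tmp_plan::{plan_satisfaction, Assets};

/// Returns true if `desc` can be satisfied using only signatures derived from `asset_xpub`.
/// NOTE: panics on non-taproot descriptors for now, because those arms are still `todo!()`.
fn can_spend_with(
    desc: &Descriptor<DefiniteDescriptorKey>,
    asset_xpub: DescriptorPublicKey,
) -> bool {
    // `DescriptorPublicKey` implements `CanDerive`, so it can serve as the asset key type;
    // timelocks and hash pre-images are left at their empty defaults.
    let assets = Assets {
        keys: vec![asset_xpub],
        ..Default::default()
    };
    plan_satisfaction(desc, &assets).is_some()
}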
diff --git a/nursery/tmp_plan/src/plan_impls.rs b/nursery/tmp_plan/src/plan_impls.rs
new file mode 100644 (file)
index 0000000..79f5cf6
--- /dev/null
@@ -0,0 +1,323 @@
+use bdk_chain::{bitcoin, miniscript};
+use bitcoin::locktime::{Height, Time};
+use miniscript::Terminal;
+
+use super::*;
+
+impl<Ak> TermPlan<Ak> {
+    fn combine(self, other: Self) -> Option<Self> {
+        let min_locktime = {
+            match (self.min_locktime, other.min_locktime) {
+                (Some(lhs), Some(rhs)) => {
+                    if lhs.is_same_unit(rhs) {
+                        Some(if lhs.to_consensus_u32() > rhs.to_consensus_u32() {
+                            lhs
+                        } else {
+                            rhs
+                        })
+                    } else {
+                        return None;
+                    }
+                }
+                _ => self.min_locktime.or(other.min_locktime),
+            }
+        };
+
+        let min_sequence = {
+            match (self.min_sequence, other.min_sequence) {
+                (Some(lhs), Some(rhs)) => {
+                    if lhs.is_height_locked() == rhs.is_height_locked() {
+                        Some(if lhs.to_consensus_u32() > rhs.to_consensus_u32() {
+                            lhs
+                        } else {
+                            rhs
+                        })
+                    } else {
+                        return None;
+                    }
+                }
+                _ => self.min_sequence.or(other.min_sequence),
+            }
+        };
+
+        let mut template = self.template;
+        template.extend(other.template);
+
+        Some(Self {
+            min_locktime,
+            min_sequence,
+            template,
+        })
+    }
+
+    pub(crate) fn expected_size(&self) -> usize {
+        self.template.iter().map(|step| step.expected_size()).sum()
+    }
+}
+
+// impl crate::descriptor::Pkh<DefiniteDescriptorKey> {
+//     pub(crate) fn plan_satisfaction<Ak>(&self, assets: &Assets<Ak>) -> Option<Plan<Ak>>
+//     where
+//         Ak: CanDerive + Clone,
+//     {
+//         let (asset_key, derivation_hint) = assets.keys.iter().find_map(|asset_key| {
+//             let derivation_hint = asset_key.can_derive(self.as_inner())?;
+//             Some((asset_key, derivation_hint))
+//         })?;
+
+//         Some(Plan {
+//             template: vec![TemplateItem::Sign(PlanKey {
+//                 asset_key: asset_key.clone(),
+//                 descriptor_key: self.as_inner().clone(),
+//                 derivation_hint,
+//             })],
+//             target: Target::Legacy,
+//             set_locktime: None,
+//             set_sequence: None,
+//         })
+//     }
+// }
+
+// impl crate::descriptor::Wpkh<DefiniteDescriptorKey> {
+//     pub(crate) fn plan_satisfaction<Ak>(&self, assets: &Assets<Ak>) -> Option<Plan<Ak>>
+//     where
+//         Ak: CanDerive + Clone,
+//     {
+//         let (asset_key, derivation_hint) = assets.keys.iter().find_map(|asset_key| {
+//             let derivation_hint = asset_key.can_derive(self.as_inner())?;
+//             Some((asset_key, derivation_hint))
+//         })?;
+
+//         Some(Plan {
+//             template: vec![TemplateItem::Sign(PlanKey {
+//                 asset_key: asset_key.clone(),
+//                 descriptor_key: self.as_inner().clone(),
+//                 derivation_hint,
+//             })],
+//             target: Target::Segwitv0,
+//             set_locktime: None,
+//             set_sequence: None,
+//         })
+//     }
+// }
+
+pub(crate) fn plan_satisfaction_tr<Ak>(
+    tr: &miniscript::descriptor::Tr<DefiniteDescriptorKey>,
+    assets: &Assets<Ak>,
+) -> Option<Plan<Ak>>
+where
+    Ak: CanDerive + Clone,
+{
+    let key_path_spend = assets.keys.iter().find_map(|asset_key| {
+        let derivation_hint = asset_key.can_derive(tr.internal_key())?;
+        Some((asset_key, derivation_hint))
+    });
+
+    if let Some((asset_key, derivation_hint)) = key_path_spend {
+        return Some(Plan {
+            template: vec![TemplateItem::Sign(PlanKey {
+                asset_key: asset_key.clone(),
+                descriptor_key: tr.internal_key().clone(),
+                derivation_hint,
+            })],
+            target: Target::Segwitv1 {
+                tr: tr.clone(),
+                tr_plan: TrSpend::KeySpend,
+            },
+            set_locktime: None,
+            set_sequence: None,
+        });
+    }
+
+    let mut plans = tr
+        .iter_scripts()
+        .filter_map(|(_, ms)| Some((ms, plan_steps(&ms.node, assets)?)))
+        .collect::<Vec<_>>();
+
+    plans.sort_by_cached_key(|(_, plan)| plan.expected_size());
+
+    let (script, best_plan) = plans.into_iter().next()?;
+
+    Some(Plan {
+        target: Target::Segwitv1 {
+            tr: tr.clone(),
+            tr_plan: TrSpend::LeafSpend {
+                script: script.encode(),
+                leaf_version: LeafVersion::TapScript,
+            },
+        },
+        set_locktime: best_plan.min_locktime.clone(),
+        set_sequence: best_plan.min_sequence.clone(),
+        template: best_plan.template,
+    })
+}
+
+#[derive(Debug)]
+struct TermPlan<Ak> {
+    pub min_locktime: Option<LockTime>,
+    pub min_sequence: Option<Sequence>,
+    pub template: Vec<TemplateItem<Ak>>,
+}
+
+impl<Ak> TermPlan<Ak> {
+    fn new(template: Vec<TemplateItem<Ak>>) -> Self {
+        TermPlan {
+            template,
+            ..Default::default()
+        }
+    }
+}
+
+impl<Ak> Default for TermPlan<Ak> {
+    fn default() -> Self {
+        Self {
+            min_locktime: Default::default(),
+            min_sequence: Default::default(),
+            template: Default::default(),
+        }
+    }
+}
+
+fn plan_steps<Ak: Clone + CanDerive, Ctx: ScriptContext>(
+    term: &Terminal<DefiniteDescriptorKey, Ctx>,
+    assets: &Assets<Ak>,
+) -> Option<TermPlan<Ak>> {
+    match term {
+        Terminal::True => Some(TermPlan::new(vec![])),
+        Terminal::False => return None,
+        Terminal::PkH(key) => {
+            let (asset_key, derivation_hint) = assets
+                .keys
+                .iter()
+                .find_map(|asset_key| Some((asset_key, asset_key.can_derive(key)?)))?;
+            Some(TermPlan::new(vec![
+                TemplateItem::Sign(PlanKey {
+                    asset_key: asset_key.clone(),
+                    derivation_hint,
+                    descriptor_key: key.clone(),
+                }),
+                TemplateItem::Pk { key: key.clone() },
+            ]))
+        }
+        Terminal::PkK(key) => {
+            let (asset_key, derivation_hint) = assets
+                .keys
+                .iter()
+                .find_map(|asset_key| Some((asset_key, asset_key.can_derive(key)?)))?;
+            Some(TermPlan::new(vec![TemplateItem::Sign(PlanKey {
+                asset_key: asset_key.clone(),
+                derivation_hint,
+                descriptor_key: key.clone(),
+            })]))
+        }
+        Terminal::RawPkH(_pk_hash) => {
+            /* TODO */
+            None
+        }
+        Terminal::After(locktime) => {
+            let max_locktime = assets.max_locktime?;
+            let locktime = LockTime::from(locktime);
+            let (height, time) = match max_locktime {
+                LockTime::Blocks(height) => (height, Time::from_consensus(0).unwrap()),
+                LockTime::Seconds(seconds) => (Height::from_consensus(0).unwrap(), seconds),
+            };
+            if max_locktime.is_satisfied_by(height, time) {
+                Some(TermPlan {
+                    min_locktime: Some(locktime),
+                    ..Default::default()
+                })
+            } else {
+                None
+            }
+        }
+        Terminal::Older(older) => {
+            // FIXME: older should be a height or time not a sequence.
+            let max_sequence = assets.txo_age?;
+            // TODO: this whole thing is probably wrong, but upstream should provide a way of
+            // doing it properly.
+            if max_sequence.is_height_locked() == older.is_height_locked() {
+                if max_sequence.to_consensus_u32() >= older.to_consensus_u32() {
+                    Some(TermPlan {
+                        min_sequence: Some(*older),
+                        ..Default::default()
+                    })
+                } else {
+                    None
+                }
+            } else {
+                None
+            }
+        }
+        Terminal::Sha256(image) => {
+            if assets.sha256.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Sha256(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Hash256(image) => {
+            if assets.hash256.contains(image) {
+                Some(TermPlan::new(vec![TemplateItem::Hash256(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Ripemd160(image) => {
+            if assets.ripemd160.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Ripemd160(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Hash160(image) => {
+            if assets.hash160.contains(&image) {
+                Some(TermPlan::new(vec![TemplateItem::Hash160(image.clone())]))
+            } else {
+                None
+            }
+        }
+        Terminal::Alt(ms)
+        | Terminal::Swap(ms)
+        | Terminal::Check(ms)
+        | Terminal::Verify(ms)
+        | Terminal::NonZero(ms)
+        | Terminal::ZeroNotEqual(ms) => plan_steps(&ms.node, assets),
+        Terminal::DupIf(ms) => {
+            let mut plan = plan_steps(&ms.node, assets)?;
+            plan.template.push(TemplateItem::One);
+            Some(plan)
+        }
+        Terminal::AndV(l, r) | Terminal::AndB(l, r) => {
+            let lhs = plan_steps(&l.node, assets)?;
+            let rhs = plan_steps(&r.node, assets)?;
+            lhs.combine(rhs)
+        }
+        Terminal::AndOr(_, _, _) => todo!(),
+        Terminal::OrB(_, _) => todo!(),
+        Terminal::OrD(_, _) => todo!(),
+        Terminal::OrC(_, _) => todo!(),
+        Terminal::OrI(lhs, rhs) => {
+            let lplan = plan_steps(&lhs.node, assets).map(|mut plan| {
+                plan.template.push(TemplateItem::One);
+                plan
+            });
+            let rplan = plan_steps(&rhs.node, assets).map(|mut plan| {
+                plan.template.push(TemplateItem::Zero);
+                plan
+            });
+            match (lplan, rplan) {
+                (Some(lplan), Some(rplan)) => {
+                    if lplan.expected_size() <= rplan.expected_size() {
+                        Some(lplan)
+                    } else {
+                        Some(rplan)
+                    }
+                }
+                (lplan, rplan) => lplan.or(rplan),
+            }
+        }
+        Terminal::Thresh(_, _) => todo!(),
+        Terminal::Multi(_, _) => todo!(),
+        Terminal::MultiA(_, _) => todo!(),
+    }
+}
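`TermPlan::combine` above is how the planner merges the requirements of the two sides of an `AndV`/`AndB`: locks with matching units keep the stricter value, while mixed units make the branch unplannable. A stand-alone sketch of the same rule for relative timelocks, using only the `Sequence` methods already used above; the helper name and return convention are illustrative only.

use bdk_chain::bitcoin::Sequence;

/// Merge two optional relative-timelock requirements the way `TermPlan::combine` does:
/// same unit -> keep the stricter (larger) one; mixed height/time units -> unsatisfiable,
/// signalled here by the outer `None`.
fn merge_sequences(a: Option<Sequence>, b: Option<Sequence>) -> Option<Option<Sequence>> {
    match (a, b) {
        (Some(lhs), Some(rhs)) => {
            if lhs.is_height_locked() == rhs.is_height_locked() {
                Some(Some(if lhs.to_consensus_u32() > rhs.to_consensus_u32() {
                    lhs
                } else {
                    rhs
                }))
            } else {
                // e.g. OP_CSV counted in blocks on one branch and in 512-second units on the other
                None
            }
        }
        _ => Some(a.or(b)),
    }
}

With this rule, a 10-block lock combined with a 144-block lock yields the 144-block requirement, while a height-based lock combined with a time-based lock yields no plan at all.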
diff --git a/nursery/tmp_plan/src/requirements.rs b/nursery/tmp_plan/src/requirements.rs
new file mode 100644 (file)
index 0000000..3aa6ac2
--- /dev/null
@@ -0,0 +1,218 @@
+use bdk_chain::{bitcoin, collections::*, miniscript};
+use core::ops::Deref;
+
+use bitcoin::{
+    hashes::{hash160, ripemd160, sha256},
+    psbt::Prevouts,
+    secp256k1::{KeyPair, Message, PublicKey, Signing, Verification},
+    util::{bip32, sighash, sighash::SighashCache, taproot},
+    EcdsaSighashType, SchnorrSighashType, Transaction, TxOut, XOnlyPublicKey,
+};
+
+use super::*;
+use miniscript::{
+    descriptor::{DescriptorSecretKey, KeyMap},
+    hash256,
+};
+
+#[derive(Clone, Debug)]
+/// Signatures and hash pre-images that must be provided to complete the plan.
+pub struct Requirements<Ak> {
+    /// required signatures
+    pub signatures: RequiredSignatures<Ak>,
+    /// required sha256 pre-images
+    pub sha256_images: HashSet<sha256::Hash>,
+    /// required hash160 pre-images
+    pub hash160_images: HashSet<hash160::Hash>,
+    /// required hash256 pre-images
+    pub hash256_images: HashSet<hash256::Hash>,
+    /// required ripemd160 pre-images
+    pub ripemd160_images: HashSet<ripemd160::Hash>,
+}
+
+impl<Ak> Default for RequiredSignatures<Ak> {
+    fn default() -> Self {
+        RequiredSignatures::Legacy {
+            keys: Default::default(),
+        }
+    }
+}
+
+impl<Ak> Default for Requirements<Ak> {
+    fn default() -> Self {
+        Self {
+            signatures: Default::default(),
+            sha256_images: Default::default(),
+            hash160_images: Default::default(),
+            hash256_images: Default::default(),
+            ripemd160_images: Default::default(),
+        }
+    }
+}
+
+impl<Ak> Requirements<Ak> {
+    /// Whether any hash pre-images are required in the plan
+    pub fn requires_hash_preimages(&self) -> bool {
+        !(self.sha256_images.is_empty()
+            && self.hash160_images.is_empty()
+            && self.hash256_images.is_empty()
+            && self.ripemd160_images.is_empty())
+    }
+}
+
+/// The signatures required to complete the plan
+#[derive(Clone, Debug)]
+pub enum RequiredSignatures<Ak> {
+    /// Legacy ECDSA signatures are required
+    Legacy { keys: Vec<PlanKey<Ak>> },
+    /// Segwitv0 ECDSA signatures are required
+    Segwitv0 { keys: Vec<PlanKey<Ak>> },
+    /// A Taproot key spend signature is required
+    TapKey {
+        /// the internal key
+        plan_key: PlanKey<Ak>,
+        /// The merkle root of the taproot output
+        merkle_root: Option<TapBranchHash>,
+    },
+    /// Taproot script path signatures are required
+    TapScript {
+        /// The leaf hash of the script being used
+        leaf_hash: TapLeafHash,
+        /// The keys in the script that require signatures
+        plan_keys: Vec<PlanKey<Ak>>,
+    },
+}
+
+#[derive(Clone, Debug)]
+pub enum SigningError {
+    SigHashError(sighash::Error),
+    DerivationError(bip32::Error),
+}
+
+impl From<sighash::Error> for SigningError {
+    fn from(e: sighash::Error) -> Self {
+        Self::SigHashError(e)
+    }
+}
+
+impl core::fmt::Display for SigningError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            SigningError::SigHashError(e) => e.fmt(f),
+            SigningError::DerivationError(e) => e.fmt(f),
+        }
+    }
+}
+
+impl From<bip32::Error> for SigningError {
+    fn from(e: bip32::Error) -> Self {
+        Self::DerivationError(e)
+    }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for SigningError {}
+
+impl RequiredSignatures<DescriptorPublicKey> {
+    pub fn sign_with_keymap<T: Deref<Target = Transaction>>(
+        &self,
+        input_index: usize,
+        keymap: &KeyMap,
+        prevouts: &Prevouts<'_, impl core::borrow::Borrow<TxOut>>,
+        schnorr_sighashty: Option<SchnorrSighashType>,
+        _ecdsa_sighashty: Option<EcdsaSighashType>,
+        sighash_cache: &mut SighashCache<T>,
+        auth_data: &mut SatisfactionMaterial,
+        secp: &Secp256k1<impl Signing + Verification>,
+    ) -> Result<bool, SigningError> {
+        match self {
+            RequiredSignatures::Legacy { .. } | RequiredSignatures::Segwitv0 { .. } => todo!(),
+            RequiredSignatures::TapKey {
+                plan_key,
+                merkle_root,
+            } => {
+                let schnorr_sighashty = schnorr_sighashty.unwrap_or(SchnorrSighashType::Default);
+                let sighash = sighash_cache.taproot_key_spend_signature_hash(
+                    input_index,
+                    prevouts,
+                    schnorr_sighashty,
+                )?;
+                let secret_key = match keymap.get(&plan_key.asset_key) {
+                    Some(secret_key) => secret_key,
+                    None => return Ok(false),
+                };
+                let secret_key = match secret_key {
+                    DescriptorSecretKey::Single(single) => single.key.inner,
+                    DescriptorSecretKey::XPrv(xprv) => {
+                        xprv.xkey
+                            .derive_priv(&secp, &plan_key.derivation_hint)?
+                            .private_key
+                    }
+                };
+
+                let pubkey = PublicKey::from_secret_key(&secp, &secret_key);
+                let x_only_pubkey = XOnlyPublicKey::from(pubkey);
+
+                let tweak =
+                    taproot::TapTweakHash::from_key_and_tweak(x_only_pubkey, merkle_root.clone());
+                let keypair = KeyPair::from_secret_key(&secp, &secret_key.clone())
+                    .add_xonly_tweak(&secp, &tweak.to_scalar())
+                    .unwrap();
+
+                let msg = Message::from_slice(sighash.as_ref()).expect("Sighashes are 32 bytes");
+                let sig = secp.sign_schnorr_no_aux_rand(&msg, &keypair);
+
+                let bitcoin_sig = SchnorrSig {
+                    sig,
+                    hash_ty: schnorr_sighashty,
+                };
+
+                auth_data
+                    .schnorr_sigs
+                    .insert(plan_key.descriptor_key.clone(), bitcoin_sig);
+                Ok(true)
+            }
+            RequiredSignatures::TapScript {
+                leaf_hash,
+                plan_keys,
+            } => {
+                let sighash_type = schnorr_sighashty.unwrap_or(SchnorrSighashType::Default);
+                let sighash = sighash_cache.taproot_script_spend_signature_hash(
+                    input_index,
+                    prevouts,
+                    *leaf_hash,
+                    sighash_type,
+                )?;
+
+                let mut modified = false;
+
+                for plan_key in plan_keys {
+                    if let Some(secret_key) = keymap.get(&plan_key.asset_key) {
+                        let secret_key = match secret_key {
+                            DescriptorSecretKey::Single(single) => single.key.inner,
+                            DescriptorSecretKey::XPrv(xprv) => {
+                                xprv.xkey
+                                    .derive_priv(&secp, &plan_key.derivation_hint)?
+                                    .private_key
+                            }
+                        };
+                        let keypair = KeyPair::from_secret_key(&secp, &secret_key.clone());
+                        let msg =
+                            Message::from_slice(sighash.as_ref()).expect("Sighashes are 32 bytes");
+                        let sig = secp.sign_schnorr_no_aux_rand(&msg, &keypair);
+                        let bitcoin_sig = SchnorrSig {
+                            sig,
+                            hash_ty: sighash_type,
+                        };
+
+                        auth_data
+                            .schnorr_sigs
+                            .insert(plan_key.descriptor_key.clone(), bitcoin_sig);
+                        modified = true;
+                    }
+                }
+                Ok(modified)
+            }
+        }
+    }
+}
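`sign_with_keymap` is the only signing path implemented so far (taproot key spend and script spend; the legacy and segwit-v0 arms are `todo!()`). Below is a hedged, crate-internal sketch of how it might be driven for a single input; the helper name and the fixed input index are illustrative, and `RequiredSignatures`, `SatisfactionMaterial`, and `SigningError` are the types defined in this module.

use bdk_chain::bitcoin::{
    psbt::Prevouts, secp256k1::Secp256k1, util::sighash::SighashCache, Transaction, TxOut,
};
use bdk_chain::miniscript::descriptor::{DescriptorPublicKey, KeyMap};

/// Sign input 0 of `tx` with whatever secret keys `keymap` holds, recording the
/// resulting schnorr signatures into `material`. Returns Ok(true) if anything was added.
fn sign_first_input(
    required: &RequiredSignatures<DescriptorPublicKey>,
    tx: &Transaction,
    prevouts: &[TxOut],
    keymap: &KeyMap,
    material: &mut SatisfactionMaterial,
) -> Result<bool, SigningError> {
    let secp = Secp256k1::new();
    let mut sighash_cache = SighashCache::new(tx);
    required.sign_with_keymap(
        0,                        // sign input 0 only, for illustration
        keymap,
        &Prevouts::All(prevouts), // all spent outputs, as taproot sighashing requires
        None,                     // defaults to SchnorrSighashType::Default
        None,                     // ECDSA sighash type: unused, those arms are todo!()
        &mut sighash_cache,
        material,
        &secp,
    )
}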
diff --git a/nursery/tmp_plan/src/template.rs b/nursery/tmp_plan/src/template.rs
new file mode 100644 (file)
index 0000000..cf94540
--- /dev/null
@@ -0,0 +1,76 @@
+use bdk_chain::{bitcoin, miniscript};
+use bitcoin::{
+    hashes::{hash160, ripemd160, sha256},
+    util::bip32::DerivationPath,
+};
+
+use super::*;
+use crate::{hash256, varint_len, DefiniteDescriptorKey};
+
+#[derive(Clone, Debug)]
+pub(crate) enum TemplateItem<Ak> {
+    Sign(PlanKey<Ak>),
+    Pk { key: DefiniteDescriptorKey },
+    One,
+    Zero,
+    Sha256(sha256::Hash),
+    Hash256(hash256::Hash),
+    Ripemd160(ripemd160::Hash),
+    Hash160(hash160::Hash),
+}
+
+/// A plan key pairs the asset key originally provided with the descriptor key it purports to be
+/// able to derive, along with a "hint" on how to derive it.
+#[derive(Clone, Debug)]
+pub struct PlanKey<Ak> {
+    /// The key the planner will sign with
+    pub asset_key: Ak,
+    /// A hint for how to get from the asset key to the concrete key we need to sign with.
+    pub derivation_hint: DerivationPath,
+    /// The descriptor key that we are satisfying with the signature from the asset key.
+    pub descriptor_key: DefiniteDescriptorKey,
+}
+
+impl<Ak> TemplateItem<Ak> {
+    pub fn expected_size(&self) -> usize {
+        match self {
+            TemplateItem::Sign { .. } => 64, /* size of sig; TODO: take the sighash flag into consideration */
+            TemplateItem::Pk { .. } => 32,
+            TemplateItem::One => varint_len(1),
+            TemplateItem::Zero => 0, /* zero means an empty witness element */
+            // I'm not sure if it should be 32 here (it's a 20 byte hash) but that's what other
+            // parts of the code were doing.
+            TemplateItem::Hash160(_) | TemplateItem::Ripemd160(_) => 32,
+            TemplateItem::Sha256(_) | TemplateItem::Hash256(_) => 32,
+        }
+    }
+
+    // this can only be called if we are sure that auth_data has what we need
+    pub(super) fn to_witness_stack(&self, auth_data: &SatisfactionMaterial) -> Vec<Vec<u8>> {
+        match self {
+            TemplateItem::Sign(plan_key) => {
+                vec![auth_data
+                    .schnorr_sigs
+                    .get(&plan_key.descriptor_key)
+                    .unwrap()
+                    .to_vec()]
+            }
+            TemplateItem::One => vec![vec![1]],
+            TemplateItem::Zero => vec![vec![]],
+            TemplateItem::Sha256(image) => {
+                vec![auth_data.sha256_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Hash160(image) => {
+                vec![auth_data.hash160_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Ripemd160(image) => {
+                vec![auth_data.ripemd160_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Hash256(image) => {
+                vec![auth_data.hash256_preimages.get(image).unwrap().to_vec()]
+            }
+            TemplateItem::Pk { key } => vec![key.to_public_key().to_bytes()],
+        }
+    }
+}
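`to_witness_stack` yields the witness elements for one template item; completing a plan ultimately needs every item's elements flattened into a single `bitcoin::Witness` (the `final_script_witness` seen in `PlanState::Complete`). A crate-internal sketch of that flattening step, with an illustrative helper name; as the comment above requires, it assumes `auth_data` already holds everything the template needs, since `to_witness_stack` unwraps the entries.

use bdk_chain::bitcoin::Witness;

/// Flatten the element stacks of every template item into one witness.
/// Only callable once `auth_data` contains all required signatures and pre-images.
fn assemble_witness<Ak>(
    template: &[TemplateItem<Ak>],
    auth_data: &SatisfactionMaterial,
) -> Witness {
    let stack: Vec<Vec<u8>> = template
        .iter()
        .flat_map(|item| item.to_witness_stack(auth_data))
        .collect();
    Witness::from_vec(stack)
}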