Move bdk_chain into the bdk repo :tada:
author     Daniela Brozzoni <danielabrozzoni@protonmail.com>
           Wed, 1 Mar 2023 10:09:08 +0000 (11:09 +0100)
committer  Daniela Brozzoni <danielabrozzoni@protonmail.com>
           Thu, 2 Mar 2023 09:56:33 +0000 (10:56 +0100)
Original repository: https://github.com/LLFourn/bdk_core_staging/tree/250b4f1dcce10805adfb2f201901675eb6076554/bdk_chain

Co-authored-by: Steve Myers <steve@notmandatory.org>
Co-authored-by: 志宇 <hello@evanlinjin.me>
Co-authored-by: LLFourn <lloyd.fourn@gmail.com>
Co-authored-by: rajarshimaitra <rajarshi149@gmail.com>
Co-authored-by: LagginTimes <wzc110@gmail.com>
Co-authored-by: Vladimir Fomene <vladimirfomene@gmail.com>
22 files changed:
Cargo.toml
crates/chain/Cargo.toml [new file with mode: 0644]
crates/chain/src/chain_data.rs [new file with mode: 0644]
crates/chain/src/chain_graph.rs [new file with mode: 0644]
crates/chain/src/descriptor_ext.rs [new file with mode: 0644]
crates/chain/src/example_utils.rs [new file with mode: 0644]
crates/chain/src/keychain.rs [new file with mode: 0644]
crates/chain/src/keychain/persist.rs [new file with mode: 0644]
crates/chain/src/keychain/tracker.rs [new file with mode: 0644]
crates/chain/src/keychain/txout_index.rs [new file with mode: 0644]
crates/chain/src/lib.rs [new file with mode: 0644]
crates/chain/src/sparse_chain.rs [new file with mode: 0644]
crates/chain/src/spk_txout_index.rs [new file with mode: 0644]
crates/chain/src/tx_data_traits.rs [new file with mode: 0644]
crates/chain/src/tx_graph.rs [new file with mode: 0644]
crates/chain/tests/common/mod.rs [new file with mode: 0644]
crates/chain/tests/test_chain_graph.rs [new file with mode: 0644]
crates/chain/tests/test_keychain_tracker.rs [new file with mode: 0644]
crates/chain/tests/test_keychain_txout_index.rs [new file with mode: 0644]
crates/chain/tests/test_sparse_chain.rs [new file with mode: 0644]
crates/chain/tests/test_spk_txout_index.rs [new file with mode: 0644]
crates/chain/tests/test_tx_graph.rs [new file with mode: 0644]

diff --git a/Cargo.toml b/Cargo.toml
index 9549e4235c0cf02350417e732525be5915a60149..83b199b9278544cd0f5cb7120080f9ee62e0024c 100644
@@ -1,6 +1,7 @@
 [workspace]
 members = [
     "crates/bdk",
+    "crates/bdk_chain",
     "example-crates/esplora-wallet",
     "example-crates/electrum-wallet",
 ]
diff --git a/crates/chain/Cargo.toml b/crates/chain/Cargo.toml
new file mode 100644
index 0000000..6877b2b
--- /dev/null
@@ -0,0 +1,27 @@
+[package]
+name = "bdk_chain"
+version = "0.3.1"
+edition = "2021"
+homepage = "https://bitcoindevkit.org"
+repository = "https://github.com/LLFourn/bdk_core_staging"
+documentation = "https://docs.rs/bdk_chain"
+description = "Collection of core structures for Bitcoin Dev Kit."
+license = "MIT OR Apache-2.0"
+readme = "../README.md"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+bitcoin = { version = "0.29" }
+serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] }
+# Use hashbrown as a feature flag to have HashSet and HashMap from it.
+hashbrown = { version = "0.13.2", optional = true }
+miniscript = { version = "9.0.0", optional = true  }
+
+[dev-dependencies]
+rand = "0.8"
+
+[features]
+default = ["std", "miniscript"]
+std = []
+serde = ["serde_crate", "bitcoin/serde"]
diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs
new file mode 100644
index 0000000..51b1e3b
--- /dev/null
@@ -0,0 +1,218 @@
+use bitcoin::{hashes::Hash, BlockHash, OutPoint, TxOut, Txid};
+
+use crate::{
+    sparse_chain::{self, ChainPosition},
+    COINBASE_MATURITY,
+};
+
+/// Represents the height at which a transaction is confirmed.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(crate = "serde_crate")
+)]
+pub enum TxHeight {
+    Confirmed(u32),
+    Unconfirmed,
+}
+
+impl Default for TxHeight {
+    fn default() -> Self {
+        Self::Unconfirmed
+    }
+}
+
+impl core::fmt::Display for TxHeight {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            Self::Confirmed(h) => core::write!(f, "confirmed_at({})", h),
+            Self::Unconfirmed => core::write!(f, "unconfirmed"),
+        }
+    }
+}
+
+impl From<Option<u32>> for TxHeight {
+    fn from(opt: Option<u32>) -> Self {
+        match opt {
+            Some(h) => Self::Confirmed(h),
+            None => Self::Unconfirmed,
+        }
+    }
+}
+
+impl From<TxHeight> for Option<u32> {
+    fn from(height: TxHeight) -> Self {
+        match height {
+            TxHeight::Confirmed(h) => Some(h),
+            TxHeight::Unconfirmed => None,
+        }
+    }
+}
+
+impl crate::sparse_chain::ChainPosition for TxHeight {
+    fn height(&self) -> TxHeight {
+        *self
+    }
+
+    fn max_ord_of_height(height: TxHeight) -> Self {
+        height
+    }
+
+    fn min_ord_of_height(height: TxHeight) -> Self {
+        height
+    }
+}
+
+impl TxHeight {
+    pub fn is_confirmed(&self) -> bool {
+        matches!(self, Self::Confirmed(_))
+    }
+}
+
+/// Block height and timestamp at which a transaction is confirmed.
+#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord, core::hash::Hash)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(crate = "serde_crate")
+)]
+pub enum ConfirmationTime {
+    Confirmed { height: u32, time: u64 },
+    Unconfirmed,
+}
+
+impl sparse_chain::ChainPosition for ConfirmationTime {
+    fn height(&self) -> TxHeight {
+        match self {
+            ConfirmationTime::Confirmed { height, .. } => TxHeight::Confirmed(*height),
+            ConfirmationTime::Unconfirmed => TxHeight::Unconfirmed,
+        }
+    }
+
+    fn max_ord_of_height(height: TxHeight) -> Self {
+        match height {
+            TxHeight::Confirmed(height) => Self::Confirmed {
+                height,
+                time: u64::MAX,
+            },
+            TxHeight::Unconfirmed => Self::Unconfirmed,
+        }
+    }
+
+    fn min_ord_of_height(height: TxHeight) -> Self {
+        match height {
+            TxHeight::Confirmed(height) => Self::Confirmed {
+                height,
+                time: u64::MIN,
+            },
+            TxHeight::Unconfirmed => Self::Unconfirmed,
+        }
+    }
+}
+
+impl ConfirmationTime {
+    pub fn is_confirmed(&self) -> bool {
+        matches!(self, Self::Confirmed { .. })
+    }
+}
+
+/// A reference to a block in the canonical chain.
+#[derive(Debug, Clone, PartialEq, Eq, Copy, PartialOrd, Ord)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(crate = "serde_crate")
+)]
+pub struct BlockId {
+    /// The height of the block
+    pub height: u32,
+    /// The hash of the block
+    pub hash: BlockHash,
+}
+
+impl Default for BlockId {
+    fn default() -> Self {
+        Self {
+            height: Default::default(),
+            hash: BlockHash::from_inner([0u8; 32]),
+        }
+    }
+}
+
+impl From<(u32, BlockHash)> for BlockId {
+    fn from((height, hash): (u32, BlockHash)) -> Self {
+        Self { height, hash }
+    }
+}
+
+impl From<BlockId> for (u32, BlockHash) {
+    fn from(block_id: BlockId) -> Self {
+        (block_id.height, block_id.hash)
+    }
+}
+
+impl From<(&u32, &BlockHash)> for BlockId {
+    fn from((height, hash): (&u32, &BlockHash)) -> Self {
+        Self {
+            height: *height,
+            hash: *hash,
+        }
+    }
+}
+
+/// A `TxOut` with as much data as we can retrieve about it
+#[derive(Debug, Clone, PartialEq)]
+pub struct FullTxOut<I> {
+    /// The location of the `TxOut`
+    pub outpoint: OutPoint,
+    /// The `TxOut`
+    pub txout: TxOut,
+    /// The position in the overall chain of the transaction referenced by `outpoint`.
+    pub chain_position: I,
+    /// The txid and chain position of the transaction (if any) that has spent this output.
+    pub spent_by: Option<(I, Txid)>,
+    /// Whether this output is on a coinbase transaction
+    pub is_on_coinbase: bool,
+}
+
+impl<I: ChainPosition> FullTxOut<I> {
+    /// Whether the utxo is/was/will be spendable at `height`.
+    ///
+    /// It is spendable if it is not an immature coinbase output and no spending tx has been
+    /// confirmed by that height.
+    pub fn is_spendable_at(&self, height: u32) -> bool {
+        if !self.is_mature(height) {
+            return false;
+        }
+
+        if self.chain_position.height() > TxHeight::Confirmed(height) {
+            return false;
+        }
+
+        match &self.spent_by {
+            Some((spending_height, _)) => spending_height.height() > TxHeight::Confirmed(height),
+            None => true,
+        }
+    }
+
+    pub fn is_mature(&self, height: u32) -> bool {
+        if self.is_on_coinbase {
+            let tx_height = match self.chain_position.height() {
+                TxHeight::Confirmed(tx_height) => tx_height,
+                TxHeight::Unconfirmed => {
+                    debug_assert!(false, "coinbase tx can never be unconfirmed");
+                    return false;
+                }
+            };
+            let age = height.saturating_sub(tx_height);
+            if age + 1 < COINBASE_MATURITY {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+// TODO: make test
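
The derived ordering and conversions above are what the spendability checks rely on. A minimal
sketch of how they behave (assuming `TxHeight` is re-exported at the crate root, as the rest of
the crate suggests):

    use bdk_chain::TxHeight;

    // The derived `Ord` sorts every `Confirmed(_)` height before `Unconfirmed`, which is
    // what `FullTxOut::is_spendable_at` relies on when it compares chain positions
    // against `TxHeight::Confirmed(height)`.
    assert!(TxHeight::Confirmed(100) < TxHeight::Confirmed(101));
    assert!(TxHeight::Confirmed(100) < TxHeight::Unconfirmed);

    // The `From` impls convert between `Option<u32>` and `TxHeight`.
    assert_eq!(TxHeight::from(Some(100)), TxHeight::Confirmed(100));
    assert_eq!(Option::<u32>::from(TxHeight::Unconfirmed), None);
    assert!(!TxHeight::Unconfirmed.is_confirmed());
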
diff --git a/crates/chain/src/chain_graph.rs b/crates/chain/src/chain_graph.rs
new file mode 100644
index 0000000..78a56a9
--- /dev/null
@@ -0,0 +1,620 @@
+//! Module for structures that combine the features of [`sparse_chain`] and [`tx_graph`].
+use crate::{
+    collections::HashSet,
+    sparse_chain::{self, ChainPosition, SparseChain},
+    tx_graph::{self, TxGraph},
+    AsTransaction, BlockId, ForEachTxOut, FullTxOut, IntoOwned, TxHeight,
+};
+use alloc::{borrow::Cow, string::ToString, vec::Vec};
+use bitcoin::{OutPoint, Transaction, TxOut, Txid};
+use core::fmt::Debug;
+
+/// A consistent combination of a [`SparseChain<P>`] and a [`TxGraph<T>`].
+///
+/// `SparseChain` only keeps track of transaction ids and their position in the chain but you often
+/// want to store the full transactions as well. Additionally you want to make sure that everything
+/// in the chain is consistent with the full transaction data. `ChainGraph` enforces these two
+/// invariants:
+///
+/// 1. Every transaction that is in the chain is also in the graph (you always have the full
+/// transaction).
+/// 2. No transactions in the chain conflict with each other i.e. they don't double spend each
+/// other or have ancestors that double spend each other.
+///
+/// Note that the `ChainGraph` guarantees a 1:1 mapping between transactions in the `chain` and
+/// `graph` but not the other way around. Transactions may fall out of the *chain* (via re-org or
+/// mempool eviction) but will remain in the *graph*.
+#[derive(Clone, Debug, PartialEq)]
+pub struct ChainGraph<P = TxHeight, T = Transaction> {
+    chain: SparseChain<P>,
+    graph: TxGraph<T>,
+}
+
+impl<P, T> Default for ChainGraph<P, T> {
+    fn default() -> Self {
+        Self {
+            chain: Default::default(),
+            graph: Default::default(),
+        }
+    }
+}
+
+impl<P, T> AsRef<SparseChain<P>> for ChainGraph<P, T> {
+    fn as_ref(&self) -> &SparseChain<P> {
+        &self.chain
+    }
+}
+
+impl<P, T> AsRef<TxGraph<T>> for ChainGraph<P, T> {
+    fn as_ref(&self) -> &TxGraph<T> {
+        &self.graph
+    }
+}
+
+impl<P, T> AsRef<ChainGraph<P, T>> for ChainGraph<P, T> {
+    fn as_ref(&self) -> &ChainGraph<P, T> {
+        self
+    }
+}
+
+impl<P, T> ChainGraph<P, T> {
+    /// Returns a reference to the internal [`SparseChain`].
+    pub fn chain(&self) -> &SparseChain<P> {
+        &self.chain
+    }
+
+    /// Returns a reference to the internal [`TxGraph`].
+    pub fn graph(&self) -> &TxGraph<T> {
+        &self.graph
+    }
+}
+
+impl<P, T> ChainGraph<P, T>
+where
+    P: ChainPosition,
+    T: AsTransaction + Clone + Ord,
+{
+    /// Create a new chain graph from a `chain` and a `graph`.
+    ///
+    /// There are two reasons this can return an `Err`:
+    ///
+    /// 1. There is a transaction in the `chain` that does not have its corresponding full
+    /// transaction in `graph`.
+/// 2. The `chain` has two transactions that are allegedly in it, but they conflict in the `graph`
+/// (so they could not possibly be in the same chain).
+    pub fn new(chain: SparseChain<P>, graph: TxGraph<T>) -> Result<Self, NewError<P>> {
+        let mut missing = HashSet::default();
+        for (pos, txid) in chain.txids() {
+            if let Some(tx) = graph.get_tx(*txid) {
+                let conflict = graph
+                    .walk_conflicts(tx.as_tx(), |_, txid| {
+                        Some((chain.tx_position(txid)?.clone(), txid))
+                    })
+                    .next();
+                if let Some((conflict_pos, conflict)) = conflict {
+                    return Err(NewError::Conflict {
+                        a: (pos.clone(), *txid),
+                        b: (conflict_pos, conflict),
+                    });
+                }
+            } else {
+                missing.insert(*txid);
+            }
+        }
+
+        if !missing.is_empty() {
+            return Err(NewError::Missing(missing));
+        }
+
+        Ok(Self { chain, graph })
+    }
+
+    /// Take an update in the form of a [`SparseChain<P>`][`SparseChain`] and attempt to turn it
+    /// into a chain graph by filling in full transactions from `self` and from `new_txs`. This
+    /// returns a `ChainGraph<P, Cow<T>>` where the [`Cow<'a, T>`] will borrow the transaction if it
+    /// got it from `self`.
+    ///
+    /// This is useful when interacting with services like an Electrum server, which returns a list
+    /// of txids and heights when calling [`script_get_history`]. These can easily be inserted into a
+    /// [`SparseChain<TxHeight>`][`SparseChain`]. From there you need to figure out which full
+    /// transactions you are missing in your chain graph and form `new_txs`. You then use
+    /// `inflate_update` to turn this into an update `ChainGraph<P, Cow<Transaction>>` and finally
+    /// use [`determine_changeset`] to generate the changeset from it.
+    ///
+    /// [`SparseChain`]: crate::sparse_chain::SparseChain
+    /// [`Cow<'a, T>`]: std::borrow::Cow
+    /// [`script_get_history`]: https://docs.rs/electrum-client/latest/electrum_client/trait.ElectrumApi.html#tymethod.script_get_history
+    /// [`determine_changeset`]: Self::determine_changeset
+    pub fn inflate_update(
+        &self,
+        update: SparseChain<P>,
+        new_txs: impl IntoIterator<Item = T>,
+    ) -> Result<ChainGraph<P, Cow<T>>, NewError<P>> {
+        let mut inflated_graph = TxGraph::default();
+        for (_, txid) in update.txids() {
+            if let Some(tx) = self.graph.get_tx(*txid) {
+                let _ = inflated_graph.insert_tx(Cow::Borrowed(tx));
+            }
+        }
+
+        for tx in new_txs {
+            let _ = inflated_graph.insert_tx(Cow::Owned(tx));
+        }
+
+        ChainGraph::new(update, inflated_graph)
+    }
+
+    /// Returns the checkpoint limit.
+    ///
+    /// Refer to [`SparseChain::checkpoint_limit`] for more.
+    pub fn checkpoint_limit(&self) -> Option<usize> {
+        self.chain.checkpoint_limit()
+    }
+
+    /// Sets the checkpoint limit.
+    ///
+    /// Refer to [`SparseChain::set_checkpoint_limit`] for more.
+    pub fn set_checkpoint_limit(&mut self, limit: Option<usize>) {
+        self.chain.set_checkpoint_limit(limit)
+    }
+
+    /// Determines the changes required to invalidate checkpoints `from_height` (inclusive) and
+    /// above. Displaced transactions will have their positions moved to [`TxHeight::Unconfirmed`].
+    pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet<P, T> {
+        ChangeSet {
+            chain: self.chain.invalidate_checkpoints_preview(from_height),
+            ..Default::default()
+        }
+    }
+
+    /// Invalidate checkpoints `from_height` (inclusive) and above. Displaced transactions will be
+    /// re-positioned to [`TxHeight::Unconfirmed`].
+    ///
+    /// This is equivalent to calling [`Self::invalidate_checkpoints_preview`] and
+    /// [`Self::apply_changeset`] in sequence.
+    pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet<P, T>
+    where
+        ChangeSet<P, T>: Clone,
+    {
+        let changeset = self.invalidate_checkpoints_preview(from_height);
+        self.apply_changeset(changeset.clone());
+        changeset
+    }
+
+    /// Get a transaction that is currently in the underlying [`SparseChain`].
+    ///
+    /// This does not necessarily mean that it is *confirmed* in the blockchain; it might just be in
+    /// the unconfirmed transaction list within the [`SparseChain`].
+    pub fn get_tx_in_chain(&self, txid: Txid) -> Option<(&P, &T)> {
+        let position = self.chain.tx_position(txid)?;
+        let full_tx = self.graph.get_tx(txid).expect("must exist");
+        Some((position, full_tx))
+    }
+
+    /// Determines the changes required to insert a transaction into the inner [`ChainGraph`] and
+    /// [`SparseChain`] at the given `position`.
+    ///
+    /// If inserting it at the given chain `position` results in conflicts, the returned
+    /// [`ChangeSet`] should evict conflicting transactions.
+    pub fn insert_tx_preview(&self, tx: T, pos: P) -> Result<ChangeSet<P, T>, InsertTxError<P>> {
+        let mut changeset = ChangeSet {
+            chain: self.chain.insert_tx_preview(tx.as_tx().txid(), pos)?,
+            graph: self.graph.insert_tx_preview(tx),
+        };
+        self.fix_conflicts(&mut changeset)?;
+        Ok(changeset)
+    }
+
+    /// Inserts [`Transaction`] at given chain position.
+    ///
+    /// This is equivalent to calling [`Self::insert_tx_preview`] and [`Self::apply_changeset`] in
+    /// sequence.
+    pub fn insert_tx(&mut self, tx: T, pos: P) -> Result<ChangeSet<P, T>, InsertTxError<P>> {
+        let changeset = self.insert_tx_preview(tx, pos)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Determines the changes required to insert a [`TxOut`] into the internal [`TxGraph`].
+    pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P, T> {
+        ChangeSet {
+            chain: Default::default(),
+            graph: self.graph.insert_txout_preview(outpoint, txout),
+        }
+    }
+
+    /// Inserts a [`TxOut`] into the internal [`TxGraph`].
+    ///
+    /// This is equivalent to calling [`Self::insert_txout_preview`] and [`Self::apply_changeset`]
+    /// in sequence.
+    pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> ChangeSet<P, T> {
+        let changeset = self.insert_txout_preview(outpoint, txout);
+        self.apply_changeset(changeset.clone());
+        changeset
+    }
+
+    /// Determines the changes required to insert a `block_id` (a height and block hash) into the
+    /// chain.
+    ///
+    /// If a checkpoint already exists at that height with a different hash this will return
+    /// an error.
+    pub fn insert_checkpoint_preview(
+        &self,
+        block_id: BlockId,
+    ) -> Result<ChangeSet<P, T>, InsertCheckpointError> {
+        self.chain
+            .insert_checkpoint_preview(block_id)
+            .map(|chain_changeset| ChangeSet {
+                chain: chain_changeset,
+                ..Default::default()
+            })
+    }
+
+    /// Inserts checkpoint into [`Self`].
+    ///
+    /// This is equivalent to calling [`Self::insert_checkpoint_preview`] and
+    /// [`Self::apply_changeset`] in sequence.
+    pub fn insert_checkpoint(
+        &mut self,
+        block_id: BlockId,
+    ) -> Result<ChangeSet<P, T>, InsertCheckpointError> {
+        let changeset = self.insert_checkpoint_preview(block_id)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
+    pub fn determine_changeset<'a, T2>(
+        &self,
+        update: &ChainGraph<P, T2>,
+    ) -> Result<ChangeSet<P, T>, UpdateError<P>>
+    where
+        T2: IntoOwned<T> + Clone,
+    {
+        let chain_changeset = self
+            .chain
+            .determine_changeset(&update.chain)
+            .map_err(UpdateError::Chain)?;
+
+        let mut changeset = ChangeSet {
+            chain: chain_changeset,
+            graph: self.graph.determine_additions(&update.graph),
+        };
+
+        self.fix_conflicts(&mut changeset)?;
+        Ok(changeset)
+    }
+
+    /// Given a transaction, return an iterator of `txid`s that conflict with it (spend at least
+    /// one of the same inputs). This includes all descendants of conflicting transactions.
+    ///
+    /// This method only returns conflicts that exist in the [`SparseChain`], as transactions not
+    /// included in the [`SparseChain`] are already considered evicted.
+    pub fn tx_conflicts_in_chain<'a>(
+        &'a self,
+        tx: &'a Transaction,
+    ) -> impl Iterator<Item = (&'a P, Txid)> + 'a {
+        self.graph.walk_conflicts(tx, |_, conflict_txid| {
+            self.chain
+                .tx_position(conflict_txid)
+                .map(|conflict_pos| (conflict_pos, conflict_txid))
+        })
+    }
+
+    /// Fix changeset conflicts.
+    ///
+    /// **WARNING:** If there are any missing full txs, conflict resolution will not be complete. In
+    /// debug mode, this will result in a panic.
+    fn fix_conflicts(
+        &self,
+        changeset: &mut ChangeSet<P, T>,
+    ) -> Result<(), UnresolvableConflict<P>> {
+        let chain_conflicts = changeset
+            .chain
+            .txids
+            .iter()
+            // we want to find new txid additions by the changeset (all txid entries in the
+            // changeset with Some(position_change))
+            .filter_map(|(&txid, pos_change)| pos_change.as_ref().map(|pos| (txid, pos)))
+            // we don't care about txids that move, only newly added txids
+            .filter(|&(txid, _)| self.chain.tx_position(txid).is_none())
+            // full tx should exist (either in graph, or additions)
+            .filter_map(|(txid, pos)| {
+                let full_tx = self
+                    .graph
+                    .get_tx(txid)
+                    .or_else(|| {
+                        changeset
+                            .graph
+                            .tx
+                            .iter()
+                            .find(|tx| tx.as_tx().txid() == txid)
+                    })
+                    .map(|tx| (txid, tx, pos));
+                debug_assert!(full_tx.is_some(), "should have full tx at this point");
+                full_tx
+            })
+            .flat_map(|(new_txid, new_tx, new_pos)| {
+                self.tx_conflicts_in_chain(new_tx.as_tx()).map(
+                    move |(conflict_pos, conflict_txid)| {
+                        (new_pos.clone(), new_txid, conflict_pos, conflict_txid)
+                    },
+                )
+            })
+            .collect::<Vec<_>>();
+
+        for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts {
+            // We have found a tx that conflicts with our update txid. Only allow this when the
+            // conflicting tx will be positioned as "unconfirmed" after the update is applied.
+            // If so, we will modify the changeset to evict the conflicting txid.
+
+            // determine the position of the conflicting txid after current changeset is applied
+            let conflicting_new_pos = changeset
+                .chain
+                .txids
+                .get(&conflicting_txid)
+                .map(Option::as_ref)
+                .unwrap_or(Some(conflicting_pos));
+
+            match conflicting_new_pos {
+                None => {
+                    // conflicting txid will be deleted, can ignore
+                }
+                Some(existing_new_pos) => match existing_new_pos.height() {
+                    TxHeight::Confirmed(_) => {
+                        // the new position of the conflicting tx is "confirmed", so it cannot be
+                        // evicted; return an error
+                        return Err(UnresolvableConflict {
+                            already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid),
+                            update_tx: (update_pos.clone(), update_txid),
+                        });
+                    }
+                    TxHeight::Unconfirmed => {
+                        // the new position of the conflicting tx is "unconfirmed", therefore it can
+                        // be evicted
+                        changeset.chain.txids.insert(conflicting_txid, None);
+                    }
+                },
+            };
+        }
+
+        Ok(())
+    }
+
+    /// Applies `changeset` to `self`.
+    ///
+    /// **Warning**: this method assumes the changeset is correctly formed. If it isn't, the
+    /// chain graph may not behave correctly in the future and may panic unexpectedly.
+    pub fn apply_changeset(&mut self, changeset: ChangeSet<P, T>) {
+        self.chain.apply_changeset(changeset.chain);
+        self.graph.apply_additions(changeset.graph);
+    }
+
+    /// Applies the `update` chain graph. Note this is shorthand for calling
+    /// [`Self::determine_changeset()`] and [`Self::apply_changeset()`] in sequence.
+    pub fn apply_update<T2: IntoOwned<T> + Clone>(
+        &mut self,
+        update: ChainGraph<P, T2>,
+    ) -> Result<ChangeSet<P, T>, UpdateError<P>> {
+        let changeset = self.determine_changeset(&update)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Get the full transaction output at an outpoint if it exists in the chain and the graph.
+    pub fn full_txout(&self, outpoint: OutPoint) -> Option<FullTxOut<P>> {
+        self.chain.full_txout(&self.graph, outpoint)
+    }
+
+    /// Iterate over the full transactions and their positions in the chain, in ascending order of
+    /// position.
+    pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator<Item = (&P, &T)> {
+        self.chain
+            .txids()
+            .map(|(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
+    }
+
+    /// Finds the transaction in the chain that spends `outpoint` given the input/output
+    /// relationships in `graph`. Note that the transaction including `outpoint` does not need to be
+    /// in the `graph` or the `chain` for this to return `Some(_)`.
+    pub fn spent_by(&self, outpoint: OutPoint) -> Option<(&P, Txid)> {
+        self.chain.spent_by(&self.graph, outpoint)
+    }
+
+    /// Returns `true` if the chain graph contains no data whatsoever.
+    pub fn is_empty(&self) -> bool {
+        self.chain.is_empty() && self.graph.is_empty()
+    }
+}
+
+/// Represents changes to [`ChainGraph`].
+///
+/// This is essentially a combination of [`sparse_chain::ChangeSet`] and [`tx_graph::Additions`].
+#[derive(Debug, Clone, PartialEq)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(
+        crate = "serde_crate",
+        bound(
+            deserialize = "P: serde::Deserialize<'de>, T: Ord + serde::Deserialize<'de>",
+            serialize = "P: serde::Serialize, T: Ord + serde::Serialize"
+        )
+    )
+)]
+#[must_use]
+pub struct ChangeSet<P, T> {
+    pub chain: sparse_chain::ChangeSet<P>,
+    pub graph: tx_graph::Additions<T>,
+}
+
+impl<P, T> ChangeSet<P, T> {
+    /// Returns `true` if this [`ChangeSet`] records no changes.
+    pub fn is_empty(&self) -> bool {
+        self.chain.is_empty() && self.graph.is_empty()
+    }
+
+    /// Returns `true` if this [`ChangeSet`] contains transaction evictions.
+    pub fn contains_eviction(&self) -> bool {
+        self.chain
+            .txids
+            .iter()
+            .any(|(_, new_pos)| new_pos.is_none())
+    }
+
+    /// Appends the changes in `other` into self such that applying `self` afterwards has the same
+    /// effect as sequentially applying the original `self` and `other`.
+    pub fn append(&mut self, other: ChangeSet<P, T>)
+    where
+        P: ChainPosition,
+        T: Ord,
+    {
+        self.chain.append(other.chain);
+        self.graph.append(other.graph);
+    }
+}
+
+impl<P, T> Default for ChangeSet<P, T> {
+    fn default() -> Self {
+        Self {
+            chain: Default::default(),
+            graph: Default::default(),
+        }
+    }
+}
+
+impl<P, T: AsTransaction> ForEachTxOut for ChainGraph<P, T> {
+    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
+        self.graph.for_each_txout(f)
+    }
+}
+
+impl<P, T: AsTransaction> ForEachTxOut for ChangeSet<P, T> {
+    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
+        self.graph.for_each_txout(f)
+    }
+}
+
+/// Error that may occur when calling [`ChainGraph::new`].
+#[derive(Clone, Debug, PartialEq)]
+pub enum NewError<P> {
+    /// Two transactions within the sparse chain conflicted with each other
+    Conflict { a: (P, Txid), b: (P, Txid) },
+    /// One or more transactions in the chain were not in the graph
+    Missing(HashSet<Txid>),
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for NewError<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            NewError::Conflict { a, b } => write!(
+                f,
+                "Unable to inflate sparse chain to chain graph since transactions {:?} and {:?}",
+                a, b
+            ),
+            NewError::Missing(missing) => write!(
+                f,
+                "missing full transactions for {}",
+                missing
+                    .into_iter()
+                    .map(|txid| txid.to_string())
+                    .collect::<Vec<_>>()
+                    .join(", ")
+            ),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for NewError<P> {}
+
+/// Error that may occur when inserting a transaction.
+///
+/// Refer to [`ChainGraph::insert_tx_preview`] and [`ChainGraph::insert_tx`].
+#[derive(Clone, Debug, PartialEq)]
+pub enum InsertTxError<P> {
+    Chain(sparse_chain::InsertTxError<P>),
+    UnresolvableConflict(UnresolvableConflict<P>),
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for InsertTxError<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            InsertTxError::Chain(inner) => core::fmt::Display::fmt(inner, f),
+            InsertTxError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f),
+        }
+    }
+}
+
+impl<P> From<sparse_chain::InsertTxError<P>> for InsertTxError<P> {
+    fn from(inner: sparse_chain::InsertTxError<P>) -> Self {
+        Self::Chain(inner)
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for InsertTxError<P> {}
+
+/// A nice alias of [`sparse_chain::InsertCheckpointError`].
+pub type InsertCheckpointError = sparse_chain::InsertCheckpointError;
+
+/// Represents an update failure.
+#[derive(Clone, Debug, PartialEq)]
+pub enum UpdateError<P> {
+    /// The update chain was inconsistent with the existing chain
+    Chain(sparse_chain::UpdateError<P>),
+    /// A transaction in the update spent the same input as an already confirmed transaction
+    UnresolvableConflict(UnresolvableConflict<P>),
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for UpdateError<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            UpdateError::Chain(inner) => core::fmt::Display::fmt(inner, f),
+            UpdateError::UnresolvableConflict(inner) => core::fmt::Display::fmt(inner, f),
+        }
+    }
+}
+
+impl<P> From<sparse_chain::UpdateError<P>> for UpdateError<P> {
+    fn from(inner: sparse_chain::UpdateError<P>) -> Self {
+        Self::Chain(inner)
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for UpdateError<P> {}
+
+/// Represents an unresolvable conflict between an update's transaction and an
+/// already-confirmed transaction.
+#[derive(Clone, Debug, PartialEq)]
+pub struct UnresolvableConflict<P> {
+    pub already_confirmed_tx: (P, Txid),
+    pub update_tx: (P, Txid),
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for UnresolvableConflict<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        let Self {
+            already_confirmed_tx,
+            update_tx,
+        } = self;
+        write!(f, "update transaction {} at height {:?} conflicts with an already confirmed transaction {} at height {:?}",
+            update_tx.1, update_tx.0, already_confirmed_tx.1, already_confirmed_tx.0)
+    }
+}
+
+impl<P> From<UnresolvableConflict<P>> for UpdateError<P> {
+    fn from(inner: UnresolvableConflict<P>) -> Self {
+        Self::UnresolvableConflict(inner)
+    }
+}
+
+impl<P> From<UnresolvableConflict<P>> for InsertTxError<P> {
+    fn from(inner: UnresolvableConflict<P>) -> Self {
+        Self::UnresolvableConflict(inner)
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for UnresolvableConflict<P> {}
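
The preview/apply split documented above is the main way to drive a `ChainGraph`: `*_preview`
methods only compute a `ChangeSet`, and nothing is mutated until `apply_changeset` is called. A
rough usage sketch (the zeroed block hash is just a placeholder, and `ChainGraph` is assumed to be
reachable through the public `chain_graph` module):

    use bdk_chain::{chain_graph::ChainGraph, BlockId, TxHeight};
    use bitcoin::{hashes::Hash, BlockHash, Transaction};

    let mut cg = ChainGraph::<TxHeight, Transaction>::default();
    let block = BlockId {
        height: 21,
        hash: BlockHash::from_inner([0u8; 32]), // placeholder hash
    };

    // Compute the changes without touching `cg`...
    let changeset = cg
        .insert_checkpoint_preview(block)
        .expect("no checkpoint occupies this height yet");
    // ...then apply them. `insert_checkpoint` performs both steps in one call.
    cg.apply_changeset(changeset);
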
diff --git a/crates/chain/src/descriptor_ext.rs b/crates/chain/src/descriptor_ext.rs
new file mode 100644
index 0000000..e74928b
--- /dev/null
@@ -0,0 +1,16 @@
+use crate::miniscript::{Descriptor, DescriptorPublicKey};
+
+/// A trait to extend the functionality of a miniscript descriptor.
+pub trait DescriptorExt {
+    /// Returns the minimum value (in satoshis) that an output should have to be broadcastable.
+    fn dust_value(&self) -> u64;
+}
+
+impl DescriptorExt for Descriptor<DescriptorPublicKey> {
+    fn dust_value(&self) -> u64 {
+        self.at_derivation_index(0)
+            .script_pubkey()
+            .dust_value()
+            .to_sat()
+    }
+}
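
A brief sketch of the extension trait in use, assuming the crate re-exports `miniscript` and
`DescriptorExt` at its root (as `crate::miniscript` above suggests); the descriptor and key are
purely illustrative:

    use core::str::FromStr;

    use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
    use bdk_chain::DescriptorExt;

    // A single-key wpkh descriptor; the key is the well-known secp256k1 generator point.
    let desc = Descriptor::<DescriptorPublicKey>::from_str(
        "wpkh(0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798)",
    )
    .expect("valid descriptor");

    // Smallest value (in sats) an output with this script type can have without being dust.
    let min_sats = desc.dust_value();
    assert!(min_sats > 0);
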
diff --git a/crates/chain/src/example_utils.rs b/crates/chain/src/example_utils.rs
new file mode 100644
index 0000000..3aee352
--- /dev/null
@@ -0,0 +1,30 @@
+#![allow(unused)]
+use alloc::vec::Vec;
+use bitcoin::{
+    consensus,
+    hashes::{hex::FromHex, Hash},
+    Transaction,
+};
+
+use crate::BlockId;
+
+pub const RAW_TX_1: &'static str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
+pub const RAW_TX_2: &'static str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
+pub const RAW_TX_3: &'static str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
+pub const RAW_TX_4: &'static str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
+
+pub fn tx_from_hex(s: &str) -> Transaction {
+    let raw = Vec::from_hex(s).expect("data must be in hex");
+    consensus::deserialize(raw.as_slice()).expect("must deserialize")
+}
+
+pub fn new_hash<H: Hash>(s: &str) -> H {
+    <H as bitcoin::hashes::Hash>::hash(s.as_bytes())
+}
+
+pub fn new_block_id(height: u32, hash: &str) -> BlockId {
+    BlockId {
+        height,
+        hash: new_hash(hash),
+    }
+}
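
These helpers exist for the crate's own doc examples, so a sketch of how they compose would live
inside the crate (the constants and functions are the ones defined above):

    // Decode one of the bundled raw transactions and build a `BlockId` whose hash is
    // simply the hash of the string "test block".
    let tx = tx_from_hex(RAW_TX_1);
    let block = new_block_id(500_000, "test block");

    assert_eq!(block.height, 500_000);
    assert!(!tx.input.is_empty() && !tx.output.is_empty());
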
diff --git a/crates/chain/src/keychain.rs b/crates/chain/src/keychain.rs
new file mode 100644
index 0000000..4111083
--- /dev/null
@@ -0,0 +1,313 @@
+//! Module for keychain based structures.
+//!
+//! A keychain here is a set of application-defined indexes for a miniscript descriptor where we can
+//! derive script pubkeys at a particular derivation index. The application's index is simply
+//! anything that implements `Ord`.
+//!
+//! [`KeychainTxOutIndex`] indexes script pubkeys of keychains and scans in relevant outpoints (those
+//! that have a `txout` containing an indexed script pubkey). Internally, this uses [`SpkTxOutIndex`], but
+//! also maintains "revealed" and "lookahead" index count per keychain.
+//!
+//! [`KeychainTracker`] combines [`ChainGraph`] and [`KeychainTxOutIndex`] and enforces atomic
+//! changes between both these structures. [`KeychainScan`] is a structure used to update a
+//! [`KeychainTracker`], and changes made to a [`KeychainTracker`] are reported by
+//! [`KeychainChangeSet`]s.
+//!
+//! [`SpkTxOutIndex`]: crate::SpkTxOutIndex
+use crate::{
+    chain_graph::{self, ChainGraph},
+    collections::BTreeMap,
+    sparse_chain::ChainPosition,
+    tx_graph::TxGraph,
+    AsTransaction, ForEachTxOut,
+};
+use bitcoin::Transaction;
+
+#[cfg(feature = "miniscript")]
+pub mod persist;
+#[cfg(feature = "miniscript")]
+pub use persist::*;
+#[cfg(feature = "miniscript")]
+mod tracker;
+#[cfg(feature = "miniscript")]
+pub use tracker::*;
+#[cfg(feature = "miniscript")]
+mod txout_index;
+#[cfg(feature = "miniscript")]
+pub use txout_index::*;
+
+/// Represents updates to the derivation index of a [`KeychainTxOutIndex`].
+///
+/// It can be applied to [`KeychainTxOutIndex`] with [`apply_additions`]. [`DerivationAdditions`] are
+/// monotone in that they will never decrease the revealed derivation index.
+///
+/// [`KeychainTxOutIndex`]: crate::keychain::KeychainTxOutIndex
+/// [`apply_additions`]: crate::keychain::KeychainTxOutIndex::apply_additions
+#[derive(Clone, Debug, PartialEq)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(
+        crate = "serde_crate",
+        bound(
+            deserialize = "K: Ord + serde::Deserialize<'de>",
+            serialize = "K: Ord + serde::Serialize"
+        )
+    )
+)]
+#[must_use]
+pub struct DerivationAdditions<K>(BTreeMap<K, u32>);
+
+impl<K> DerivationAdditions<K> {
+    /// Returns whether the additions are empty.
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// Get the inner map of keychain to its new derivation index.
+    pub fn as_inner(&self) -> &BTreeMap<K, u32> {
+        &self.0
+    }
+}
+
+impl<K: Ord> DerivationAdditions<K> {
+    /// Append another [`DerivationAdditions`] into self.
+    ///
+    /// If a keychain already exists, its index is increased when `other`'s index is higher.
+    /// If a keychain does not exist yet, it is added with the new index.
+    pub fn append(&mut self, mut other: Self) {
+        self.0.iter_mut().for_each(|(key, index)| {
+            if let Some(other_index) = other.0.remove(key) {
+                *index = other_index.max(*index);
+            }
+        });
+
+        self.0.append(&mut other.0);
+    }
+}
+
+impl<K> Default for DerivationAdditions<K> {
+    fn default() -> Self {
+        Self(Default::default())
+    }
+}
+
+impl<K> AsRef<BTreeMap<K, u32>> for DerivationAdditions<K> {
+    fn as_ref(&self) -> &BTreeMap<K, u32> {
+        &self.0
+    }
+}
+
+#[derive(Clone, Debug, PartialEq)]
+/// An update that includes the last active indexes of each keychain.
+pub struct KeychainScan<K, P, T = Transaction> {
+    /// The update data in the form of a chain that could be applied
+    pub update: ChainGraph<P, T>,
+    /// The last active indexes of each keychain
+    pub last_active_indices: BTreeMap<K, u32>,
+}
+
+impl<K, P, T> Default for KeychainScan<K, P, T> {
+    fn default() -> Self {
+        Self {
+            update: Default::default(),
+            last_active_indices: Default::default(),
+        }
+    }
+}
+
+impl<K, P, T> From<ChainGraph<P, T>> for KeychainScan<K, P, T> {
+    fn from(update: ChainGraph<P, T>) -> Self {
+        KeychainScan {
+            update,
+            last_active_indices: Default::default(),
+        }
+    }
+}
+
+/// Represents changes to a [`KeychainTracker`].
+///
+/// This is essentially a combination of [`DerivationAdditions`] and [`chain_graph::ChangeSet`].
+#[derive(Clone, Debug)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(
+        crate = "serde_crate",
+        bound(
+            deserialize = "K: Ord + serde::Deserialize<'de>, P: serde::Deserialize<'de>, T: Ord + serde::Deserialize<'de>",
+            serialize = "K: Ord + serde::Serialize, P: serde::Serialize, T: Ord+ serde::Serialize"
+        )
+    )
+)]
+#[must_use]
+pub struct KeychainChangeSet<K, P, T = Transaction> {
+    /// The changes in local keychain derivation indices
+    pub derivation_indices: DerivationAdditions<K>,
+    /// The changes that have occurred in the blockchain
+    pub chain_graph: chain_graph::ChangeSet<P, T>,
+}
+
+impl<K, P, T> Default for KeychainChangeSet<K, P, T> {
+    fn default() -> Self {
+        Self {
+            chain_graph: Default::default(),
+            derivation_indices: Default::default(),
+        }
+    }
+}
+
+impl<K, P, T> KeychainChangeSet<K, P, T> {
+    /// Returns whether the [`KeychainChangeSet`] is empty (no changes recorded).
+    pub fn is_empty(&self) -> bool {
+        self.chain_graph.is_empty() && self.derivation_indices.is_empty()
+    }
+
+    /// Appends the changes in `other` into `self` such that applying `self` afterwards has the same
+    /// effect as sequentially applying the original `self` and `other`.
+    ///
+    /// Note the derivation indices cannot be decreased, so `other` will only change the derivation
+    /// index for a keychain if its entry is higher than the one in `self`.
+    pub fn append(&mut self, other: KeychainChangeSet<K, P, T>)
+    where
+        K: Ord,
+        P: ChainPosition,
+        T: Ord,
+    {
+        self.derivation_indices.append(other.derivation_indices);
+        self.chain_graph.append(other.chain_graph);
+    }
+}
+
+impl<K, P, T> From<chain_graph::ChangeSet<P, T>> for KeychainChangeSet<K, P, T> {
+    fn from(changeset: chain_graph::ChangeSet<P, T>) -> Self {
+        Self {
+            chain_graph: changeset,
+            ..Default::default()
+        }
+    }
+}
+
+impl<K, P, T> From<DerivationAdditions<K>> for KeychainChangeSet<K, P, T> {
+    fn from(additions: DerivationAdditions<K>) -> Self {
+        Self {
+            derivation_indices: additions,
+            ..Default::default()
+        }
+    }
+}
+
+impl<K, P, T> AsRef<TxGraph<T>> for KeychainScan<K, P, T> {
+    fn as_ref(&self) -> &TxGraph<T> {
+        self.update.graph()
+    }
+}
+
+impl<K, P, T: AsTransaction> ForEachTxOut for KeychainChangeSet<K, P, T> {
+    fn for_each_txout(&self, f: impl FnMut((bitcoin::OutPoint, &bitcoin::TxOut))) {
+        self.chain_graph.for_each_txout(f)
+    }
+}
+
+/// Balance differentiated in various categories.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(crate = "serde_crate",)
+)]
+pub struct Balance {
+    /// All coinbase outputs not yet matured
+    pub immature: u64,
+    /// Unconfirmed UTXOs generated by a wallet tx
+    pub trusted_pending: u64,
+    /// Unconfirmed UTXOs received from an external wallet
+    pub untrusted_pending: u64,
+    /// Confirmed and immediately spendable balance
+    pub confirmed: u64,
+}
+
+impl Balance {
+    /// Get sum of trusted_pending and confirmed coins.
+    ///
+    /// This is the balance you can spend right now and that shouldn't be cancelled by another
+    /// party double-spending it.
+    pub fn trusted_spendable(&self) -> u64 {
+        self.confirmed + self.trusted_pending
+    }
+
+    /// Get the whole balance visible to the wallet.
+    pub fn total(&self) -> u64 {
+        self.confirmed + self.trusted_pending + self.untrusted_pending + self.immature
+    }
+}
+
+impl core::fmt::Display for Balance {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(
+            f,
+            "{{ immature: {}, trusted_pending: {}, untrusted_pending: {}, confirmed: {} }}",
+            self.immature, self.trusted_pending, self.untrusted_pending, self.confirmed
+        )
+    }
+}
+
+impl core::ops::Add for Balance {
+    type Output = Self;
+
+    fn add(self, other: Self) -> Self {
+        Self {
+            immature: self.immature + other.immature,
+            trusted_pending: self.trusted_pending + other.trusted_pending,
+            untrusted_pending: self.untrusted_pending + other.untrusted_pending,
+            confirmed: self.confirmed + other.confirmed,
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use bitcoin::Transaction;
+
+    use crate::TxHeight;
+
+    use super::*;
+    #[test]
+    fn append_keychain_derivation_indices() {
+        #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)]
+        enum Keychain {
+            One,
+            Two,
+            Three,
+            Four,
+        }
+        let mut lhs_di = BTreeMap::<Keychain, u32>::default();
+        let mut rhs_di = BTreeMap::<Keychain, u32>::default();
+        lhs_di.insert(Keychain::One, 7);
+        lhs_di.insert(Keychain::Two, 0);
+        rhs_di.insert(Keychain::One, 3);
+        rhs_di.insert(Keychain::Two, 5);
+        lhs_di.insert(Keychain::Three, 3);
+        rhs_di.insert(Keychain::Four, 4);
+        let mut lhs = KeychainChangeSet {
+            derivation_indices: DerivationAdditions(lhs_di),
+            chain_graph: chain_graph::ChangeSet::<TxHeight, Transaction>::default(),
+        };
+
+        let rhs = KeychainChangeSet {
+            derivation_indices: DerivationAdditions(rhs_di),
+            chain_graph: chain_graph::ChangeSet::<TxHeight, Transaction>::default(),
+        };
+
+        lhs.append(rhs);
+
+        // Existing index doesn't update if the new index in `other` is lower than in `self`.
+        assert_eq!(lhs.derivation_indices.0.get(&Keychain::One), Some(&7));
+        // Existing index updates if the new index in `other` is higher than in `self`.
+        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Two), Some(&5));
+        // Existing index is unchanged if the keychain doesn't exist in `other`.
+        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Three), Some(&3));
+        // New keychain gets added if keychain is in `other`, but not in `self`.
+        assert_eq!(lhs.derivation_indices.0.get(&Keychain::Four), Some(&4));
+    }
+}
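
The `Balance` type above adds component-wise, and its helper methods expose the spendability tiers
described in the field docs; a small arithmetic sketch:

    use bdk_chain::keychain::Balance;

    let wallet = Balance {
        immature: 0,
        trusted_pending: 5_000,
        untrusted_pending: 1_000,
        confirmed: 10_000,
    };
    let coinbase_reward = Balance {
        immature: 2_500,
        ..Default::default()
    };

    let total = wallet + coinbase_reward;
    assert_eq!(total.trusted_spendable(), 15_000); // confirmed + trusted_pending
    assert_eq!(total.total(), 18_500);             // everything, including immature
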
diff --git a/crates/chain/src/keychain/persist.rs b/crates/chain/src/keychain/persist.rs
new file mode 100644
index 0000000..8bf68cb
--- /dev/null
@@ -0,0 +1,105 @@
+//! Persistence for changes made to a [`KeychainTracker`].
+//!
+//! BDK's [`KeychainTracker`] needs somewhere to persist changes it makes during operation.
+//! Operations like giving out a new address are crucial to persist so that next time the
+//! application is loaded it can find transactions related to that address.
+//!
+//! Note that the [`KeychainTracker`] does not read this persisted data during operation since it
+//! always has a copy in memory.
+//!
+//! [`KeychainTracker`]: crate::keychain::KeychainTracker
+
+use crate::{keychain, sparse_chain::ChainPosition};
+
+/// `Persist` wraps a [`PersistBackend`] to create a convenient staging area for changes before they
+/// are persisted. Not all changes made to the [`KeychainTracker`] need to be written to disk right
+//! away, so you can use [`Persist::stage`] to *stage* them first and then [`Persist::commit`] to
+//! finally write them to disk.
+///
+/// [`KeychainTracker`]: keychain::KeychainTracker
+#[derive(Debug)]
+pub struct Persist<K, P, B> {
+    backend: B,
+    stage: keychain::KeychainChangeSet<K, P>,
+}
+
+impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
+    /// Create a new `Persist` from a [`PersistBackend`].
+    pub fn new(backend: B) -> Self {
+        Self {
+            backend,
+            stage: Default::default(),
+        }
+    }
+
+    /// Stage a `changeset` to be persisted later with [`commit`].
+    ///
+    /// [`commit`]: Self::commit
+    pub fn stage(&mut self, changeset: keychain::KeychainChangeSet<K, P>)
+    where
+        K: Ord,
+        P: ChainPosition,
+    {
+        self.stage.append(changeset)
+    }
+
+    /// Get the changes that haven't been committed yet.
+    pub fn staged(&self) -> &keychain::KeychainChangeSet<K, P> {
+        &self.stage
+    }
+
+    /// Commit the staged changes to the underlying persistence backend.
+    ///
+    /// Returns a backend-defined error if this fails.
+    pub fn commit(&mut self) -> Result<(), B::WriteError> {
+        self.backend.append_changeset(&self.stage)?;
+        self.stage = Default::default();
+        Ok(())
+    }
+}
+
+/// A persistence backend for [`Persist`].
+pub trait PersistBackend<K, P> {
+    /// The error the backend returns when it fails to write.
+    type WriteError: core::fmt::Debug;
+
+    /// The error the backend returns when it fails to load.
+    type LoadError: core::fmt::Debug;
+
+    /// Appends a new changeset to the persistence backend.
+    ///
+    /// It is up to the backend what it does with this. It could store every changeset in a list or
+    /// insert the actual changes into a more structured database. All it needs to guarantee is
+    /// that [`load_into_keychain_tracker`] restores a keychain tracker to what it should be if all
+    /// changesets had been applied sequentially.
+    ///
+    /// [`load_into_keychain_tracker`]: Self::load_into_keychain_tracker
+    fn append_changeset(
+        &mut self,
+        changeset: &keychain::KeychainChangeSet<K, P>,
+    ) -> Result<(), Self::WriteError>;
+
+    /// Applies all the changesets the backend has received to `tracker`.
+    fn load_into_keychain_tracker(
+        &mut self,
+        tracker: &mut keychain::KeychainTracker<K, P>,
+    ) -> Result<(), Self::LoadError>;
+}
+
+impl<K, P> PersistBackend<K, P> for () {
+    type WriteError = ();
+    type LoadError = ();
+
+    fn append_changeset(
+        &mut self,
+        _changeset: &keychain::KeychainChangeSet<K, P>,
+    ) -> Result<(), Self::WriteError> {
+        Ok(())
+    }
+    fn load_into_keychain_tracker(
+        &mut self,
+        _tracker: &mut keychain::KeychainTracker<K, P>,
+    ) -> Result<(), Self::LoadError> {
+        Ok(())
+    }
+}
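
A rough sketch of the stage/commit flow using the no-op `()` backend defined above; `String`
stands in for an application's keychain type and `TxHeight` for the chain position:

    use bdk_chain::keychain::{KeychainChangeSet, Persist};
    use bdk_chain::TxHeight;

    let mut persist = Persist::<String, TxHeight, ()>::new(());

    // Stage changes as they are produced (an empty default changeset here)...
    persist.stage(KeychainChangeSet::default());
    assert!(persist.staged().is_empty());

    // ...then write them out in one go. The `()` backend never fails.
    persist.commit().expect("`()` backend never fails");
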
diff --git a/crates/chain/src/keychain/tracker.rs b/crates/chain/src/keychain/tracker.rs
new file mode 100644
index 0000000..ba74889
--- /dev/null
@@ -0,0 +1,315 @@
+use bitcoin::Transaction;
+use miniscript::{Descriptor, DescriptorPublicKey};
+
+use crate::{
+    chain_graph::{self, ChainGraph},
+    collections::*,
+    keychain::{KeychainChangeSet, KeychainScan, KeychainTxOutIndex},
+    sparse_chain::{self, SparseChain},
+    tx_graph::TxGraph,
+    AsTransaction, BlockId, FullTxOut, IntoOwned, TxHeight,
+};
+
+use super::{Balance, DerivationAdditions};
+
+/// A convenient combination of a [`KeychainTxOutIndex`] and a [`ChainGraph`].
+///
+/// The [`KeychainTracker`] atomically updates its [`KeychainTxOutIndex`] whenever new chain data is
+/// incorporated into its internal [`ChainGraph`].
+#[derive(Clone, Debug)]
+pub struct KeychainTracker<K, P, T = Transaction> {
+    /// Index mapping script pubkeys to transaction outputs
+    pub txout_index: KeychainTxOutIndex<K>,
+    chain_graph: ChainGraph<P, T>,
+}
+
+impl<K, P, T> KeychainTracker<K, P, T>
+where
+    P: sparse_chain::ChainPosition,
+    K: Ord + Clone + core::fmt::Debug,
+    T: AsTransaction + Clone + Ord,
+{
+    /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses for it.
+    /// This is just shorthand for calling [`KeychainTxOutIndex::add_keychain`] on the internal
+    /// `txout_index`.
+    ///
+    /// Adding a keychain means you will be able to derive new script pubkeys under that keychain
+    /// and the tracker will discover transaction outputs with those script pubkeys.
+    pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
+        self.txout_index.add_keychain(keychain, descriptor)
+    }
+
+    /// Get the internal map of keychains to their descriptors. This is just shorthand for calling
+    /// [`KeychainTxOutIndex::keychains`] on the internal `txout_index`.
+    pub fn keychains(&mut self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
+        self.txout_index.keychains()
+    }
+
+    /// Get the checkpoint limit of the internal [`SparseChain`].
+    ///
+    /// Refer to [`SparseChain::checkpoint_limit`] for more.
+    pub fn checkpoint_limit(&self) -> Option<usize> {
+        self.chain_graph.checkpoint_limit()
+    }
+
+    /// Set the checkpoint limit of the internal [`SparseChain`].
+    ///
+    /// Refer to [`SparseChain::set_checkpoint_limit`] for more.
+    pub fn set_checkpoint_limit(&mut self, limit: Option<usize>) {
+        self.chain_graph.set_checkpoint_limit(limit)
+    }
+
+    /// Determines the resultant [`KeychainChangeSet`] if the given [`KeychainScan`] is applied.
+    ///
+    /// Internally, we call [`ChainGraph::determine_changeset`] and also determine the additions of
+    /// [`KeychainTxOutIndex`].
+    pub fn determine_changeset<T2>(
+        &self,
+        scan: &KeychainScan<K, P, T2>,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::UpdateError<P>>
+    where
+        T2: IntoOwned<T> + Clone,
+    {
+        // TODO: `KeychainTxOutIndex::determine_additions`
+        let mut derivation_indices = scan.last_active_indices.clone();
+        derivation_indices.retain(|keychain, index| {
+            match self.txout_index.last_revealed_index(keychain) {
+                Some(existing) => *index > existing,
+                None => true,
+            }
+        });
+
+        Ok(KeychainChangeSet {
+            derivation_indices: DerivationAdditions(derivation_indices),
+            chain_graph: self.chain_graph.determine_changeset(&scan.update)?,
+        })
+    }
+
+    /// Directly applies a [`KeychainScan`] on [`KeychainTracker`].
+    ///
+    /// This is equivalent to calling [`determine_changeset`] and [`apply_changeset`] in sequence.
+    ///
+    /// [`determine_changeset`]: Self::determine_changeset
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn apply_update<T2>(
+        &mut self,
+        scan: KeychainScan<K, P, T2>,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::UpdateError<P>>
+    where
+        T2: IntoOwned<T> + Clone,
+    {
+        let changeset = self.determine_changeset(&scan)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Applies the changes in `changeset` to [`KeychainTracker`].
+    ///
+    /// Internally, this calls [`KeychainTxOutIndex::apply_additions`] and
+    /// [`ChainGraph::apply_changeset`] in sequence.
+    pub fn apply_changeset(&mut self, changeset: KeychainChangeSet<K, P, T>) {
+        let KeychainChangeSet {
+            derivation_indices,
+            chain_graph,
+        } = changeset;
+        self.txout_index.apply_additions(derivation_indices);
+        let _ = self.txout_index.scan(&chain_graph);
+        self.chain_graph.apply_changeset(chain_graph)
+    }
+
+    /// Iterates through [`FullTxOut`]s that are considered to exist in our representation of the
+    /// blockchain/mempool.
+    ///
+    /// In other words, these are `txout`s of confirmed and in-mempool transactions, based on our
+    /// view of the blockchain/mempool.
+    pub fn full_txouts(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
+        self.txout_index
+            .txouts()
+            .filter_map(|(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
+    }
+
+    /// Iterates through [`FullTxOut`]s that are unspent outputs.
+    ///
+    /// Refer to [`full_txouts`] for more.
+    ///
+    /// [`full_txouts`]: Self::full_txouts
+    pub fn full_utxos(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
+        self.full_txouts()
+            .filter(|(_, txout)| txout.spent_by.is_none())
+    }
+
+    /// Returns a reference to the internal [`ChainGraph`].
+    pub fn chain_graph(&self) -> &ChainGraph<P, T> {
+        &self.chain_graph
+    }
+
+    /// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
+    pub fn graph(&self) -> &TxGraph<T> {
+        self.chain_graph().graph()
+    }
+
+    /// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]).
+    pub fn chain(&self) -> &SparseChain<P> {
+        self.chain_graph().chain()
+    }
+
+    /// Determines the changes as a result of inserting `block_id` (a height and block hash) into the
+    /// tracker.
+    ///
+    /// The caller is responsible for guaranteeing that a block exists at that height. If a
+    /// checkpoint already exists at that height with a different hash, this returns an error.
+    /// Otherwise, it returns the [`KeychainChangeSet`] that would result from inserting the
+    /// checkpoint (the changeset is empty if a checkpoint with the same hash already exists).
+    ///
+    /// This is a preview: it does not modify the tracker. To apply the changes, pass the returned
+    /// changeset to [`apply_changeset`] and persist it if you need to restore it later.
+    ///
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_checkpoint_preview(
+        &self,
+        block_id: BlockId,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::InsertCheckpointError> {
+        Ok(KeychainChangeSet {
+            chain_graph: self.chain_graph.insert_checkpoint_preview(block_id)?,
+            ..Default::default()
+        })
+    }
+
+    /// Directly insert a `block_id` into the tracker.
+    ///
+    /// This is equivalent to calling [`insert_checkpoint_preview`] and [`apply_changeset`] in
+    /// sequence.
+    ///
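+    /// A minimal sketch: the block hash comes from the crate's doc-test helpers and the `&str`
+    /// keychain type is purely illustrative.
+    ///
+    /// ```
+    /// # use bdk_chain::{BlockId, TxHeight, keychain::KeychainTracker, example_utils::*};
+    /// # use bitcoin::BlockHash;
+    /// # let hash_a = new_hash::<BlockHash>("a");
+    /// let mut tracker = KeychainTracker::<&str, TxHeight>::default();
+    /// let _changeset = tracker
+    ///     .insert_checkpoint(BlockId { height: 1, hash: hash_a })
+    ///     .expect("cannot conflict with an empty tracker");
+    /// assert_eq!(tracker.chain().latest_checkpoint().map(|cp| cp.height), Some(1));
+    /// ```
+    ///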
+    /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_checkpoint(
+        &mut self,
+        block_id: BlockId,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::InsertCheckpointError> {
+        let changeset = self.insert_checkpoint_preview(block_id)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Determines the changes as a result of inserting a transaction into the inner [`ChainGraph`]
+    /// and optionally into the inner chain at position `pos`.
+    ///
+    /// This is a preview: it does not modify the chain graph. To apply the changes, pass the
+    /// returned changeset to [`apply_changeset`] and persist it if you need to restore it later.
+    ///
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_tx_preview(
+        &self,
+        tx: T,
+        pos: P,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::InsertTxError<P>> {
+        Ok(KeychainChangeSet {
+            chain_graph: self.chain_graph.insert_tx_preview(tx, pos)?,
+            ..Default::default()
+        })
+    }
+
+    /// Directly insert a transaction into the inner [`ChainGraph`] and optionally into the inner
+    /// chain at position `pos`.
+    ///
+    /// This is equivalent to calling [`insert_tx_preview`] and [`apply_changeset`] in sequence.
+    ///
+    /// [`insert_tx_preview`]: Self::insert_tx_preview
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_tx(
+        &mut self,
+        tx: T,
+        pos: P,
+    ) -> Result<KeychainChangeSet<K, P, T>, chain_graph::InsertTxError<P>> {
+        let changeset = self.insert_tx_preview(tx, pos)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Returns the *balance* of the tracker, i.e. the value of the unspent transaction outputs it
+    /// tracks.
+    ///
+    /// The caller provides a `should_trust` predicate which decides whether the value of
+    /// unconfirmed outputs on a given keychain is guaranteed to be realized. For example:
+    ///
+    /// - For an *internal* (change) keychain, `should_trust` should generally return `true`: even
+    /// if you lose an internal output due to eviction, you gain back the value from whatever
+    /// output the unconfirmed transaction was spending (since that output is presumably from your
+    /// wallet).
+    /// - For an *external* keychain, you might want `should_trust` to return `false`, since
+    /// someone may cancel (by double spending) a payment made to addresses on that keychain.
+    ///
+    /// When in doubt, make `should_trust` return `false`. This does nothing other than change
+    /// where the unconfirmed outputs' value is accounted for in `Balance`.
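+    ///
+    /// A minimal sketch of the predicate, assuming the wallet tags its keychains with plain `&str`
+    /// identifiers (any `K` works the same way); the tracker here is empty, so every component of
+    /// the resulting balance is zero:
+    ///
+    /// ```
+    /// # use bdk_chain::{keychain::KeychainTracker, TxHeight};
+    /// let tracker = KeychainTracker::<&str, TxHeight>::default();
+    /// // trust unconfirmed outputs on the internal (change) keychain only
+    /// let balance = tracker.balance(|keychain| *keychain == "internal");
+    /// assert_eq!(balance.confirmed, 0);
+    /// ```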
+    pub fn balance(&self, mut should_trust: impl FnMut(&K) -> bool) -> Balance {
+        let mut immature = 0;
+        let mut trusted_pending = 0;
+        let mut untrusted_pending = 0;
+        let mut confirmed = 0;
+        let last_sync_height = self.chain().latest_checkpoint().map(|latest| latest.height);
+        for ((keychain, _), utxo) in self.full_utxos() {
+            let chain_position = &utxo.chain_position;
+
+            match chain_position.height() {
+                TxHeight::Confirmed(_) => {
+                    if utxo.is_on_coinbase {
+                        if utxo.is_mature(
+                            last_sync_height
+                                .expect("since it's confirmed we must have a checkpoint"),
+                        ) {
+                            confirmed += utxo.txout.value;
+                        } else {
+                            immature += utxo.txout.value;
+                        }
+                    } else {
+                        confirmed += utxo.txout.value;
+                    }
+                }
+                TxHeight::Unconfirmed => {
+                    if should_trust(keychain) {
+                        trusted_pending += utxo.txout.value;
+                    } else {
+                        untrusted_pending += utxo.txout.value;
+                    }
+                }
+            }
+        }
+
+        Balance {
+            immature,
+            trusted_pending,
+            untrusted_pending,
+            confirmed,
+        }
+    }
+
+    /// Returns the balance of all spendable confirmed unspent outputs of this tracker at a
+    /// particular height.
+    pub fn balance_at(&self, height: u32) -> u64 {
+        self.full_txouts()
+            .filter(|(_, full_txout)| full_txout.is_spendable_at(height))
+            .map(|(_, full_txout)| full_txout.txout.value)
+            .sum()
+    }
+}
+
+impl<K, P> Default for KeychainTracker<K, P> {
+    fn default() -> Self {
+        Self {
+            txout_index: Default::default(),
+            chain_graph: Default::default(),
+        }
+    }
+}
+
+impl<K, P> AsRef<SparseChain<P>> for KeychainTracker<K, P> {
+    fn as_ref(&self) -> &SparseChain<P> {
+        self.chain_graph.chain()
+    }
+}
+
+impl<K, P> AsRef<TxGraph> for KeychainTracker<K, P> {
+    fn as_ref(&self) -> &TxGraph {
+        self.chain_graph.graph()
+    }
+}
+
+impl<K, P> AsRef<ChainGraph<P>> for KeychainTracker<K, P> {
+    fn as_ref(&self) -> &ChainGraph<P> {
+        &self.chain_graph
+    }
+}
diff --git a/crates/chain/src/keychain/txout_index.rs b/crates/chain/src/keychain/txout_index.rs
new file mode 100644 (file)
index 0000000..547957d
--- /dev/null
@@ -0,0 +1,585 @@
+use crate::{
+    collections::*,
+    miniscript::{Descriptor, DescriptorPublicKey},
+    ForEachTxOut, SpkTxOutIndex,
+};
+use alloc::{borrow::Cow, vec::Vec};
+use bitcoin::{secp256k1::Secp256k1, OutPoint, Script, TxOut};
+use core::{fmt::Debug, ops::Deref};
+
+use super::DerivationAdditions;
+
+/// Maximum [BIP32](https://bips.xyz/32) derivation index.
+pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1;
+
+/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public
+/// [`Descriptor`]s.
+///
+/// Descriptors are referenced by the provided keychain generic (`K`).
+///
+/// Script pubkeys for a descriptor are revealed chronologically from index 0. I.e. If the last
+/// revealed index of a descriptor is 5, scripts of indices 0 to 4 are guaranteed to already be
+/// revealed. In addition to revealed scripts, we have a `lookahead` parameter for each keychain
+/// which defines the number of script pubkeys to store ahead of the last revealed index.
+///
+/// Methods that could update the last revealed index will return [`DerivationAdditions`] to report
+/// these changes. This can be persisted for future recovery.
+///
+/// ## Synopsis
+///
+/// ```
+/// use bdk_chain::keychain::KeychainTxOutIndex;
+/// # use bdk_chain::{ miniscript::{Descriptor, DescriptorPublicKey} };
+/// # use core::str::FromStr;
+///
+/// // imagine our service has internal and external addresses but also addresses for users
+/// #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
+/// enum MyKeychain {
+///     External,
+///     Internal,
+///     MyAppUser {
+///         user_id: u32
+///     }
+/// }
+///
+/// let mut txout_index = KeychainTxOutIndex::<MyKeychain>::default();
+///
+/// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+/// # let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+/// # let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
+/// # let descriptor_for_user_42 = external_descriptor.clone();
+/// txout_index.add_keychain(MyKeychain::External, external_descriptor);
+/// txout_index.add_keychain(MyKeychain::Internal, internal_descriptor);
+/// txout_index.add_keychain(MyKeychain::MyAppUser { user_id: 42 }, descriptor_for_user_42);
+///
+/// let new_spk_for_user = txout_index.reveal_next_spk(&MyKeychain::MyAppUser{ user_id: 42 });
+/// ```
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`SpkTxOutIndex`]: crate::spk_txout_index::SpkTxOutIndex
+/// [`Descriptor`]: crate::miniscript::Descriptor
+#[derive(Clone, Debug)]
+pub struct KeychainTxOutIndex<K> {
+    inner: SpkTxOutIndex<(K, u32)>,
+    // descriptors of each keychain
+    keychains: BTreeMap<K, Descriptor<DescriptorPublicKey>>,
+    // last stored indexes
+    last_revealed: BTreeMap<K, u32>,
+    // lookahead settings for each keychain
+    lookahead: BTreeMap<K, u32>,
+}
+
+impl<K> Default for KeychainTxOutIndex<K> {
+    fn default() -> Self {
+        Self {
+            inner: SpkTxOutIndex::default(),
+            keychains: BTreeMap::default(),
+            last_revealed: BTreeMap::default(),
+            lookahead: BTreeMap::default(),
+        }
+    }
+}
+
+impl<K> Deref for KeychainTxOutIndex<K> {
+    type Target = SpkTxOutIndex<(K, u32)>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+
+impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
+    /// Scans an object for relevant outpoints, which are stored and indexed internally.
+    ///
+    /// If the matched script pubkey is part of the lookahead, the last stored index is updated for
+    /// the script pubkey's keychain and the [`DerivationAdditions`] returned will reflect the
+    /// change.
+    ///
+    /// Typically this method is used in two situations:
+    ///
+    /// 1. After loading transaction data from disk, you may scan over all the txouts to restore
+    /// your index of previously-known outputs.
+    /// 2. When getting new data from the chain you usually scan it before incorporating it into
+    /// your chain state (i.e. `SparseChain`, `ChainGraph`).
+    ///
+    /// See [`ForEachTxOut`] for the types that support this.
+    ///
+    /// [`ForEachTxOut`]: crate::ForEachTxOut
+    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> DerivationAdditions<K> {
+        let mut additions = DerivationAdditions::<K>::default();
+        txouts.for_each_txout(|(op, txout)| additions.append(self.scan_txout(op, txout)));
+        additions
+    }
+
+    /// Scan a single outpoint for a matching script pubkey.
+    ///
+    /// If it matches the index will store and index it.
+    pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> DerivationAdditions<K> {
+        match self.inner.scan_txout(op, txout).cloned() {
+            Some((keychain, index)) => self.reveal_to_target(&keychain, index).1,
+            None => DerivationAdditions::default(),
+        }
+    }
+
+    /// Return a reference to the internal [`SpkTxOutIndex`].
+    pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
+        &self.inner
+    }
+
+    /// Return a reference to the internal map of keychain to descriptors.
+    pub fn keychains(&self) -> &BTreeMap<K, Descriptor<DescriptorPublicKey>> {
+        &self.keychains
+    }
+
+    /// Add a keychain to the tracker's `txout_index` with a descriptor to derive addresses for it.
+    ///
+    /// Adding a keychain means you will be able to derive new script pubkeys under that keychain
+    /// and the txout index will discover transaction outputs with those script pubkeys.
+    ///
+    /// # Panics
+    ///
+    /// This will panic if a different `descriptor` is introduced to the same `keychain`.
+    pub fn add_keychain(&mut self, keychain: K, descriptor: Descriptor<DescriptorPublicKey>) {
+        let old_descriptor = &*self.keychains.entry(keychain).or_insert(descriptor.clone());
+        assert_eq!(
+            &descriptor, old_descriptor,
+            "keychain already contains a different descriptor"
+        );
+    }
+
+    /// Return the lookahead setting for each keychain.
+    ///
+    /// Refer to [`set_lookahead`] for a deeper explanation on `lookahead`.
+    ///
+    /// [`set_lookahead`]: Self::set_lookahead
+    pub fn lookaheads(&self) -> &BTreeMap<K, u32> {
+        &self.lookahead
+    }
+
+    /// Convenience method to call [`set_lookahead`] for all keychains.
+    ///
+    /// [`set_lookahead`]: Self::set_lookahead
+    pub fn set_lookahead_for_all(&mut self, lookahead: u32) {
+        for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() {
+            self.lookahead.insert(keychain.clone(), lookahead);
+            self.replenish_lookahead(&keychain);
+        }
+    }
+
+    /// Set the lookahead count for `keychain`.
+    ///
+    /// The lookahead is the number of scripts to cache ahead of the last stored script index. This
+    /// is useful during a scan via [`scan`] or [`scan_txout`].
+    ///
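+    /// A short sketch, assuming a single `"external"` keychain (the keychain name and the
+    /// lookahead of 20 are illustrative; descriptor setup is hidden):
+    ///
+    /// ```
+    /// # use bdk_chain::keychain::KeychainTxOutIndex;
+    /// # use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
+    /// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    /// # let (descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+    /// let mut txout_index = KeychainTxOutIndex::<&'static str>::default();
+    /// txout_index.add_keychain("external", descriptor);
+    ///
+    /// // cache 20 scripts ahead of the last stored index
+    /// txout_index.set_lookahead(&"external", 20);
+    /// assert_eq!(txout_index.lookaheads().get(&"external"), Some(&20));
+    /// ```
+    ///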
+    /// # Panics
+    ///
+    /// This will panic if `keychain` does not exist.
+    ///
+    /// [`scan`]: Self::scan
+    /// [`scan_txout`]: Self::scan_txout
+    pub fn set_lookahead(&mut self, keychain: &K, lookahead: u32) {
+        self.lookahead.insert(keychain.clone(), lookahead);
+        self.replenish_lookahead(keychain);
+    }
+
+    /// Convenience method to call [`lookahead_to_target`] for multiple keychains.
+    ///
+    /// [`lookahead_to_target`]: Self::lookahead_to_target
+    pub fn lookahead_to_target_multi(&mut self, target_indexes: BTreeMap<K, u32>) {
+        for (keychain, target_index) in target_indexes {
+            self.lookahead_to_target(&keychain, target_index)
+        }
+    }
+
+    /// Store lookahead scripts until `target_index`.
+    ///
+    /// This does not change the `lookahead` setting.
+    pub fn lookahead_to_target(&mut self, keychain: &K, target_index: u32) {
+        let next_index = self.next_store_index(keychain);
+        if let Some(temp_lookahead) = target_index.checked_sub(next_index).filter(|&v| v > 0) {
+            let old_lookahead = self.lookahead.insert(keychain.clone(), temp_lookahead);
+            self.replenish_lookahead(keychain);
+
+            // revert
+            match old_lookahead {
+                Some(lookahead) => self.lookahead.insert(keychain.clone(), lookahead),
+                None => self.lookahead.remove(keychain),
+            };
+        }
+    }
+
+    fn replenish_lookahead(&mut self, keychain: &K) {
+        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
+        let next_store_index = self.next_store_index(keychain);
+        let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
+        let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v);
+
+        for (new_index, new_spk) in range_descriptor_spks(
+            Cow::Borrowed(descriptor),
+            next_store_index..next_reveal_index + lookahead,
+        ) {
+            let _inserted = self
+                .inner
+                .insert_spk((keychain.clone(), new_index), new_spk);
+            debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={:?}, lookahead={}, next_store_index={}, next_reveal_index={}", keychain, lookahead, next_store_index, next_reveal_index);
+        }
+    }
+
+    fn next_store_index(&self, keychain: &K) -> u32 {
+        self.inner()
+            .all_spks()
+            .range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
+            .last()
+            .map_or(0, |((_, v), _)| *v + 1)
+    }
+
+    /// Generates script pubkey iterators for every `keychain`. The iterators iterate over all
+    /// derivable script pubkeys.
+    pub fn spks_of_all_keychains(
+        &self,
+    ) -> BTreeMap<K, impl Iterator<Item = (u32, Script)> + Clone> {
+        self.keychains
+            .iter()
+            .map(|(keychain, descriptor)| {
+                (
+                    keychain.clone(),
+                    range_descriptor_spks(Cow::Owned(descriptor.clone()), 0..),
+                )
+            })
+            .collect()
+    }
+
+    /// Generates a script pubkey iterator for the given `keychain`'s descriptor (if exists). The
+    /// iterator iterates over all derivable scripts of the keychain's descriptor.
+    ///
+    /// # Panics
+    ///
+    /// This will panic if `keychain` does not exist.
+    pub fn spks_of_keychain(&self, keychain: &K) -> impl Iterator<Item = (u32, Script)> + Clone {
+        let descriptor = self
+            .keychains
+            .get(keychain)
+            .expect("keychain must exist")
+            .clone();
+        range_descriptor_spks(Cow::Owned(descriptor), 0..)
+    }
+
+    /// Convenience method to get [`revealed_spks_of_keychain`] of all keychains.
+    ///
+    /// [`revealed_spks_of_keychain`]: Self::revealed_spks_of_keychain
+    pub fn revealed_spks_of_all_keychains(
+        &self,
+    ) -> BTreeMap<K, impl Iterator<Item = (u32, &Script)> + Clone> {
+        self.keychains
+            .keys()
+            .map(|keychain| (keychain.clone(), self.revealed_spks_of_keychain(keychain)))
+            .collect()
+    }
+
+    /// Iterates over the script pubkeys revealed by this index under `keychain`.
+    pub fn revealed_spks_of_keychain(
+        &self,
+        keychain: &K,
+    ) -> impl DoubleEndedIterator<Item = (u32, &Script)> + Clone {
+        let next_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
+        self.inner
+            .all_spks()
+            .range((keychain.clone(), u32::MIN)..(keychain.clone(), next_index))
+            .map(|((_, derivation_index), spk)| (*derivation_index, spk))
+    }
+
+    /// Get the next derivation index for `keychain`. This is the index after the last revealed
+    /// derivation index.
+    ///
+    /// The second field in the returned tuple represents whether the next derivation index is new.
+    /// There are two scenarios where the next derivation index is reused (not new):
+    ///
+    /// 1. The keychain's descriptor has no wildcard, and a script has already been revealed.
+    /// 2. The number of revealed scripts has already reached 2^31 (refer to BIP-32).
+    ///
+    /// Not checking the second field of the tuple may result in address reuse.
+    ///
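+    /// A quick sketch with an illustrative `&str` keychain (descriptor setup hidden):
+    ///
+    /// ```
+    /// # use bdk_chain::keychain::KeychainTxOutIndex;
+    /// # use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
+    /// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    /// # let (descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+    /// let mut txout_index = KeychainTxOutIndex::<&'static str>::default();
+    /// txout_index.add_keychain("external", descriptor);
+    ///
+    /// // nothing revealed yet, so the next index is 0 and it is new
+    /// assert_eq!(txout_index.next_index(&"external"), (0, true));
+    ///
+    /// // after revealing index 0, the next index moves on to 1
+    /// let _ = txout_index.reveal_to_target(&"external", 0);
+    /// assert_eq!(txout_index.next_index(&"external"), (1, true));
+    /// ```
+    ///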
+    /// # Panics
+    ///
+    /// Panics if the `keychain` does not exist.
+    pub fn next_index(&self, keychain: &K) -> (u32, bool) {
+        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
+        let last_index = self.last_revealed.get(keychain).cloned();
+
+        // we can only get the next index if wildcard exists
+        let has_wildcard = descriptor.has_wildcard();
+
+        match last_index {
+            // if there is no index, next_index is always 0
+            None => (0, true),
+            // descriptors without wildcards can only have one index
+            Some(_) if !has_wildcard => (0, false),
+            // derivation index must be < 2^31 (BIP-32)
+            Some(index) if index > BIP32_MAX_INDEX => {
+                unreachable!("index is out of bounds")
+            }
+            Some(index) if index == BIP32_MAX_INDEX => (index, false),
+            // get next derivation index
+            Some(index) => (index + 1, true),
+        }
+    }
+
+    /// Get the last derivation index that is revealed for each keychain.
+    ///
+    /// Keychains with no revealed indices will not be included in the returned [`BTreeMap`].
+    pub fn last_revealed_indices(&self) -> &BTreeMap<K, u32> {
+        &self.last_revealed
+    }
+
+    /// Get the last derivation index revealed for `keychain`.
+    pub fn last_revealed_index(&self, keychain: &K) -> Option<u32> {
+        self.last_revealed.get(keychain).cloned()
+    }
+
+    /// Convenience method to call [`Self::reveal_to_target`] on multiple keychains.
+    pub fn reveal_to_target_multi(
+        &mut self,
+        keychains: &BTreeMap<K, u32>,
+    ) -> (
+        BTreeMap<K, impl Iterator<Item = (u32, Script)>>,
+        DerivationAdditions<K>,
+    ) {
+        let mut additions = DerivationAdditions::default();
+        let mut spks = BTreeMap::new();
+
+        for (keychain, &index) in keychains {
+            let (new_spks, new_additions) = self.reveal_to_target(&keychain, index);
+            if !new_additions.is_empty() {
+                spks.insert(keychain.clone(), new_spks);
+                additions.append(new_additions);
+            }
+        }
+
+        (spks, additions)
+    }
+
+    /// Reveals script pubkeys of the `keychain`'s descriptor **up to and including** the
+    /// `target_index`.
+    ///
+    /// If the `target_index` cannot be reached (due to the descriptor having no wildcard, and/or
+    /// the `target_index` is in the hardened index range), this method will make a best effort and
+    /// reveal up to the last possible index.
+    ///
+    /// This returns an iterator of newly revealed indices (alongside their scripts) and a
+    /// [`DerivationAdditions`] which reports updates to the latest revealed index. If no new script
+    /// pubkeys are revealed, both of these will be empty.
+    ///
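+    /// A small sketch: the `"external"` keychain name and target index 4 are illustrative, and the
+    /// descriptor setup is hidden.
+    ///
+    /// ```
+    /// # use bdk_chain::keychain::KeychainTxOutIndex;
+    /// # use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey};
+    /// # let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    /// # let (descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+    /// let mut txout_index = KeychainTxOutIndex::<&'static str>::default();
+    /// txout_index.add_keychain("external", descriptor);
+    ///
+    /// // reveal script pubkeys at indices 0..=4
+    /// let (new_spks, additions) = txout_index.reveal_to_target(&"external", 4);
+    /// assert_eq!(new_spks.count(), 5);
+    /// assert!(!additions.is_empty());
+    /// assert_eq!(txout_index.last_revealed_index(&"external"), Some(4));
+    /// ```
+    ///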
+    /// # Panics
+    ///
+    /// Panics if `keychain` does not exist.
+    pub fn reveal_to_target(
+        &mut self,
+        keychain: &K,
+        target_index: u32,
+    ) -> (impl Iterator<Item = (u32, Script)>, DerivationAdditions<K>) {
+        let descriptor = self.keychains.get(keychain).expect("keychain must exist");
+        let has_wildcard = descriptor.has_wildcard();
+
+        let target_index = if has_wildcard { target_index } else { 0 };
+        let next_store_index = self.next_store_index(keychain);
+        let next_reveal_index = self.last_revealed.get(keychain).map_or(0, |v| *v + 1);
+        let lookahead = self.lookahead.get(keychain).map_or(0, |v| *v);
+
+        // if we are able to reveal new indexes, the latest revealed index goes here
+        let mut revealed_index = None;
+
+        // if target is already surpassed, we have nothing to reveal
+        if next_reveal_index <= target_index
+            // if target is already stored (due to lookahead), this can be our new revealed index
+            && target_index < next_reveal_index + lookahead
+        {
+            revealed_index = Some(target_index);
+        }
+
+        // we range over indexes that are not stored
+        let range = next_reveal_index + lookahead..=target_index + lookahead;
+
+        for (new_index, new_spk) in range_descriptor_spks(Cow::Borrowed(descriptor), range) {
+            // no need to store if already stored
+            if new_index >= next_store_index {
+                let _inserted = self
+                    .inner
+                    .insert_spk((keychain.clone(), new_index), new_spk);
+                debug_assert!(_inserted, "must not have existing spk",);
+            }
+
+            // everything after `target_index` is stored for lookahead only
+            if new_index <= target_index {
+                revealed_index = Some(new_index);
+            }
+        }
+
+        match revealed_index {
+            Some(index) => {
+                let _old_index = self.last_revealed.insert(keychain.clone(), index);
+                debug_assert!(_old_index < Some(index));
+                (
+                    range_descriptor_spks(
+                        Cow::Owned(descriptor.clone()),
+                        next_reveal_index..index + 1,
+                    ),
+                    DerivationAdditions([(keychain.clone(), index)].into()),
+                )
+            }
+            None => (
+                range_descriptor_spks(
+                    Cow::Owned(descriptor.clone()),
+                    next_reveal_index..next_reveal_index,
+                ),
+                DerivationAdditions::default(),
+            ),
+        }
+    }
+
+    /// Attempts to reveal the next script pubkey for `keychain`.
+    ///
+    /// Returns the derivation index of the revealed script pubkey, the revealed script pubkey and a
+    /// [`DerivationAdditions`] which represents changes in the last revealed index (if any).
+    ///
+    /// When a new script cannot be revealed, we return the last revealed script and an empty
+    /// [`DerivationAdditions`]. There are two scenarios when a new script pubkey cannot be derived:
+    ///
+    ///  1. The descriptor has no wildcard and already has one script revealed.
+    ///  2. The descriptor has already revealed scripts up to the numeric bound.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the `keychain` does not exist.
+    pub fn reveal_next_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions<K>) {
+        let (next_index, _) = self.next_index(keychain);
+        let additions = self.reveal_to_target(keychain, next_index).1;
+        let script = self
+            .inner
+            .spk_at_index(&(keychain.clone(), next_index))
+            .expect("script must already be stored");
+        ((next_index, script), additions)
+    }
+
+    /// Gets the next unused script pubkey in the keychain. I.e. the script pubkey with the lowest
+    /// index that has not been used yet.
+    ///
+    /// This will derive and reveal a new script pubkey if no more unused script pubkeys exist.
+    ///
+    /// If the descriptor has no wildcard and already has a used script pubkey, or if a descriptor
+    /// has used all scripts up to the derivation bounds, the last derived script pubkey will be
+    /// returned.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `keychain` has never been added to the index.
+    pub fn next_unused_spk(&mut self, keychain: &K) -> ((u32, &Script), DerivationAdditions<K>) {
+        let need_new = self.unused_spks_of_keychain(keychain).next().is_none();
+        // this rather strange branch is needed because of some lifetime issues
+        if need_new {
+            self.reveal_next_spk(keychain)
+        } else {
+            (
+                self.unused_spks_of_keychain(keychain)
+                    .next()
+                    .expect("we already know next exists"),
+                DerivationAdditions::default(),
+            )
+        }
+    }
+
+    /// Marks the script pubkey at `index` as used even though it hasn't seen an output with it.
+    /// This only has an effect when the `index` had been added to `self` already and was unused.
+    ///
+    /// Returns whether the `index` was originally present as `unused`.
+    ///
+    /// This is useful when you want to reserve a script pubkey for something but don't want to add
+    /// the transaction output using it to the index yet. Other callers will consider `index` on
+    /// `keychain` used until you call [`unmark_used`].
+    ///
+    /// [`unmark_used`]: Self::unmark_used
+    pub fn mark_used(&mut self, keychain: &K, index: u32) -> bool {
+        self.inner.mark_used(&(keychain.clone(), index))
+    }
+
+    /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
+    /// `unused`.
+    ///
+    /// Note that if `self` has scanned an output with this script pubkey then this will have no
+    /// effect.
+    ///
+    /// [`mark_used`]: Self::mark_used
+    pub fn unmark_used(&mut self, keychain: &K, index: u32) -> bool {
+        self.inner.unmark_used(&(keychain.clone(), index))
+    }
+
+    /// Iterates over all unused script pubkeys for a `keychain` that have been stored in the index.
+    pub fn unused_spks_of_keychain(
+        &self,
+        keychain: &K,
+    ) -> impl DoubleEndedIterator<Item = (u32, &Script)> {
+        let next_index = self.last_revealed.get(keychain).map_or(0, |&v| v + 1);
+        let range = (keychain.clone(), u32::MIN)..(keychain.clone(), next_index);
+        self.inner
+            .unused_spks(range)
+            .map(|((_, i), script)| (*i, script))
+    }
+
+    /// Iterates over all the [`OutPoint`] that have a `TxOut` with a script pubkey derived from
+    /// `keychain`.
+    pub fn txouts_of_keychain(
+        &self,
+        keychain: &K,
+    ) -> impl DoubleEndedIterator<Item = (u32, OutPoint)> + '_ {
+        self.inner
+            .outputs_in_range((keychain.clone(), u32::MIN)..(keychain.clone(), u32::MAX))
+            .map(|((_, i), op)| (*i, op))
+    }
+
+    /// Returns the highest derivation index of `keychain` for which [`KeychainTxOutIndex`] has
+    /// found a [`TxOut`] with its script pubkey.
+    pub fn last_used_index(&self, keychain: &K) -> Option<u32> {
+        self.txouts_of_keychain(keychain).last().map(|(i, _)| i)
+    }
+
+    /// Returns the highest derivation index of each keychain for which [`KeychainTxOutIndex`] has
+    /// found a [`TxOut`] with its script pubkey.
+    pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
+        self.keychains
+            .iter()
+            .filter_map(|(keychain, _)| {
+                self.last_used_index(keychain)
+                    .map(|index| (keychain.clone(), index))
+            })
+            .collect()
+    }
+
+    /// Applies the derivation additions to the [`KeychainTxOutIndex`], extending the number of
+    /// derived scripts per keychain, as specified in the `additions`.
+    pub fn apply_additions(&mut self, additions: DerivationAdditions<K>) {
+        let _ = self.reveal_to_target_multi(&additions.0);
+    }
+}
+
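+/// Generates `(index, script_pubkey)` pairs for `descriptor` over the given `range`.
+///
+/// Iteration stops early for non-wildcard descriptors (only index 0 is yielded), once the index
+/// passes [`BIP32_MAX_INDEX`], or at the first index that fails to derive.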
+fn range_descriptor_spks<'a, R>(
+    descriptor: Cow<'a, Descriptor<DescriptorPublicKey>>,
+    range: R,
+) -> impl Iterator<Item = (u32, Script)> + Clone + Send + 'a
+where
+    R: Iterator<Item = u32> + Clone + Send + 'a,
+{
+    let secp = Secp256k1::verification_only();
+    let has_wildcard = descriptor.has_wildcard();
+    range
+        .into_iter()
+        // non-wildcard descriptors can only have one derivation index (0)
+        .take_while(move |&index| has_wildcard || index == 0)
+        // we can only iterate over non-hardened indices
+        .take_while(|&index| index <= BIP32_MAX_INDEX)
+        // take until failure
+        .map_while(move |index| {
+            descriptor
+                .derived_descriptor(&secp, index)
+                .map(|desc| (index, desc.script_pubkey()))
+                .ok()
+        })
+}
diff --git a/crates/chain/src/lib.rs b/crates/chain/src/lib.rs
new file mode 100644 (file)
index 0000000..7bb4ed0
--- /dev/null
@@ -0,0 +1,89 @@
+//! This crate is a collection of core structures for [Bitcoin Dev Kit] (alpha release).
+//!
+//! The goal of this crate is to give wallets the mechanisms needed to:
+//!
+//! 1. Figure out what data they need to fetch.
+//! 2. Process that data in a way that never leads to inconsistent states.
+//! 3. Fully index that data and expose it so that it can be consumed without friction.
+//!
+//! Our design goals for these mechanisms are:
+//!
+//! 1. Data source agnostic -- nothing in `bdk_chain` cares about where you get data from or whether
+//!    you do it synchronously or asynchronously. If you know a fact about the blockchain you can just
+//!    tell `bdk_chain`'s APIs about it and that information will be integrated if it can be done
+//!    consistently.
+//! 2. Error free APIs.
+//! 3. Data persistence agnostic -- `bdk_chain` does not care where you cache on-chain data, what you
+//!    cache or how you fetch it.
+//!
+//! [Bitcoin Dev Kit]: https://bitcoindevkit.org/
+#![no_std]
+pub use bitcoin;
+pub mod chain_graph;
+mod spk_txout_index;
+pub use spk_txout_index::*;
+mod chain_data;
+pub use chain_data::*;
+pub mod keychain;
+pub mod sparse_chain;
+mod tx_data_traits;
+pub mod tx_graph;
+pub use tx_data_traits::*;
+
+#[doc(hidden)]
+pub mod example_utils;
+
+#[cfg(feature = "miniscript")]
+pub use miniscript;
+#[cfg(feature = "miniscript")]
+mod descriptor_ext;
+#[cfg(feature = "miniscript")]
+pub use descriptor_ext::DescriptorExt;
+
+#[allow(unused_imports)]
+#[macro_use]
+extern crate alloc;
+
+#[cfg(feature = "serde")]
+pub extern crate serde_crate as serde;
+
+#[cfg(feature = "bincode")]
+extern crate bincode;
+
+#[cfg(feature = "std")]
+#[macro_use]
+extern crate std;
+
+#[cfg(all(not(feature = "std"), feature = "hashbrown"))]
+extern crate hashbrown;
+
+// When neither `std` nor `hashbrown` is enabled, alias the hash collections to `alloc`'s BTree collections.
+#[cfg(all(not(feature = "std"), not(feature = "hashbrown")))]
+#[doc(hidden)]
+pub mod collections {
+    #![allow(dead_code)]
+    pub type HashSet<K> = alloc::collections::BTreeSet<K>;
+    pub type HashMap<K, V> = alloc::collections::BTreeMap<K, V>;
+    pub use alloc::collections::{btree_map as hash_map, *};
+}
+
+// When we have `std` (and not `hashbrown`), use `std`'s collections.
+#[cfg(all(feature = "std", not(feature = "hashbrown")))]
+#[doc(hidden)]
+pub mod collections {
+    pub use std::collections::{hash_map, *};
+}
+
+// With the `hashbrown` feature, use `hashbrown`'s hash collections and `alloc` for everything else.
+#[cfg(feature = "hashbrown")]
+#[doc(hidden)]
+pub mod collections {
+    #![allow(dead_code)]
+    pub type HashSet<K> = hashbrown::HashSet<K>;
+    pub type HashMap<K, V> = hashbrown::HashMap<K, V>;
+    pub use alloc::collections::*;
+    pub use hashbrown::hash_map;
+}
+
+/// The number of confirmations needed before a coinbase output can be spent.
+pub const COINBASE_MATURITY: u32 = 100;
diff --git a/crates/chain/src/sparse_chain.rs b/crates/chain/src/sparse_chain.rs
new file mode 100644 (file)
index 0000000..17f1eb6
--- /dev/null
@@ -0,0 +1,1008 @@
+//! Module for structures that maintain sparse (purposely incomplete) snapshots of blockchain data.
+//!
+//! [`SparseChain`] stores [`Txid`]s ordered by an index that implements [`ChainPosition`] (this
+//! represents the transaction's position in the blockchain, by default [`TxHeight`] is used).
+//! [`SparseChain`] also contains "checkpoints" which relate block height to block hash. Changes to
+//! a [`SparseChain`] are reported by returning [`ChangeSet`]s.
+//!
+//! # Updating [`SparseChain`]
+//!
+//! A sparsechain can be thought of as a consistent snapshot of history. A [`SparseChain`] can be
+//! updated by applying an update [`SparseChain`] on top, but only if they "connect" via their
+//! checkpoints and don't result in unexpected movements of transactions.
+//!
+//! ```
+//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*};
+//! # use bitcoin::BlockHash;
+//! # let hash_a = new_hash::<BlockHash>("a");
+//! # let hash_b = new_hash::<BlockHash>("b");
+//! # let hash_c = new_hash::<BlockHash>("c");
+//! # let hash_d = new_hash::<BlockHash>("d");
+//! // create empty sparsechain
+//! let mut chain = SparseChain::<TxHeight>::default();
+//!
+//! /* Updating an empty sparsechain will always succeed */
+//!
+//! let update = SparseChain::from_checkpoints(vec![
+//!     BlockId { height: 1, hash: hash_a },
+//!     BlockId { height: 2, hash: hash_b },
+//! ]);
+//! let _ = chain
+//!     .apply_update(update)
+//!     .expect("updating an empty sparsechain will always succeed");
+//!
+//! /* To update a non-empty sparsechain, the update must connect */
+//!
+//! let update = SparseChain::from_checkpoints(vec![
+//!     BlockId { height: 2, hash: hash_b },
+//!     BlockId { height: 3, hash: hash_c },
+//! ]);
+//! let _ = chain
+//!     .apply_update(update)
+//!     .expect("we have connected at block height 2, so this must succeed");
+//! ```
+//!
+//! ## Invalid updates
+//!
+//! As shown above, sparsechains can be "connected" by comparing their checkpoints. However, there
+//! are situations where two sparsechains cannot connect in a way that guarantees consistency.
+//!
+//! ```
+//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*};
+//! # use bitcoin::BlockHash;
+//! # let hash_a = new_hash::<BlockHash>("a");
+//! # let hash_b = new_hash::<BlockHash>("b");
+//! # let hash_c = new_hash::<BlockHash>("c");
+//! # let hash_d = new_hash::<BlockHash>("d");
+//! // our sparsechain has 2 checkpoints
+//! let chain = SparseChain::<TxHeight>::from_checkpoints(vec![
+//!     BlockId { height: 1, hash: hash_a },
+//!     BlockId { height: 2, hash: hash_b },
+//! ]);
+//!
+//! /* Example of an ambiguous update that does not fully connect */
+//!
+//! let ambiguous_update = SparseChain::from_checkpoints(vec![
+//!     // the update sort of "connects" at checkpoint 1, but...
+//!     BlockId { height: 1, hash: hash_a },
+//!     // we cannot determine whether checkpoint 3 connects with checkpoint 2
+//!     BlockId { height: 3, hash: hash_c },
+//! ]);
+//! let _ = chain
+//!     .determine_changeset(&ambiguous_update)
+//!     .expect_err("cannot apply ambiguous update");
+//!
+//! /* Example of an update that completely misses the point */
+//!
+//! let disconnected_update = SparseChain::from_checkpoints(vec![
+//!     // the last checkpoint in chain is 2, so 3 and 4 do not connect
+//!     BlockId { height: 3, hash: hash_c },
+//!     BlockId { height: 4, hash: hash_d },
+//! ]);
+//! let _ = chain
+//!     .determine_changeset(&disconnected_update)
+//!     .expect_err("cannot apply a totally-disconnected update");
+//! ```
+//!
+//! ## Handling reorgs
+//!
+//! Updates can be formed to evict data from the original sparsechain. This is useful for handling
+//! blockchain reorgs.
+//!
+//! ```
+//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*};
+//! # use bitcoin::BlockHash;
+//! # let hash_a = new_hash::<BlockHash>("a");
+//! # let hash_b = new_hash::<BlockHash>("b");
+//! # let hash_c = new_hash::<BlockHash>("c");
+//! # let hash_d = new_hash::<BlockHash>("d");
+//! // our chain has a single checkpoint at height 11
+//! let mut chain = SparseChain::<TxHeight>::from_checkpoints(vec![
+//!     BlockId { height: 11, hash: hash_a },
+//! ]);
+//!
+//! // we detect a reorg at height 11, and we introduce a new checkpoint at height 12
+//! let update = SparseChain::from_checkpoints(vec![
+//!     BlockId { height: 11, hash: hash_b },
+//!     BlockId { height: 12, hash: hash_c },
+//! ]);
+//! let _ = chain
+//!     .apply_update(update)
+//!     .expect("we can evict/replace checkpoint 11 since it is the only checkpoint");
+//!
+//! // now our `chain` has 2 checkpoints (11:hash_b & 12:hash_c)
+//! // we detect another reorg, this time at height 12...
+//! let update = SparseChain::from_checkpoints(vec![
+//!     // we connect at checkpoint 11 as this is our "point of agreement"
+//!     BlockId { height: 11, hash: hash_b },
+//!     BlockId { height: 12, hash: hash_d },
+//! ]);
+//! let _ = chain
+//!     .apply_update(update)
+//!     .expect("we have provided a valid point of agreement, so our reorg update will succeed");
+//! ```
+//!
+//! ## Movement of transactions during update
+//!
+//! If the original sparsechain and update sparsechain contain the same transaction at different
+//! [`ChainPosition`]s, the transaction is considered as "moved". There are various movements of a
+//! transaction that are invalid and update will fail.
+//!
+//! Valid movements:
+//!
+//! * When the transaction moved from unconfirmed (in original) to confirmed (in update). In other
+//!     words, confirming transactions are allowed!
+//! * If there has been a reorg at height x, an originally confirmed transaction at height x or
+//!     above may move to another height (that is at x or above, including becoming unconfirmed).
+//!
+//! Invalid movements:
+//!
+//! * A confirmed transaction cannot move without a reorg.
+//! * Even with a reorg, an originally confirmed transaction cannot be moved below the height of the
+//!     reorg.
+//!
+//! # Custom [`ChainPosition`]
+//!
+//! [`SparseChain`] maintains a list of txids ordered by [`ChainPosition`]. By default, [`TxHeight`]
+//! is used, however additional data can be incorporated into the implementation.
+//!
+//! For example, we can have "perfect ordering" of transactions if our positional index is a
+//! combination of block height and transaction position in block.
+//!
+//! ```
+//! # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*};
+//! # use bitcoin::{BlockHash, Txid};
+//! # let hash_a = new_hash::<BlockHash>("a");
+//! # let txid_1 = new_hash::<Txid>("1");
+//! # let txid_2 = new_hash::<Txid>("2");
+//! # let txid_3 = new_hash::<Txid>("3");
+//! #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+//! pub enum TxPosition {
+//!     Confirmed {
+//!         height: u32,   // height of block
+//!         position: u32, // position of transaction in the block
+//!     },
+//!     Unconfirmed,
+//! }
+//!
+//! impl Default for TxPosition {
+//!     fn default() -> Self { Self::Unconfirmed }
+//! }
+//!
+//! impl ChainPosition for TxPosition {
+//!     fn height(&self) -> TxHeight {
+//!         match self {
+//!             Self::Confirmed{ height, .. } => TxHeight::Confirmed(*height),
+//!             Self::Unconfirmed => TxHeight::Unconfirmed,
+//!         }
+//!     }
+//!
+//!     fn max_ord_of_height(height: TxHeight) -> Self {
+//!         match height {
+//!             TxHeight::Confirmed(height) => Self::Confirmed{ height, position: u32::MAX },
+//!             TxHeight::Unconfirmed => Self::Unconfirmed,
+//!         }
+//!     }
+//!
+//!     fn min_ord_of_height(height: TxHeight) -> Self {
+//!         match height {
+//!             TxHeight::Confirmed(height) => Self::Confirmed{ height, position: u32::MIN },
+//!             TxHeight::Unconfirmed => Self::Unconfirmed,
+//!         }
+//!     }
+//! }
+//!
+//! let mut chain = SparseChain::<TxPosition>::default();
+//! let _ = chain.insert_checkpoint(BlockId { height: 10, hash: hash_a }).unwrap();
+//! let _ = chain.insert_tx(txid_1, TxPosition::Confirmed{ height: 9, position: 4321 }).unwrap();
+//! let _ = chain.insert_tx(txid_2, TxPosition::Confirmed{ height: 9, position: 1234 }).unwrap();
+//! let _ = chain.insert_tx(txid_3, TxPosition::Confirmed{ height: 10, position: 321 }).unwrap();
+//!
+//! // transactions are ordered correctly
+//! assert_eq!(
+//!     chain.txids().collect::<Vec<_>>(),
+//!     vec![
+//!         &(TxPosition::Confirmed{ height: 9, position: 1234 }, txid_2),
+//!         &(TxPosition::Confirmed{ height: 9, position: 4321 }, txid_1),
+//!         &(TxPosition::Confirmed{ height: 10, position: 321 }, txid_3),
+//!     ],
+//! );
+//!
+//! ```
+use core::{
+    fmt::Debug,
+    ops::{Bound, RangeBounds},
+};
+
+use crate::{collections::*, tx_graph::TxGraph, AsTransaction, BlockId, FullTxOut, TxHeight};
+use bitcoin::{hashes::Hash, BlockHash, OutPoint, Txid};
+
+/// This is a non-monotone structure that tracks relevant [`Txid`]s that are ordered by chain
+/// position `P`.
+///
+/// We use [`BlockHash`]s alongside their chain height as "checkpoints" to enforce consistency.
+///
+/// To "merge" two [`SparseChain`]s, the [`ChangeSet`] can be calculated by calling
+/// [`determine_changeset`] and applying the [`ChangeSet`] via [`apply_changeset`]. For convenience,
+/// [`apply_update`] does the above two steps in one call.
+///
+/// Refer to [module-level documentation] for more.
+///
+/// [`determine_changeset`]: Self::determine_changeset
+/// [`apply_changeset`]: Self::apply_changeset
+/// [`apply_update`]: Self::apply_update
+/// [module-level documentation]: crate::sparse_chain
+#[derive(Clone, Debug, PartialEq)]
+pub struct SparseChain<P = TxHeight> {
+    /// Block height to checkpoint data.
+    checkpoints: BTreeMap<u32, BlockHash>,
+    /// Txids ordered by the pos `P`.
+    ordered_txids: BTreeSet<(P, Txid)>,
+    /// Confirmation heights of txids.
+    txid_to_pos: HashMap<Txid, P>,
+    /// Limit number of checkpoints.
+    checkpoint_limit: Option<usize>,
+}
+
+impl<P> AsRef<SparseChain<P>> for SparseChain<P> {
+    fn as_ref(&self) -> &SparseChain<P> {
+        self
+    }
+}
+
+impl<P> Default for SparseChain<P> {
+    fn default() -> Self {
+        Self {
+            checkpoints: Default::default(),
+            ordered_txids: Default::default(),
+            txid_to_pos: Default::default(),
+            checkpoint_limit: Default::default(),
+        }
+    }
+}
+
+/// Represents a failure when trying to insert a [`Txid`] into [`SparseChain`].
+#[derive(Clone, Debug, PartialEq)]
+pub enum InsertTxError<P> {
+    /// Occurs when the [`Txid`] is to be inserted at a height higher than the [`SparseChain`]'s tip.
+    TxTooHigh {
+        txid: Txid,
+        tx_height: u32,
+        tip_height: Option<u32>,
+    },
+    /// Occurs when the [`Txid`] is already in the [`SparseChain`] and the insertion would result in
+    /// an unexpected move in [`ChainPosition`].
+    TxMovedUnexpectedly {
+        txid: Txid,
+        original_pos: P,
+        update_pos: P,
+    },
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for InsertTxError<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            InsertTxError::TxTooHigh {
+                txid,
+                tx_height,
+                tip_height,
+            } => write!(
+                f,
+                "txid ({}) cannot be inserted at height ({}) greater than chain tip ({:?})",
+                txid, tx_height, tip_height
+            ),
+            InsertTxError::TxMovedUnexpectedly {
+                txid,
+                original_pos,
+                update_pos,
+            } => write!(
+                f,
+                "txid ({}) insertion resulted in an expected positional move from {:?} to {:?}",
+                txid, original_pos, update_pos
+            ),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for InsertTxError<P> {}
+
+/// Represents a failure when trying to insert a checkpoint into [`SparseChain`].
+#[derive(Clone, Debug, PartialEq)]
+pub enum InsertCheckpointError {
+    /// Occurs when checkpoint of the same height already exists with a different [`BlockHash`].
+    HashNotMatching {
+        height: u32,
+        original_hash: BlockHash,
+        update_hash: BlockHash,
+    },
+}
+
+impl core::fmt::Display for InsertCheckpointError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for InsertCheckpointError {}
+
+/// Represents an update failure of [`SparseChain`].
+#[derive(Clone, Debug, PartialEq)]
+pub enum UpdateError<P = TxHeight> {
+    /// The update cannot be applied to the chain because the chain suffix it represents did not
+    /// connect to the existing chain. This error case contains the checkpoint height to include so
+    /// that the chains can connect.
+    NotConnected(u32),
+    /// The update contains inconsistent tx states (e.g. it changed the transaction's height). This
+    /// error reports the inconsistency that was found.
+    TxInconsistent {
+        txid: Txid,
+        original_pos: P,
+        update_pos: P,
+    },
+}
+
+impl<P: core::fmt::Debug> core::fmt::Display for UpdateError<P> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            Self::NotConnected(h) =>
+                write!(f, "the checkpoints in the update could not be connected to the checkpoints in the chain, try include checkpoint of height {} to connect",
+                    h),
+            Self::TxInconsistent { txid, original_pos, update_pos } =>
+                write!(f, "tx ({}) had position ({:?}), but is ({:?}) in the update",
+                    txid, original_pos, update_pos),
+        }
+    }
+}
+
+#[cfg(feature = "std")]
+impl<P: core::fmt::Debug> std::error::Error for UpdateError<P> {}
+
+impl<P: ChainPosition> SparseChain<P> {
+    /// Creates a new chain from a list of block hashes and heights. The caller must guarantee they
+    /// are in the same chain.
+    pub fn from_checkpoints<C>(checkpoints: C) -> Self
+    where
+        C: IntoIterator<Item = BlockId>,
+    {
+        let mut chain = Self::default();
+        chain.checkpoints = checkpoints
+            .into_iter()
+            .map(|block_id| block_id.into())
+            .collect();
+        chain
+    }
+
+    /// Get the checkpoint for the last known tip.
+    pub fn latest_checkpoint(&self) -> Option<BlockId> {
+        self.checkpoints
+            .iter()
+            .last()
+            .map(|(&height, &hash)| BlockId { height, hash })
+    }
+
+    /// Get the checkpoint at the given height if it exists.
+    pub fn checkpoint_at(&self, height: u32) -> Option<BlockId> {
+        self.checkpoints
+            .get(&height)
+            .map(|&hash| BlockId { height, hash })
+    }
+
+    /// Return the [`ChainPosition`] of a `txid`.
+    ///
+    /// This returns [`None`] if the transaction does not exist.
+    pub fn tx_position(&self, txid: Txid) -> Option<&P> {
+        self.txid_to_pos.get(&txid)
+    }
+
+    /// Return a [`BTreeMap`] of all checkpoints (block hashes by height).
+    pub fn checkpoints(&self) -> &BTreeMap<u32, BlockHash> {
+        &self.checkpoints
+    }
+
+    /// Return an iterator over checkpoints in a height range, in ascending height order.
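+    ///
+    /// A short sketch (the block hashes come from the crate's doc-test helpers):
+    ///
+    /// ```
+    /// # use bdk_chain::{BlockId, TxHeight, sparse_chain::*, example_utils::*};
+    /// # use bitcoin::BlockHash;
+    /// # let hash_a = new_hash::<BlockHash>("a");
+    /// # let hash_b = new_hash::<BlockHash>("b");
+    /// # let hash_c = new_hash::<BlockHash>("c");
+    /// let chain = SparseChain::<TxHeight>::from_checkpoints(vec![
+    ///     BlockId { height: 1, hash: hash_a },
+    ///     BlockId { height: 2, hash: hash_b },
+    ///     BlockId { height: 3, hash: hash_c },
+    /// ]);
+    ///
+    /// // only checkpoints at height 2 and above
+    /// let from_height_2 = chain.range_checkpoints(2..).collect::<Vec<_>>();
+    /// assert_eq!(from_height_2, vec![
+    ///     BlockId { height: 2, hash: hash_b },
+    ///     BlockId { height: 3, hash: hash_c },
+    /// ]);
+    /// ```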
+    pub fn range_checkpoints(
+        &self,
+        range: impl RangeBounds<u32>,
+    ) -> impl DoubleEndedIterator<Item = BlockId> + '_ {
+        self.checkpoints
+            .range(range)
+            .map(|(&height, &hash)| BlockId { height, hash })
+    }
+
+    /// Preview changes of updating [`Self`] with another chain that connects to it.
+    ///
+    /// If the `update` wishes to introduce confirmed transactions, it must contain a checkpoint
+    /// that is exactly the same height as one of `self`'s checkpoints.
+    ///
+    /// To invalidate from a given checkpoint, `update` must contain a checkpoint of the same height
+    /// but different hash. Invalidated checkpoints result in invalidated transactions becoming
+    /// "unconfirmed".
+    ///
+    /// An error will be returned if an update will result in inconsistencies or if the update does
+    /// not properly connect with `self`.
+    ///
+    /// Refer to [module-level documentation] for more.
+    ///
+    /// [module-level documentation]: crate::sparse_chain
+    pub fn determine_changeset(&self, update: &Self) -> Result<ChangeSet<P>, UpdateError<P>> {
+        let agreement_point = update
+            .checkpoints
+            .iter()
+            .rev()
+            .find(|&(height, hash)| self.checkpoints.get(height) == Some(hash))
+            .map(|(&h, _)| h);
+
+        let last_update_cp = update.checkpoints.iter().last().map(|(&h, _)| h);
+
+        // the lower bound of the invalidation range
+        let invalid_lb = if last_update_cp.is_none() || last_update_cp == agreement_point {
+            // if agreement point is the last update checkpoint, or there is no update checkpoints,
+            // no invalidation is required
+            u32::MAX
+        } else {
+            agreement_point.map(|h| h + 1).unwrap_or(0)
+        };
+
+        // the first checkpoint of the sparsechain to invalidate (if any)
+        let invalid_from = self.checkpoints.range(invalid_lb..).next().map(|(&h, _)| h);
+
+        // the first checkpoint to invalidate (if any) should be represented in the update
+        if let Some(first_invalid) = invalid_from {
+            if !update.checkpoints.contains_key(&first_invalid) {
+                return Err(UpdateError::NotConnected(first_invalid));
+            }
+        }
+
+        for (&txid, update_pos) in &update.txid_to_pos {
+            // ensure all currently confirmed txs are still at the same height (unless they are
+            // within invalidation range, or to be confirmed)
+            if let Some(original_pos) = &self.txid_to_pos.get(&txid) {
+                if original_pos.height() < TxHeight::Confirmed(invalid_lb)
+                    && original_pos != &update_pos
+                {
+                    return Err(UpdateError::TxInconsistent {
+                        txid,
+                        original_pos: P::clone(original_pos),
+                        update_pos: update_pos.clone(),
+                    });
+                }
+            }
+        }
+
+        // create initial change-set, based on checkpoints and txids that are to be "invalidated"
+        let mut changeset = invalid_from
+            .map(|from_height| self.invalidate_checkpoints_preview(from_height))
+            .unwrap_or_default();
+
+        for (&height, &new_hash) in &update.checkpoints {
+            let original_hash = self.checkpoints.get(&height).cloned();
+
+            let update_hash = *changeset
+                .checkpoints
+                .entry(height)
+                .and_modify(|change| *change = Some(new_hash))
+                .or_insert_with(|| Some(new_hash));
+
+            if original_hash == update_hash {
+                changeset.checkpoints.remove(&height);
+            }
+        }
+
+        for (txid, new_pos) in &update.txid_to_pos {
+            let original_pos = self.txid_to_pos.get(txid).cloned();
+
+            let update_pos = changeset
+                .txids
+                .entry(*txid)
+                .and_modify(|change| *change = Some(new_pos.clone()))
+                .or_insert_with(|| Some(new_pos.clone()));
+
+            if original_pos == *update_pos {
+                changeset.txids.remove(txid);
+            }
+        }
+
+        Ok(changeset)
+    }
+
+    /// Updates [`SparseChain`] with another chain that connects to it.
+    ///
+    /// This is equivalent to calling [`determine_changeset`] and [`apply_changeset`] in sequence.
+    ///
+    /// [`determine_changeset`]: Self::determine_changeset
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn apply_update(&mut self, update: Self) -> Result<ChangeSet<P>, UpdateError<P>> {
+        let changeset = self.determine_changeset(&update)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
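+    /// Applies the determined [`ChangeSet`] to the [`SparseChain`].
+    ///
+    /// Checkpoints and txid positions are inserted, moved, or removed as the changeset specifies,
+    /// and checkpoints are pruned according to the configured checkpoint limit.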
+    pub fn apply_changeset(&mut self, changeset: ChangeSet<P>) {
+        for (height, update_hash) in changeset.checkpoints {
+            let _original_hash = match update_hash {
+                Some(update_hash) => self.checkpoints.insert(height, update_hash),
+                None => self.checkpoints.remove(&height),
+            };
+        }
+
+        for (txid, update_pos) in changeset.txids {
+            let original_pos = self.txid_to_pos.remove(&txid);
+
+            if let Some(pos) = original_pos {
+                self.ordered_txids.remove(&(pos, txid));
+            }
+
+            if let Some(pos) = update_pos {
+                self.txid_to_pos.insert(txid, pos.clone());
+                self.ordered_txids.insert((pos.clone(), txid));
+            }
+        }
+
+        self.prune_checkpoints();
+    }
+
+    /// Derives a [`ChangeSet`] that assumes that there are no preceding changesets.
+    ///
+    /// The changeset returned will record additions of all [`Txid`]s and checkpoints included in
+    /// [`Self`].
+    pub fn initial_changeset(&self) -> ChangeSet<P> {
+        ChangeSet {
+            checkpoints: self
+                .checkpoints
+                .iter()
+                .map(|(height, hash)| (*height, Some(*hash)))
+                .collect(),
+            txids: self
+                .ordered_txids
+                .iter()
+                .map(|(pos, txid)| (*txid, Some(pos.clone())))
+                .collect(),
+        }
+    }
+
+    /// Determines the [`ChangeSet`] when checkpoints `from_height` (inclusive) and above are
+    /// invalidated. Displaced [`Txid`]s will be repositioned to [`TxHeight::Unconfirmed`].
+    pub fn invalidate_checkpoints_preview(&self, from_height: u32) -> ChangeSet<P> {
+        ChangeSet::<P> {
+            checkpoints: self
+                .checkpoints
+                .range(from_height..)
+                .map(|(height, _)| (*height, None))
+                .collect(),
+            // invalidated transactions become unconfirmed
+            txids: self
+                .range_txids_by_height(TxHeight::Confirmed(from_height)..TxHeight::Unconfirmed)
+                .map(|(_, txid)| (*txid, Some(P::max_ord_of_height(TxHeight::Unconfirmed))))
+                .collect(),
+        }
+    }
+
+    /// Invalidate checkpoints `from_height` (inclusive) and above.
+    ///
+    /// This is equivalent to calling [`invalidate_checkpoints_preview`] and [`apply_changeset`] in
+    /// sequence.
+    ///
+    /// [`invalidate_checkpoints_preview`]: Self::invalidate_checkpoints_preview
+    /// [`apply_changeset`]: Self::apply_changeset
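+    ///
+    /// # Example
+    ///
+    /// A minimal sketch; the block hashes below are arbitrary placeholder hashes:
+    ///
+    /// ```rust
+    /// # use bdk_chain::{sparse_chain::SparseChain, BlockId, TxHeight};
+    /// # use bitcoin::hashes::Hash;
+    /// let mut chain = SparseChain::<TxHeight>::default();
+    /// let _ = chain.insert_checkpoint(BlockId { height: 1, hash: Hash::hash(b"block 1") }).expect("must insert");
+    /// let _ = chain.insert_checkpoint(BlockId { height: 2, hash: Hash::hash(b"block 2") }).expect("must insert");
+    ///
+    /// // drop the checkpoint at height 2 (and anything above it)
+    /// let _changeset = chain.invalidate_checkpoints(2);
+    /// assert_eq!(chain.latest_checkpoint().map(|block_id| block_id.height), Some(1));
+    /// ```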
+    pub fn invalidate_checkpoints(&mut self, from_height: u32) -> ChangeSet<P> {
+        let changeset = self.invalidate_checkpoints_preview(from_height);
+        self.apply_changeset(changeset.clone());
+        changeset
+    }
+
+    /// Determines the [`ChangeSet`] when all transactions of height [`TxHeight::Unconfirmed`] are
+    /// removed completely.
+    pub fn clear_mempool_preview(&self) -> ChangeSet<P> {
+        let mempool_range = &(
+            P::min_ord_of_height(TxHeight::Unconfirmed),
+            Txid::all_zeros(),
+        )..;
+
+        let txids = self
+            .ordered_txids
+            .range(mempool_range)
+            .map(|(_, txid)| (*txid, None))
+            .collect();
+
+        ChangeSet::<P> {
+            txids,
+            ..Default::default()
+        }
+    }
+
+    /// Clears all transactions of height [`TxHeight::Unconfirmed`].
+    ///
+    /// This is equivalent to calling [`clear_mempool_preview`] and [`apply_changeset`] in sequence.
+    ///
+    /// [`clear_mempool_preview`]: Self::clear_mempool_preview
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn clear_mempool(&mut self) -> ChangeSet<P> {
+        let changeset = self.clear_mempool_preview();
+        self.apply_changeset(changeset.clone());
+        changeset
+    }
+
+    /// Determines the resultant [`ChangeSet`] if [`Txid`] was inserted at position `pos`.
+    ///
+    /// Changes to the [`Txid`]'s position is allowed (under the rules noted in
+    /// [module-level documentation]) and will be reflected in the [`ChangeSet`].
+    ///
+    /// [module-level documentation]: crate::sparse_chain
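+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of the preview-then-apply pattern; the block hash and txid below are
+    /// arbitrary placeholder hashes:
+    ///
+    /// ```rust
+    /// # use bdk_chain::{sparse_chain::SparseChain, BlockId, TxHeight};
+    /// # use bitcoin::{hashes::Hash, Txid};
+    /// let mut chain = SparseChain::<TxHeight>::default();
+    /// let _ = chain.insert_checkpoint(BlockId { height: 1, hash: Hash::hash(b"block 1") }).expect("must insert");
+    ///
+    /// let txid = Txid::hash(b"placeholder tx");
+    /// let changeset = chain.insert_tx_preview(txid, TxHeight::Confirmed(1)).expect("position is valid");
+    /// chain.apply_changeset(changeset);
+    /// assert_eq!(chain.tx_position(txid), Some(&TxHeight::Confirmed(1)));
+    /// ```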
+    pub fn insert_tx_preview(&self, txid: Txid, pos: P) -> Result<ChangeSet<P>, InsertTxError<P>> {
+        let mut update = Self::default();
+
+        if let Some(block_id) = self.latest_checkpoint() {
+            let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash);
+            debug_assert!(_old_hash.is_none());
+        }
+
+        let tip_height = self.checkpoints.iter().last().map(|(h, _)| *h);
+        if let TxHeight::Confirmed(tx_height) = pos.height() {
+            if Some(tx_height) > tip_height {
+                return Err(InsertTxError::TxTooHigh {
+                    txid,
+                    tx_height,
+                    tip_height,
+                });
+            }
+        }
+
+        let _old_pos = update.txid_to_pos.insert(txid, pos.clone());
+        debug_assert!(_old_pos.is_none());
+
+        let _inserted = update.ordered_txids.insert((pos, txid));
+        debug_assert!(_inserted, "must insert tx");
+
+        match self.determine_changeset(&update) {
+            Ok(changeset) => Ok(changeset),
+            Err(UpdateError::NotConnected(_)) => panic!("should always connect"),
+            Err(UpdateError::TxInconsistent {
+                txid: inconsistent_txid,
+                original_pos,
+                update_pos,
+            }) => Err(InsertTxError::TxMovedUnexpectedly {
+                txid: inconsistent_txid,
+                original_pos,
+                update_pos,
+            }),
+        }
+    }
+
+    /// Inserts a given [`Txid`] at `pos`.
+    ///
+    /// This is equivalent to calling [`insert_tx_preview`] and [`apply_changeset`] in sequence.
+    ///
+    /// [`insert_tx_preview`]: Self::insert_tx_preview
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_tx(&mut self, txid: Txid, pos: P) -> Result<ChangeSet<P>, InsertTxError<P>> {
+        let changeset = self.insert_tx_preview(txid, pos)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Determines the resultant [`ChangeSet`] if [`BlockId`] was inserted.
+    ///
+    /// If the insertion would change the block hash of an existing checkpoint at the same height,
+    /// the insertion fails.
+    pub fn insert_checkpoint_preview(
+        &self,
+        block_id: BlockId,
+    ) -> Result<ChangeSet<P>, InsertCheckpointError> {
+        let mut update = Self::default();
+
+        if let Some(block_id) = self.latest_checkpoint() {
+            let _old_hash = update.checkpoints.insert(block_id.height, block_id.hash);
+            debug_assert!(_old_hash.is_none());
+        }
+
+        if let Some(original_hash) = update.checkpoints.insert(block_id.height, block_id.hash) {
+            if original_hash != block_id.hash {
+                return Err(InsertCheckpointError::HashNotMatching {
+                    height: block_id.height,
+                    original_hash,
+                    update_hash: block_id.hash,
+                });
+            }
+        }
+
+        match self.determine_changeset(&update) {
+            Ok(changeset) => Ok(changeset),
+            Err(UpdateError::NotConnected(_)) => panic!("error should have been caught above"),
+            Err(UpdateError::TxInconsistent { .. }) => panic!("should never add txs"),
+        }
+    }
+
+    /// Insert a checkpoint ([`BlockId`]).
+    ///
+    /// This is equivalent to calling [`insert_checkpoint_preview`] and [`apply_changeset`] in
+    /// sequence.
+    ///
+    /// [`insert_checkpoint_preview`]: Self::insert_checkpoint_preview
+    /// [`apply_changeset`]: Self::apply_changeset
+    pub fn insert_checkpoint(
+        &mut self,
+        block_id: BlockId,
+    ) -> Result<ChangeSet<P>, InsertCheckpointError> {
+        let changeset = self.insert_checkpoint_preview(block_id)?;
+        self.apply_changeset(changeset.clone());
+        Ok(changeset)
+    }
+
+    /// Iterate over all [`Txid`]s ordered by their [`ChainPosition`].
+    pub fn txids(&self) -> impl DoubleEndedIterator<Item = &(P, Txid)> + ExactSizeIterator + '_ {
+        self.ordered_txids.iter()
+    }
+
+    /// Iterate over a sub-range of positioned [`Txid`]s.
+    pub fn range_txids<R>(&self, range: R) -> impl DoubleEndedIterator<Item = &(P, Txid)> + '_
+    where
+        R: RangeBounds<(P, Txid)>,
+    {
+        let map_bound = |b: Bound<&(P, Txid)>| match b {
+            Bound::Included((pos, txid)) => Bound::Included((pos.clone(), *txid)),
+            Bound::Excluded((pos, txid)) => Bound::Excluded((pos.clone(), *txid)),
+            Bound::Unbounded => Bound::Unbounded,
+        };
+
+        self.ordered_txids
+            .range((map_bound(range.start_bound()), map_bound(range.end_bound())))
+    }
+
+    /// Iterate over a sub-range of positioned [`Txid`]s, where the range is defined by
+    /// [`ChainPosition`] only.
+    pub fn range_txids_by_position<R>(
+        &self,
+        range: R,
+    ) -> impl DoubleEndedIterator<Item = &(P, Txid)> + '_
+    where
+        R: RangeBounds<P>,
+    {
+        let map_bound = |b: Bound<&P>, inc: Txid, exc: Txid| match b {
+            Bound::Included(pos) => Bound::Included((pos.clone(), inc)),
+            Bound::Excluded(pos) => Bound::Excluded((pos.clone(), exc)),
+            Bound::Unbounded => Bound::Unbounded,
+        };
+
+        self.ordered_txids.range((
+            map_bound(range.start_bound(), min_txid(), max_txid()),
+            map_bound(range.end_bound(), max_txid(), min_txid()),
+        ))
+    }
+
+    /// Iterate over a sub-range of positioned [`Txid`]s, where the range is defined by [`TxHeight`]
+    /// only.
+    pub fn range_txids_by_height<R>(
+        &self,
+        range: R,
+    ) -> impl DoubleEndedIterator<Item = &(P, Txid)> + '_
+    where
+        R: RangeBounds<TxHeight>,
+    {
+        let ord_it = |height, is_max| match is_max {
+            true => P::max_ord_of_height(height),
+            false => P::min_ord_of_height(height),
+        };
+
+        let map_bound = |b: Bound<&TxHeight>, inc: (bool, Txid), exc: (bool, Txid)| match b {
+            Bound::Included(&h) => Bound::Included((ord_it(h, inc.0), inc.1)),
+            Bound::Excluded(&h) => Bound::Excluded((ord_it(h, exc.0), exc.1)),
+            Bound::Unbounded => Bound::Unbounded,
+        };
+
+        self.ordered_txids.range((
+            map_bound(range.start_bound(), (false, min_txid()), (true, max_txid())),
+            map_bound(range.end_bound(), (true, max_txid()), (false, min_txid())),
+        ))
+    }
+
+    /// Attempt to retrieve a [`FullTxOut`] of the given `outpoint`.
+    ///
+    /// This will return `Some` only if the output's transaction is in both `self` and `graph`.
+    pub fn full_txout(
+        &self,
+        graph: &TxGraph<impl AsTransaction>,
+        outpoint: OutPoint,
+    ) -> Option<FullTxOut<P>> {
+        let chain_pos = self.tx_position(outpoint.txid)?;
+
+        let tx = graph.get_tx(outpoint.txid)?;
+        let is_on_coinbase = tx.as_tx().is_coin_base();
+        let txout = tx.as_tx().output.get(outpoint.vout as usize)?.clone();
+
+        let spent_by = self
+            .spent_by(graph, outpoint)
+            .map(|(pos, txid)| (pos.clone(), txid));
+
+        Some(FullTxOut {
+            outpoint,
+            txout,
+            chain_position: chain_pos.clone(),
+            spent_by,
+            is_on_coinbase,
+        })
+    }
+
+    /// Returns the value set as the checkpoint limit.
+    ///
+    /// Refer to [`set_checkpoint_limit`].
+    ///
+    /// [`set_checkpoint_limit`]: Self::set_checkpoint_limit
+    pub fn checkpoint_limit(&self) -> Option<usize> {
+        self.checkpoint_limit
+    }
+
+    /// Set the checkpoint limit.
+    ///
+    /// The checkpoint limit restricts the number of checkpoints that can be stored in [`Self`].
+    /// Oldest checkpoints are pruned first.
+    pub fn set_checkpoint_limit(&mut self, limit: Option<usize>) {
+        self.checkpoint_limit = limit;
+        self.prune_checkpoints();
+    }
+
+    /// Return [`Txid`]s that would be added to the sparse chain if this `changeset` was applied.
+    pub fn changeset_additions<'a>(
+        &'a self,
+        changeset: &'a ChangeSet<P>,
+    ) -> impl Iterator<Item = Txid> + 'a {
+        changeset
+            .txids
+            .iter()
+            .filter(|(&txid, pos)| {
+                pos.is_some() /*it was not a deletion*/ &&
+                self.tx_position(txid).is_none() /*we don't have the txid already*/
+            })
+            .map(|(&txid, _)| txid)
+    }
+
+    fn prune_checkpoints(&mut self) -> Option<BTreeMap<u32, BlockHash>> {
+        let limit = self.checkpoint_limit?;
+
+        // find last height to be pruned
+        let last_height = *self.checkpoints.keys().rev().nth(limit)?;
+        // first height to be kept
+        let keep_height = last_height + 1;
+
+        let mut split = self.checkpoints.split_off(&keep_height);
+        core::mem::swap(&mut self.checkpoints, &mut split);
+
+        Some(split)
+    }
+
+    /// Finds the transaction in the chain that spends `outpoint`.
+    ///
+    /// [`TxGraph`] is used to provide the spend relationships.
+    ///
+    /// Note that the transaction including `outpoint` does not need to be in the `graph` or the
+    /// `chain` for this to return `Some`.
+    pub fn spent_by<T>(&self, graph: &TxGraph<T>, outpoint: OutPoint) -> Option<(&P, Txid)> {
+        graph
+            .outspends(outpoint)
+            .iter()
+            .find_map(|&txid| Some((self.tx_position(txid)?, txid)))
+    }
+
+    /// Returns whether the sparse chain contains any checkpoints or transactions.
+    pub fn is_empty(&self) -> bool {
+        self.checkpoints.is_empty() && self.txid_to_pos.is_empty()
+    }
+}
+
+/// The return value of [`determine_changeset`].
+///
+/// [`determine_changeset`]: SparseChain::determine_changeset
+#[derive(Debug, Clone, PartialEq)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(crate = "serde_crate")
+)]
+#[must_use]
+pub struct ChangeSet<P = TxHeight> {
+    pub checkpoints: BTreeMap<u32, Option<BlockHash>>,
+    pub txids: BTreeMap<Txid, Option<P>>,
+}
+
+impl<I> Default for ChangeSet<I> {
+    fn default() -> Self {
+        Self {
+            checkpoints: Default::default(),
+            txids: Default::default(),
+        }
+    }
+}
+
+impl<P> ChangeSet<P> {
+    /// Appends the changes in `other` into self such that applying `self` afterwards has the same
+    /// effect as sequentially applying the original `self` and `other`.
+    pub fn append(&mut self, mut other: Self)
+    where
+        P: ChainPosition,
+    {
+        self.checkpoints.append(&mut other.checkpoints);
+        self.txids.append(&mut other.txids);
+    }
+
+    /// Whether this changeset contains no changes.
+    pub fn is_empty(&self) -> bool {
+        self.checkpoints.is_empty() && self.txids.is_empty()
+    }
+}
+
+fn min_txid() -> Txid {
+    Txid::from_inner([0x00; 32])
+}
+
+fn max_txid() -> Txid {
+    Txid::from_inner([0xff; 32])
+}
+
+/// Represents a position by which transactions are ordered in [`SparseChain`].
+///
+/// [`ChainPosition`] implementations must be [`Ord`] by [`TxHeight`] first.
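+///
+/// # Example
+///
+/// A small illustration using the built-in [`TxHeight`] implementation, where all confirmed
+/// positions sort before [`TxHeight::Unconfirmed`]:
+///
+/// ```rust
+/// # use bdk_chain::{sparse_chain::ChainPosition, TxHeight};
+/// assert!(TxHeight::Confirmed(5) < TxHeight::Confirmed(6));
+/// assert!(TxHeight::Confirmed(6) < TxHeight::Unconfirmed);
+/// assert_eq!(TxHeight::max_ord_of_height(TxHeight::Unconfirmed), TxHeight::unconfirmed());
+/// ```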
+pub trait ChainPosition:
+    core::fmt::Debug + Clone + Eq + PartialOrd + Ord + core::hash::Hash + Send + Sync + 'static
+{
+    /// Get the transaction height of the position.
+    fn height(&self) -> TxHeight;
+
+    /// Get the position's upper bound of a given height.
+    fn max_ord_of_height(height: TxHeight) -> Self;
+
+    /// Get the position's lower bound of a given height.
+    fn min_ord_of_height(height: TxHeight) -> Self;
+
+    /// Get the unconfirmed position.
+    fn unconfirmed() -> Self {
+        Self::max_ord_of_height(TxHeight::Unconfirmed)
+    }
+}
+
+#[cfg(test)]
+pub mod verify_chain_position {
+    use crate::{sparse_chain::ChainPosition, ConfirmationTime, TxHeight};
+    use alloc::vec::Vec;
+
+    pub fn verify_chain_position<P: ChainPosition>(head_count: u32, tail_count: u32) {
+        let values = (0..head_count)
+            .chain(u32::MAX - tail_count..u32::MAX)
+            .flat_map(|i| {
+                [
+                    P::min_ord_of_height(TxHeight::Confirmed(i)),
+                    P::max_ord_of_height(TxHeight::Confirmed(i)),
+                ]
+            })
+            .chain([
+                P::min_ord_of_height(TxHeight::Unconfirmed),
+                P::max_ord_of_height(TxHeight::Unconfirmed),
+            ])
+            .collect::<Vec<_>>();
+
+        for i in 0..values.len() {
+            for j in 0..values.len() {
+                if i == j {
+                    assert_eq!(values[i], values[j]);
+                }
+                if i < j {
+                    assert!(values[i] <= values[j]);
+                }
+                if i > j {
+                    assert!(values[i] >= values[j]);
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn verify_tx_height() {
+        verify_chain_position::<TxHeight>(1000, 1000);
+    }
+
+    #[test]
+    fn verify_confirmation_time() {
+        verify_chain_position::<ConfirmationTime>(1000, 1000);
+    }
+}
diff --git a/crates/chain/src/spk_txout_index.rs b/crates/chain/src/spk_txout_index.rs
new file mode 100644 (file)
index 0000000..6e6afc9
--- /dev/null
@@ -0,0 +1,312 @@
+use core::ops::RangeBounds;
+
+use crate::{
+    collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap},
+    ForEachTxOut,
+};
+use bitcoin::{self, OutPoint, Script, Transaction, TxOut, Txid};
+
+/// An index storing [`TxOut`]s that have a script pubkey that matches those in a list.
+///
+/// The basic idea is that you insert script pubkeys you care about into the index with
+/// [`insert_spk`] and then when you call [`scan`] the index will look at any txouts you pass in and
+/// store and index any txouts matching one of its script pubkeys.
+///
+/// Each script pubkey is associated with an application-defined index `I`, which must be [`Ord`].
+/// Usually this is the derivation index of the script pubkey, or even a combination of
+/// `(keychain, derivation_index)`.
+///
+/// Note there is no harm in scanning transactions that disappear from the blockchain or were never
+/// in there in the first place. `SpkTxOutIndex` is intentionally *monotone* -- you cannot delete or
+/// modify txouts that have been indexed. To find out which txouts from the index are actually in the
+/// chain, unspent, etc., you must use other sources of information, such as a [`SparseChain`].
+///
+/// [`TxOut`]: bitcoin::TxOut
+/// [`insert_spk`]: Self::insert_spk
+/// [`Ord`]: core::cmp::Ord
+/// [`scan`]: Self::scan
+/// [`SparseChain`]: crate::sparse_chain::SparseChain
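+///
+/// # Example
+///
+/// A minimal sketch using a `u32` index and an empty placeholder script pubkey:
+///
+/// ```rust
+/// # use bdk_chain::SpkTxOutIndex;
+/// # use bitcoin::{PackedLockTime, Script, Transaction, TxOut};
+/// let mut index = SpkTxOutIndex::<u32>::default();
+/// let spk = Script::new(); // placeholder script pubkey
+/// index.insert_spk(0, spk.clone());
+///
+/// // a transaction paying to the tracked script pubkey
+/// let tx = Transaction {
+///     version: 1,
+///     lock_time: PackedLockTime(0),
+///     input: vec![],
+///     output: vec![TxOut { value: 42_000, script_pubkey: spk.clone() }],
+/// };
+/// index.scan(&tx);
+///
+/// assert_eq!(index.index_of_spk(&spk), Some(&0));
+/// assert!(index.is_used(&0));
+/// ```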
+#[derive(Clone, Debug)]
+pub struct SpkTxOutIndex<I> {
+    /// script pubkeys ordered by index
+    spks: BTreeMap<I, Script>,
+    /// A reverse lookup from spk to spk index
+    spk_indices: HashMap<Script, I>,
+    /// The set of unused indexes.
+    unused: BTreeSet<I>,
+    /// Lookup index and txout by outpoint.
+    txouts: BTreeMap<OutPoint, (I, TxOut)>,
+    /// Lookup from spk index to outpoints that had that spk
+    spk_txouts: BTreeSet<(I, OutPoint)>,
+}
+
+impl<I> Default for SpkTxOutIndex<I> {
+    fn default() -> Self {
+        Self {
+            txouts: Default::default(),
+            spks: Default::default(),
+            spk_indices: Default::default(),
+            spk_txouts: Default::default(),
+            unused: Default::default(),
+        }
+    }
+}
+
+/// This macro is used instead of a member function of `SpkTxOutIndex` because a member function
+/// would result in compiler error [E0521] ("borrowed data escapes out of closure") when we attempt
+/// to take a reference out of the `ForEachTxOut` closure during scanning.
+macro_rules! scan_txout {
+    ($self:ident, $op:expr, $txout:expr) => {{
+        let spk_i = $self.spk_indices.get(&$txout.script_pubkey);
+        if let Some(spk_i) = spk_i {
+            $self.txouts.insert($op, (spk_i.clone(), $txout.clone()));
+            $self.spk_txouts.insert((spk_i.clone(), $op));
+            $self.unused.remove(&spk_i);
+        }
+        spk_i
+    }};
+}
+
+impl<I: Clone + Ord> SpkTxOutIndex<I> {
+    /// Scans an object containing many txouts.
+    ///
+    /// Typically this is used in two situations:
+    ///
+    /// 1. After loading transaction data from disk, you may scan over all the txouts to restore
+    /// your txout index.
+    /// 2. When getting new data from the chain, you usually scan it before incorporating it into
+    /// your chain state.
+    ///
+    /// See [`ForEachTxOut`] for the types that support this.
+    ///
+    /// [`ForEachTxOut`]: crate::ForEachTxOut
+    pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<&I> {
+        let mut scanned_indices = BTreeSet::new();
+
+        txouts.for_each_txout(|(op, txout)| {
+            if let Some(spk_i) = scan_txout!(self, op, txout) {
+                scanned_indices.insert(spk_i);
+            }
+        });
+
+        scanned_indices
+    }
+
+    /// Scans a single `TxOut` for a matching script pubkey and returns the index that matched the
+    /// script pubkey (if any).
+    pub fn scan_txout(&mut self, op: OutPoint, txout: &TxOut) -> Option<&I> {
+        scan_txout!(self, op, txout)
+    }
+
+    /// Iterate over all known txouts that spend to tracked script pubkeys.
+    pub fn txouts(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (&I, OutPoint, &TxOut)> + ExactSizeIterator {
+        self.txouts
+            .iter()
+            .map(|(op, (index, txout))| (index, *op, txout))
+    }
+
+    /// Finds all txouts on a transaction that has previously been scanned and indexed.
+    pub fn txouts_in_tx(
+        &self,
+        txid: Txid,
+    ) -> impl DoubleEndedIterator<Item = (&I, OutPoint, &TxOut)> {
+        self.txouts
+            .range(OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX))
+            .map(|(op, (index, txout))| (index, *op, txout))
+    }
+
+    /// Iterates over all outputs with script pubkeys in an index range.
+    pub fn outputs_in_range(
+        &self,
+        range: impl RangeBounds<I>,
+    ) -> impl DoubleEndedIterator<Item = (&I, OutPoint)> {
+        use bitcoin::hashes::Hash;
+        use core::ops::Bound::*;
+        let min_op = OutPoint {
+            txid: Txid::from_inner([0x00; 32]),
+            vout: u32::MIN,
+        };
+        let max_op = OutPoint {
+            txid: Txid::from_inner([0xff; 32]),
+            vout: u32::MAX,
+        };
+
+        let start = match range.start_bound() {
+            Included(index) => Included((index.clone(), min_op)),
+            Excluded(index) => Excluded((index.clone(), max_op)),
+            Unbounded => Unbounded,
+        };
+
+        let end = match range.end_bound() {
+            Included(index) => Included((index.clone(), max_op)),
+            Excluded(index) => Excluded((index.clone(), min_op)),
+            Unbounded => Unbounded,
+        };
+
+        self.spk_txouts.range((start, end)).map(|(i, op)| (i, *op))
+    }
+
+    /// Returns the txout and script pubkey index of the `TxOut` at `OutPoint`.
+    ///
+    /// Returns `None` if the `TxOut` hasn't been scanned or if nothing matching was found there.
+    pub fn txout(&self, outpoint: OutPoint) -> Option<(&I, &TxOut)> {
+        self.txouts
+            .get(&outpoint)
+            .map(|(spk_i, txout)| (spk_i, txout))
+    }
+
+    /// Returns the script that has been inserted at the `index`.
+    ///
+    /// If that index hasn't been inserted yet it will return `None`.
+    pub fn spk_at_index(&self, index: &I) -> Option<&Script> {
+        self.spks.get(index)
+    }
+
+    /// The script pubkeys being tracked by the index.
+    pub fn all_spks(&self) -> &BTreeMap<I, Script> {
+        &self.spks
+    }
+
+    /// Adds a script pubkey to scan for. Returns `false` and does nothing if the spk already
+    /// exists in the map.
+    ///
+    /// The index will look for outputs spending to this spk whenever it scans new data.
+    pub fn insert_spk(&mut self, index: I, spk: Script) -> bool {
+        match self.spk_indices.entry(spk.clone()) {
+            Entry::Vacant(value) => {
+                value.insert(index.clone());
+                self.spks.insert(index.clone(), spk);
+                self.unused.insert(index);
+                true
+            }
+            Entry::Occupied(_) => false,
+        }
+    }
+
+    /// Iterates over all unused script pubkeys in an index range.
+    ///
+    /// Here "unused" means that after the script pubkey was stored in the index, the index has
+    /// never scanned a transaction output with it.
+    ///
+    /// # Example
+    ///
+    /// ```rust
+    /// # use bdk_chain::SpkTxOutIndex;
+    ///
+    /// // imagine our spks are indexed like (keychain, derivation_index).
+    /// let txout_index = SpkTxOutIndex::<(u32,u32)>::default();
+    /// let all_unused_spks = txout_index.unused_spks(..);
+    /// let change_index = 1;
+    /// let unused_change_spks = txout_index
+    ///     .unused_spks((change_index, u32::MIN)..(change_index, u32::MAX));
+    /// ```
+    pub fn unused_spks<R>(&self, range: R) -> impl DoubleEndedIterator<Item = (&I, &Script)>
+    where
+        R: RangeBounds<I>,
+    {
+        self.unused
+            .range(range)
+            .map(|index| (index, self.spk_at_index(index).expect("must exist")))
+    }
+
+    /// Returns whether the script pubkey at `index` has been used or not.
+    ///
+    /// Here "unused" means that after the script pubkey was stored in the index, the index has
+    /// never scanned a transaction output with it.
+    pub fn is_used(&self, index: &I) -> bool {
+        self.unused.get(index).is_none()
+    }
+
+    /// Marks the script pubkey at `index` as used even though the index hasn't seen an output
+    /// spending to it. This only has an effect when `index` has already been added to `self` and
+    /// was unused.
+    ///
+    /// Returns whether the `index` was originally present as `unused`.
+    ///
+    /// This is useful when you want to reserve a script pubkey for something but don't want to add
+    /// the transaction output using it to the index yet. Other callers will consider `index` used
+    /// until you call [`unmark_used`].
+    ///
+    /// [`unmark_used`]: Self::unmark_used
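+    ///
+    /// # Example
+    ///
+    /// A small sketch with a single placeholder script pubkey:
+    ///
+    /// ```rust
+    /// # use bdk_chain::SpkTxOutIndex;
+    /// # use bitcoin::Script;
+    /// let mut index = SpkTxOutIndex::<u32>::default();
+    /// index.insert_spk(0, Script::new());
+    ///
+    /// assert!(!index.is_used(&0));
+    /// assert!(index.mark_used(&0));
+    /// assert!(index.is_used(&0));
+    /// // no txout has been scanned for this spk, so it can be unmarked again
+    /// assert!(index.unmark_used(&0));
+    /// assert!(!index.is_used(&0));
+    /// ```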
+    pub fn mark_used(&mut self, index: &I) -> bool {
+        self.unused.remove(index)
+    }
+
+    /// Undoes the effect of [`mark_used`]. Returns whether the `index` is inserted back into
+    /// `unused`.
+    ///
+    /// Note that if `self` has scanned an output with this script pubkey then this will have no
+    /// effect.
+    ///
+    /// [`mark_used`]: Self::mark_used
+    pub fn unmark_used(&mut self, index: &I) -> bool {
+        // we cannot set index as unused when it does not exist
+        if !self.spks.contains_key(index) {
+            return false;
+        }
+        // we cannot set index as unused when txouts are indexed under it
+        if self.outputs_in_range(index..=index).next().is_some() {
+            return false;
+        }
+        self.unused.insert(index.clone())
+    }
+
+    /// Returns the index associated with the script pubkey.
+    pub fn index_of_spk(&self, script: &Script) -> Option<&I> {
+        self.spk_indices.get(script)
+    }
+
+    /// Computes the total input value going from script pubkeys in the index (sent) and the total
+    /// output value going to script pubkeys in the index (received) in `tx`. For `sent` to be
+    /// computed correctly, the output being spent must have already been scanned by the index.
+    /// Calculating `received` just uses the transaction outputs directly, so it will be correct
+    /// even if the transaction has not been scanned.
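+    ///
+    /// # Example
+    ///
+    /// A minimal sketch with a placeholder script pubkey and an arbitrary amount:
+    ///
+    /// ```rust
+    /// # use bdk_chain::SpkTxOutIndex;
+    /// # use bitcoin::{PackedLockTime, Script, Transaction, TxOut};
+    /// let mut index = SpkTxOutIndex::<u32>::default();
+    /// let spk = Script::new();
+    /// index.insert_spk(0, spk.clone());
+    ///
+    /// let tx = Transaction {
+    ///     version: 1,
+    ///     lock_time: PackedLockTime(0),
+    ///     input: vec![],
+    ///     output: vec![TxOut { value: 30_000, script_pubkey: spk }],
+    /// };
+    /// // no inputs spend an indexed txout, so nothing is "sent";
+    /// // "received" is taken from the outputs directly
+    /// assert_eq!(index.sent_and_received(&tx), (0, 30_000));
+    /// ```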
+    pub fn sent_and_received(&self, tx: &Transaction) -> (u64, u64) {
+        let mut sent = 0;
+        let mut received = 0;
+
+        for txin in &tx.input {
+            if let Some((_, txout)) = self.txout(txin.previous_output) {
+                sent += txout.value;
+            }
+        }
+        for txout in &tx.output {
+            if self.index_of_spk(&txout.script_pubkey).is_some() {
+                received += txout.value;
+            }
+        }
+
+        (sent, received)
+    }
+
+    /// Computes the net value that this transaction gives to the script pubkeys in the index and
+    /// *takes* from the transaction outputs in the index. Shorthand for calling
+    /// [`sent_and_received`] and subtracting sent from received.
+    ///
+    /// [`sent_and_received`]: Self::sent_and_received
+    pub fn net_value(&self, tx: &Transaction) -> i64 {
+        let (sent, received) = self.sent_and_received(tx);
+        received as i64 - sent as i64
+    }
+
+    /// Whether any of the inputs of this transaction spend a tracked txout, or whether any output
+    /// pays to one of our script pubkeys.
+    ///
+    /// It is easily possible to misuse this method and get false negatives by calling it before you
+    /// have scanned the `TxOut`s the transaction is spending. For example, if you want to filter out
+    /// all the transactions in a block that are irrelevant, you **must first scan all the
+    /// transactions in the block** and only then use this method.
+    pub fn is_relevant(&self, tx: &Transaction) -> bool {
+        let input_matches = tx
+            .input
+            .iter()
+            .any(|input| self.txouts.contains_key(&input.previous_output));
+        let output_matches = tx
+            .output
+            .iter()
+            .any(|output| self.spk_indices.contains_key(&output.script_pubkey));
+        input_matches || output_matches
+    }
+}
diff --git a/crates/chain/src/tx_data_traits.rs b/crates/chain/src/tx_data_traits.rs
new file mode 100644 (file)
index 0000000..af4fe3b
--- /dev/null
@@ -0,0 +1,115 @@
+use core::borrow::Borrow;
+
+use alloc::{borrow::Cow, boxed::Box, rc::Rc, sync::Arc};
+use bitcoin::{Block, OutPoint, Transaction, TxOut};
+
+/// Trait to do something with every txout contained in a structure.
+///
+/// We would prefer to just work with things that can give us an `Iterator<Item = (OutPoint, &TxOut)>`
+/// here, but Rust's type system makes that extremely hard to do without trait objects.
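+///
+/// For example, summing the output value of a [`Transaction`] (which implements this trait through
+/// [`AsTransaction`]); the transaction here is a minimal placeholder:
+///
+/// ```rust
+/// # use bdk_chain::ForEachTxOut;
+/// # use bitcoin::{PackedLockTime, Script, Transaction, TxOut};
+/// let tx = Transaction {
+///     version: 1,
+///     lock_time: PackedLockTime(0),
+///     input: vec![],
+///     output: vec![
+///         TxOut { value: 1_000, script_pubkey: Script::new() },
+///         TxOut { value: 2_000, script_pubkey: Script::new() },
+///     ],
+/// };
+/// let mut total = 0u64;
+/// tx.for_each_txout(|(_outpoint, txout)| total += txout.value);
+/// assert_eq!(total, 3_000);
+/// ```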
+pub trait ForEachTxOut {
+    /// The provided closure `f` will be called with each `outpoint`/`txout` pair.
+    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut)));
+}
+
+impl ForEachTxOut for Block {
+    fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) {
+        for tx in self.txdata.iter() {
+            tx.for_each_txout(&mut f)
+        }
+    }
+}
+
+/// Trait for things that have a single [`Transaction`] in them.
+///
+/// This allows polymorphism in structures such as [`TxGraph<T>`] where `T` can be anything that
+/// implements `AsTransaction`. You might think that we could just use [`core::convert::AsRef`] for
+/// this but the problem is that we need to implement it on `Cow<T>` where `T: AsTransaction` which
+/// we can't do with a foreign trait like `AsTransaction`.
+///
+/// [`Transaction`]: bitcoin::Transaction
+/// [`TxGraph<T>`]: crate::tx_graph::TxGraph
+pub trait AsTransaction {
+    /// Get a reference to the transaction.
+    fn as_tx(&self) -> &Transaction;
+}
+
+impl AsTransaction for Transaction {
+    fn as_tx(&self) -> &Transaction {
+        self
+    }
+}
+
+impl<T: AsTransaction> AsTransaction for Rc<T> {
+    fn as_tx(&self) -> &Transaction {
+        self.as_ref().as_tx()
+    }
+}
+
+impl<T: AsTransaction> AsTransaction for Arc<T> {
+    fn as_tx(&self) -> &Transaction {
+        self.as_ref().as_tx()
+    }
+}
+
+impl<T: AsTransaction> AsTransaction for Box<T> {
+    fn as_tx(&self) -> &Transaction {
+        self.as_ref().as_tx()
+    }
+}
+
+impl<'a, T: AsTransaction + Clone> AsTransaction for Cow<'a, T> {
+    fn as_tx(&self) -> &Transaction {
+        <Cow<'_, T> as Borrow<T>>::borrow(self).as_tx()
+    }
+}
+
+impl<T> ForEachTxOut for T
+where
+    T: AsTransaction,
+{
+    fn for_each_txout(&self, mut f: impl FnMut((OutPoint, &TxOut))) {
+        let tx = self.as_tx();
+        let txid = tx.txid();
+        for (i, txout) in tx.output.iter().enumerate() {
+            f((
+                OutPoint {
+                    txid,
+                    vout: i as u32,
+                },
+                txout,
+            ))
+        }
+    }
+}
+
+/// A trait like [`core::convert::Into`] for converting one thing into another.
+///
+/// We use it to convert one transaction type into another so that an update for `T2` can be used on
+/// a `TxGraph<T1>` as long as `T2: IntoOwned<T1>`.
+///
+/// We couldn't use `Into` because we needed to implement it for [`Cow<'a, T>`].
+///
+/// [`Cow<'a, T>`]: std::borrow::Cow
+pub trait IntoOwned<T> {
+    /// Converts the provided type into another (owned) type.
+    fn into_owned(self) -> T;
+}
+
+impl<T> IntoOwned<T> for T {
+    fn into_owned(self) -> T {
+        self
+    }
+}
+
+impl<'a, T: Clone> IntoOwned<T> for Cow<'a, T> {
+    fn into_owned(self) -> T {
+        Cow::into_owned(self)
+    }
+}
+
+impl<'a, T: Clone> IntoOwned<T> for &'a T {
+    fn into_owned(self) -> T {
+        self.clone()
+    }
+}
diff --git a/crates/chain/src/tx_graph.rs b/crates/chain/src/tx_graph.rs
new file mode 100644 (file)
index 0000000..df95b82
--- /dev/null
@@ -0,0 +1,637 @@
+//! Module for structures that store and traverse transactions.
+//!
+//! [`TxGraph`] is a monotone structure that inserts transactions and indexes spends. The
+//! [`Additions`] structure reports changes to a [`TxGraph`], and can also be applied back onto a
+//! [`TxGraph`]. Lastly, [`TxDescendants`] is an [`Iterator`] that traverses the descendants of
+//! a given transaction.
+//!
+//! Conflicting transactions are allowed to coexist within a [`TxGraph`]. This is useful for
+//! identifying and traversing conflicts and descendants of a given transaction.
+//!
+//! # Previewing and applying changes
+//!
+//! Methods that either preview or apply changes to [`TxGraph`] will return [`Additions`].
+//! [`Additions`] can be applied back on to a [`TxGraph`], or be used to inform persistent storage
+//! of the changes to [`TxGraph`].
+//!
+//! ```
+//! # use bdk_chain::tx_graph::TxGraph;
+//! # use bdk_chain::example_utils::*;
+//! # use bitcoin::Transaction;
+//! # let tx_a = tx_from_hex(RAW_TX_1);
+//! # let tx_b = tx_from_hex(RAW_TX_2);
+//! let mut graph = TxGraph::<Transaction>::default();
+//!
+//! // preview a transaction insertion (not actually inserted)
+//! let additions = graph.insert_tx_preview(tx_a);
+//! // apply the insertion
+//! graph.apply_additions(additions);
+//!
+//! // you can also insert a transaction directly
+//! let already_applied_additions = graph.insert_tx(tx_b);
+//! ```
+//!
+//! A [`TxGraph`] can also be updated with another [`TxGraph`].
+//!
+//! ```
+//! # use bdk_chain::tx_graph::TxGraph;
+//! # use bdk_chain::example_utils::*;
+//! # use bitcoin::Transaction;
+//! # let tx_a = tx_from_hex(RAW_TX_1);
+//! # let tx_b = tx_from_hex(RAW_TX_2);
+//! let mut graph = TxGraph::<Transaction>::default();
+//! let update = TxGraph::<Transaction>::new(vec![tx_a, tx_b]);
+//!
+//! // preview additions as result of the update
+//! let additions = graph.determine_additions(&update);
+//! // apply the additions
+//! graph.apply_additions(additions);
+//!
+//! // we can also apply the update graph directly
+//! // the additions will be empty as we have already applied the same update above
+//! let additions = graph.apply_update(update);
+//! assert!(additions.is_empty());
+//! ```
+//!
+use crate::{collections::*, AsTransaction, ForEachTxOut, IntoOwned};
+use alloc::vec::Vec;
+use bitcoin::{OutPoint, Transaction, TxOut, Txid};
+use core::ops::RangeInclusive;
+
+/// A graph of transactions and spends.
+///
+/// See the [module-level documentation] for more.
+///
+/// [module-level documentation]: crate::tx_graph
+#[derive(Clone, Debug, PartialEq)]
+pub struct TxGraph<T = Transaction> {
+    txs: HashMap<Txid, TxNode<T>>,
+    spends: BTreeMap<OutPoint, HashSet<Txid>>,
+
+    // This atrocity exists so that `TxGraph::outspends()` can return a reference.
+    // FIXME: This can be removed once `HashSet::new` is a const fn.
+    empty_outspends: HashSet<Txid>,
+}
+
+impl<T> Default for TxGraph<T> {
+    fn default() -> Self {
+        Self {
+            txs: Default::default(),
+            spends: Default::default(),
+            empty_outspends: Default::default(),
+        }
+    }
+}
+
+/// Node of a [`TxGraph`]. This can either be a whole transaction, or a partial transaction (where
+/// we only have select outputs).
+#[derive(Clone, Debug, PartialEq)]
+enum TxNode<T = Transaction> {
+    Whole(T),
+    Partial(BTreeMap<u32, TxOut>),
+}
+
+impl<T> Default for TxNode<T> {
+    fn default() -> Self {
+        Self::Partial(BTreeMap::new())
+    }
+}
+
+impl<T: AsTransaction> TxGraph<T> {
+    /// Iterate over all tx outputs known by [`TxGraph`].
+    pub fn all_txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)> {
+        self.txs.iter().flat_map(|(txid, tx)| match tx {
+            TxNode::Whole(tx) => tx
+                .as_tx()
+                .output
+                .iter()
+                .enumerate()
+                .map(|(vout, txout)| (OutPoint::new(*txid, vout as _), txout))
+                .collect::<Vec<_>>(),
+            TxNode::Partial(txouts) => txouts
+                .iter()
+                .map(|(vout, txout)| (OutPoint::new(*txid, *vout as _), txout))
+                .collect::<Vec<_>>(),
+        })
+    }
+
+    /// Iterate over all full transactions in the graph.
+    pub fn full_transactions(&self) -> impl Iterator<Item = &T> {
+        self.txs.iter().filter_map(|(_, tx)| match tx {
+            TxNode::Whole(tx) => Some(tx),
+            TxNode::Partial(_) => None,
+        })
+    }
+
+    /// Get a transaction by txid. This only returns `Some` for full transactions.
+    ///
+    /// Refer to [`get_txout`] for getting a specific [`TxOut`].
+    ///
+    /// [`get_txout`]: Self::get_txout
+    pub fn get_tx(&self, txid: Txid) -> Option<&T> {
+        match self.txs.get(&txid)? {
+            TxNode::Whole(tx) => Some(tx),
+            TxNode::Partial(_) => None,
+        }
+    }
+
+    /// Obtains a single tx output (if any) at specified outpoint.
+    pub fn get_txout(&self, outpoint: OutPoint) -> Option<&TxOut> {
+        match self.txs.get(&outpoint.txid)? {
+            TxNode::Whole(tx) => tx.as_tx().output.get(outpoint.vout as usize),
+            TxNode::Partial(txouts) => txouts.get(&outpoint.vout),
+        }
+    }
+
+    /// Returns a [`BTreeMap`] of vout to output of the provided `txid`.
+    pub fn txouts(&self, txid: Txid) -> Option<BTreeMap<u32, &TxOut>> {
+        Some(match self.txs.get(&txid)? {
+            TxNode::Whole(tx) => tx
+                .as_tx()
+                .output
+                .iter()
+                .enumerate()
+                .map(|(vout, txout)| (vout as u32, txout))
+                .collect::<BTreeMap<_, _>>(),
+            TxNode::Partial(txouts) => txouts
+                .iter()
+                .map(|(vout, txout)| (*vout, txout))
+                .collect::<BTreeMap<_, _>>(),
+        })
+    }
+
+    /// Calculates the fee of a given transaction. Returns `Some(0)` if `tx` is a coinbase
+    /// transaction. Returns `Some(_)` if we have all the `TxOut`s being spent by `tx` in the graph
+    /// (either as full transactions or as individual txouts). If the returned value is negative,
+    /// then the transaction is invalid according to the graph.
+    ///
+    /// Returns `None` if we're missing any of the `TxOut`s being spent by `tx` in the graph.
+    ///
+    /// Note `tx` does not have to be in the graph for this to work.
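+    ///
+    /// # Example
+    ///
+    /// A minimal sketch; the outpoint, scripts, and amounts below are placeholders:
+    ///
+    /// ```rust
+    /// # use bdk_chain::tx_graph::TxGraph;
+    /// # use bitcoin::{hashes::Hash, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut, Txid};
+    /// let mut graph = TxGraph::<Transaction>::default();
+    ///
+    /// // we only need the txout being spent, not the whole previous transaction
+    /// let op = OutPoint::new(Txid::hash(b"previous tx"), 0);
+    /// let _ = graph.insert_txout(op, TxOut { value: 10_000, script_pubkey: Script::new() });
+    ///
+    /// let tx = Transaction {
+    ///     version: 1,
+    ///     lock_time: PackedLockTime(0),
+    ///     input: vec![TxIn { previous_output: op, ..Default::default() }],
+    ///     output: vec![TxOut { value: 9_000, script_pubkey: Script::new() }],
+    /// };
+    /// assert_eq!(graph.calculate_fee(&tx), Some(1_000));
+    /// ```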
+    pub fn calculate_fee(&self, tx: &Transaction) -> Option<i64> {
+        if tx.is_coin_base() {
+            return Some(0);
+        }
+        let inputs_sum = tx
+            .input
+            .iter()
+            .map(|txin| {
+                self.get_txout(txin.previous_output)
+                    .map(|txout| txout.value as i64)
+            })
+            .sum::<Option<i64>>()?;
+
+        let outputs_sum = tx
+            .output
+            .iter()
+            .map(|txout| txout.value as i64)
+            .sum::<i64>();
+
+        Some(inputs_sum - outputs_sum)
+    }
+}
+
+impl<T: AsTransaction + Ord + Clone> TxGraph<T> {
+    /// Constructs a new [`TxGraph`] from a list of transactions.
+    pub fn new(txs: impl IntoIterator<Item = T>) -> Self {
+        let mut new = Self::default();
+        for tx in txs.into_iter() {
+            let _ = new.insert_tx(tx);
+        }
+        new
+    }
+
+    /// Inserts the given [`TxOut`] at [`OutPoint`].
+    ///
+    /// Note this will ignore the action if we already have the full transaction that the txout is
+    /// alleged to be on (even if it doesn't match it!).
+    pub fn insert_txout(&mut self, outpoint: OutPoint, txout: TxOut) -> Additions<T> {
+        let additions = self.insert_txout_preview(outpoint, txout);
+        self.apply_additions(additions.clone());
+        additions
+    }
+
+    /// Inserts the given transaction into [`TxGraph`].
+    ///
+    /// The [`Additions`] returned will be empty if `tx` already exists.
+    pub fn insert_tx(&mut self, tx: T) -> Additions<T> {
+        let additions = self.insert_tx_preview(tx);
+        self.apply_additions(additions.clone());
+        additions
+    }
+
+    /// Extends this graph with another so that `self` becomes the union of the two sets of
+    /// transactions.
+    ///
+    /// The returned [`Additions`] is the set difference of `update` and `self` (transactions that
+    /// exist in `update` but not in `self`).
+    pub fn apply_update<T2>(&mut self, update: TxGraph<T2>) -> Additions<T>
+    where
+        T2: IntoOwned<T> + Clone,
+    {
+        let additions = self.determine_additions(&update);
+        self.apply_additions(additions.clone());
+        additions
+    }
+
+    /// Applies [`Additions`] to [`TxGraph`].
+    pub fn apply_additions(&mut self, additions: Additions<T>) {
+        for tx in additions.tx {
+            let txid = tx.as_tx().txid();
+
+            tx.as_tx()
+                .input
+                .iter()
+                .map(|txin| txin.previous_output)
+                // coinbase spends are not to be counted
+                .filter(|outpoint| !outpoint.is_null())
+                // record spend as this tx has spent this outpoint
+                .for_each(|outpoint| {
+                    self.spends.entry(outpoint).or_default().insert(txid);
+                });
+
+            if let Some(TxNode::Whole(old_tx)) = self.txs.insert(txid, TxNode::Whole(tx)) {
+                debug_assert_eq!(
+                    old_tx.as_tx().txid(),
+                    txid,
+                    "old tx of same txid should not be different"
+                );
+            }
+        }
+
+        for (outpoint, txout) in additions.txout {
+            let tx_entry = self
+                .txs
+                .entry(outpoint.txid)
+                .or_insert_with(TxNode::default);
+
+            match tx_entry {
+                TxNode::Whole(_) => { /* do nothing since we already have full tx */ }
+                TxNode::Partial(txouts) => {
+                    txouts.insert(outpoint.vout, txout);
+                }
+            }
+        }
+    }
+
+    /// Previews the resultant [`Additions`] when [`Self`] is updated against the `update` graph.
+    ///
+    /// The [`Additions`] would be the set difference of `update` and `self` (transactions that
+    /// exist in `update` but not in `self`).
+    pub fn determine_additions<'a, T2>(&self, update: &'a TxGraph<T2>) -> Additions<T>
+    where
+        T2: IntoOwned<T> + Clone,
+    {
+        let mut additions = Additions::<T>::default();
+
+        for (&txid, update_tx) in &update.txs {
+            if self.get_tx(txid).is_some() {
+                continue;
+            }
+
+            match update_tx {
+                TxNode::Whole(tx) => {
+                    if matches!(self.txs.get(&txid), None | Some(TxNode::Partial(_))) {
+                        additions
+                            .tx
+                            .insert(<T2 as IntoOwned<T>>::into_owned(tx.clone()));
+                    }
+                }
+                TxNode::Partial(partial) => {
+                    for (&vout, update_txout) in partial {
+                        let outpoint = OutPoint::new(txid, vout);
+
+                        if self.get_txout(outpoint) != Some(update_txout) {
+                            additions.txout.insert(outpoint, update_txout.clone());
+                        }
+                    }
+                }
+            }
+        }
+
+        additions
+    }
+
+    /// Returns the resultant [`Additions`] if the given transaction is inserted. Does not actually
+    /// mutate [`Self`].
+    ///
+    /// The [`Additions`] result will be empty if `tx` already existed in `self`.
+    pub fn insert_tx_preview(&self, tx: T) -> Additions<T> {
+        let mut update = Self::default();
+        update.txs.insert(tx.as_tx().txid(), TxNode::Whole(tx));
+        self.determine_additions(&update)
+    }
+
+    /// Returns the resultant [`Additions`] if the given `txout` is inserted at `outpoint`. Does not
+    /// mutate `self`.
+    ///
+    /// The [`Additions`] result will be empty if the `outpoint` (or a full transaction containing
+    /// the `outpoint`) already existed in `self`.
+    pub fn insert_txout_preview(&self, outpoint: OutPoint, txout: TxOut) -> Additions<T> {
+        let mut update = Self::default();
+        update.txs.insert(
+            outpoint.txid,
+            TxNode::Partial([(outpoint.vout, txout)].into()),
+        );
+        self.determine_additions(&update)
+    }
+}
+
+impl<T> TxGraph<T> {
+    /// The transactions spending from this output.
+    ///
+    /// `TxGraph` allows conflicting transactions within the graph. Obviously the transactions in
+    /// the returned set will never be in the same blockchain.
+    pub fn outspends(&self, outpoint: OutPoint) -> &HashSet<Txid> {
+        self.spends.get(&outpoint).unwrap_or(&self.empty_outspends)
+    }
+
+    /// Iterates over the transactions spending from `txid`.
+    ///
+    /// The iterator item is a tuple `(vout, txid-set)` where:
+    ///
+    /// - `vout` is the output index of the provided `txid` that is being spent
+    /// - `txid-set` is the set of txids spending that `vout`
+    pub fn tx_outspends(
+        &self,
+        txid: Txid,
+    ) -> impl DoubleEndedIterator<Item = (u32, &HashSet<Txid>)> + '_ {
+        let start = OutPoint { txid, vout: 0 };
+        let end = OutPoint {
+            txid,
+            vout: u32::MAX,
+        };
+        self.spends
+            .range(start..=end)
+            .map(|(outpoint, spends)| (outpoint.vout, spends))
+    }
+
+    /// Iterate over all partial transactions (outputs only) in the graph.
+    pub fn partial_transactions(&self) -> impl Iterator<Item = (Txid, &BTreeMap<u32, TxOut>)> {
+        self.txs.iter().filter_map(|(txid, tx)| match tx {
+            TxNode::Whole(_) => None,
+            TxNode::Partial(partial) => Some((*txid, partial)),
+        })
+    }
+
+    /// Creates an iterator that both filters and maps descendants from the starting `txid`.
+    ///
+    /// The supplied closure takes in two inputs `(depth, descendant_txid)`:
+    ///
+    /// * `depth` is the distance between the starting `txid` and the `descendant_txid`. I.e. if the
+    ///     descendant is spending an output of the starting `txid`, the `depth` will be 1.
+    /// * `descendant_txid` is the descendant's txid which we are considering to walk.
+    ///
+    /// The supplied closure returns an `Option<O>`, allowing the caller to map each node it visits
+    /// and to decide whether to visit its descendants.
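+    ///
+    /// # Example
+    ///
+    /// A minimal sketch with a two-transaction chain (placeholder outputs):
+    ///
+    /// ```rust
+    /// # use bdk_chain::tx_graph::TxGraph;
+    /// # use bitcoin::{OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut};
+    /// let tx_a = Transaction {
+    ///     version: 1,
+    ///     lock_time: PackedLockTime(0),
+    ///     input: vec![],
+    ///     output: vec![TxOut { value: 10_000, script_pubkey: Script::new() }],
+    /// };
+    /// let tx_b = Transaction {
+    ///     version: 1,
+    ///     lock_time: PackedLockTime(0),
+    ///     input: vec![TxIn {
+    ///         previous_output: OutPoint::new(tx_a.txid(), 0),
+    ///         ..Default::default()
+    ///     }],
+    ///     output: vec![],
+    /// };
+    /// let graph = TxGraph::new([tx_a.clone(), tx_b.clone()]);
+    ///
+    /// // collect every descendant of `tx_a` as `(depth, txid)`
+    /// let descendants = graph
+    ///     .walk_descendants(tx_a.txid(), |depth, txid| Some((depth, txid)))
+    ///     .collect::<Vec<_>>();
+    /// assert_eq!(descendants, vec![(1, tx_b.txid())]);
+    /// ```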
+    pub fn walk_descendants<'g, F, O>(&'g self, txid: Txid, walk_map: F) -> TxDescendants<F, T>
+    where
+        F: FnMut(usize, Txid) -> Option<O> + 'g,
+    {
+        TxDescendants::new_exclude_root(self, txid, walk_map)
+    }
+
+    /// Creates an iterator that both filters and maps conflicting transactions (this includes
+    /// descendants of directly-conflicting transactions, which are also considered conflicts).
+    ///
+    /// Refer to [`Self::walk_descendants`] for `walk_map` usage.
+    pub fn walk_conflicts<'g, F, O>(
+        &'g self,
+        tx: &'g Transaction,
+        walk_map: F,
+    ) -> TxDescendants<F, T>
+    where
+        F: FnMut(usize, Txid) -> Option<O> + 'g,
+    {
+        let txids = self.direct_conflicts_of_tx(tx).map(|(_, txid)| txid);
+        TxDescendants::from_multiple_include_root(self, txids, walk_map)
+    }
+
+    /// Given a transaction, return an iterator of txids which directly conflict with the given
+    /// transaction's inputs (spends). Each conflicting txid is returned alongside the input index
+    /// (vin) of the given transaction that it conflicts on.
+    ///
+    /// Note that this only returns directly conflicting txids and does not include descendants of
+    /// those txids (which are technically also conflicting).
+    pub fn direct_conflicts_of_tx<'g>(
+        &'g self,
+        tx: &'g Transaction,
+    ) -> impl Iterator<Item = (usize, Txid)> + '_ {
+        let txid = tx.txid();
+        tx.input
+            .iter()
+            .enumerate()
+            .filter_map(|(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
+            .flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned()))
+            .filter(move |(_, conflicting_txid)| *conflicting_txid != txid)
+    }
+
+    /// Whether the graph has any transactions or outputs in it.
+    pub fn is_empty(&self) -> bool {
+        self.txs.is_empty()
+    }
+}
+
+/// A structure that represents changes to a [`TxGraph`].
+///
+/// It is named "additions" because [`TxGraph`] is monotone so transactions can only be added and
+/// not removed.
+///
+/// Refer to [module-level documentation] for more.
+///
+/// [module-level documentation]: crate::tx_graph
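+///
+/// # Example
+///
+/// A sketch of accumulating two previews into a single [`Additions`] before applying them (using
+/// the same example transactions as the [module-level documentation]):
+///
+/// ```rust
+/// # use bdk_chain::tx_graph::TxGraph;
+/// # use bdk_chain::example_utils::*;
+/// # use bitcoin::Transaction;
+/// # let tx_a = tx_from_hex(RAW_TX_1);
+/// # let tx_b = tx_from_hex(RAW_TX_2);
+/// let txid_b = tx_b.txid();
+/// let mut graph = TxGraph::<Transaction>::default();
+///
+/// let mut additions = graph.insert_tx_preview(tx_a);
+/// // accumulate a second change on top of the first, then apply both at once
+/// additions.append(graph.insert_tx_preview(tx_b));
+/// graph.apply_additions(additions);
+/// assert!(graph.get_tx(txid_b).is_some());
+/// ```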
+#[derive(Debug, Clone, PartialEq)]
+#[cfg_attr(
+    feature = "serde",
+    derive(serde::Deserialize, serde::Serialize),
+    serde(
+        crate = "serde_crate",
+        bound(
+            deserialize = "T: Ord + serde::Deserialize<'de>",
+            serialize = "T: Ord + serde::Serialize"
+        )
+    )
+)]
+#[must_use]
+pub struct Additions<T> {
+    pub tx: BTreeSet<T>,
+    pub txout: BTreeMap<OutPoint, TxOut>,
+}
+
+impl<T> Additions<T> {
+    /// Returns true if the [`Additions`] is empty (no transactions or txouts).
+    pub fn is_empty(&self) -> bool {
+        self.tx.is_empty() && self.txout.is_empty()
+    }
+
+    /// Iterates over all outpoints contained within [`Additions`].
+    pub fn txouts(&self) -> impl Iterator<Item = (OutPoint, &TxOut)>
+    where
+        T: AsTransaction,
+    {
+        self.tx
+            .iter()
+            .flat_map(|tx| {
+                tx.as_tx()
+                    .output
+                    .iter()
+                    .enumerate()
+                    .map(|(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
+            })
+            .chain(self.txout.iter().map(|(op, txout)| (*op, txout)))
+    }
+
+    /// Appends the changes in `other` into self such that applying `self` afterwards has the same
+    /// effect as sequentially applying the original `self` and `other`.
+    pub fn append(&mut self, mut other: Additions<T>)
+    where
+        T: Ord,
+    {
+        self.tx.append(&mut other.tx);
+        self.txout.append(&mut other.txout);
+    }
+}
+
+impl<T> Default for Additions<T> {
+    fn default() -> Self {
+        Self {
+            tx: Default::default(),
+            txout: Default::default(),
+        }
+    }
+}
+
+impl AsRef<TxGraph> for TxGraph {
+    fn as_ref(&self) -> &TxGraph {
+        self
+    }
+}
+
+impl<T: AsTransaction> ForEachTxOut for Additions<T> {
+    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
+        self.txouts().for_each(f)
+    }
+}
+
+impl<T: AsTransaction> ForEachTxOut for TxGraph<T> {
+    fn for_each_txout(&self, f: impl FnMut((OutPoint, &TxOut))) {
+        self.all_txouts().for_each(f)
+    }
+}
+
+/// An iterator that traverses transaction descendants.
+///
+/// This `struct` is created by the [`walk_descendants`] method of [`TxGraph`].
+///
+/// [`walk_descendants`]: TxGraph::walk_descendants
+pub struct TxDescendants<'g, F, T> {
+    graph: &'g TxGraph<T>,
+    visited: HashSet<Txid>,
+    stack: Vec<(usize, Txid)>,
+    filter_map: F,
+}
+
+impl<'g, F, T> TxDescendants<'g, F, T> {
+    /// Creates a `TxDescendants` that includes the starting `txid` when iterating.
+    #[allow(unused)]
+    pub(crate) fn new_include_root(graph: &'g TxGraph<T>, txid: Txid, filter_map: F) -> Self {
+        Self {
+            graph,
+            visited: Default::default(),
+            stack: [(0, txid)].into(),
+            filter_map,
+        }
+    }
+
+    /// Creates a `TxDescendants` that excludes the starting `txid` when iterating.
+    pub(crate) fn new_exclude_root(graph: &'g TxGraph<T>, txid: Txid, filter_map: F) -> Self {
+        let mut descendants = Self {
+            graph,
+            visited: Default::default(),
+            stack: Default::default(),
+            filter_map,
+        };
+        descendants.populate_stack(1, txid);
+        descendants
+    }
+
+    /// Creates a `TxDescendants` from multiple starting transactions that includes the starting
+    /// `txid`s when iterating.
+    pub(crate) fn from_multiple_include_root<I>(
+        graph: &'g TxGraph<T>,
+        txids: I,
+        filter_map: F,
+    ) -> Self
+    where
+        I: IntoIterator<Item = Txid>,
+    {
+        Self {
+            graph,
+            visited: Default::default(),
+            stack: txids.into_iter().map(|txid| (0, txid)).collect(),
+            filter_map,
+        }
+    }
+
+    /// Creates a `TxDescendants` from multiple starting transactions that excludes the starting
+    /// `txid`s when iterating.
+    #[allow(unused)]
+    pub(crate) fn from_multiple_exclude_root<I>(
+        graph: &'g TxGraph<T>,
+        txids: I,
+        filter_map: F,
+    ) -> Self
+    where
+        I: IntoIterator<Item = Txid>,
+    {
+        let mut descendants = Self {
+            graph,
+            visited: Default::default(),
+            stack: Default::default(),
+            filter_map,
+        };
+        for txid in txids {
+            descendants.populate_stack(1, txid);
+        }
+        descendants
+    }
+}
+
+impl<'g, F, T> TxDescendants<'g, F, T> {
+    fn populate_stack(&mut self, depth: usize, txid: Txid) {
+        let spend_paths = self
+            .graph
+            .spends
+            .range(tx_outpoint_range(txid))
+            .flat_map(|(_, spends)| spends)
+            .map(|&txid| (depth, txid));
+        self.stack.extend(spend_paths);
+    }
+}
+
+impl<'g, F, O, T> Iterator for TxDescendants<'g, F, T>
+where
+    F: FnMut(usize, Txid) -> Option<O>,
+{
+    type Item = O;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let (op_spends, txid, item) = loop {
+            // we have exhausted all paths when stack is empty
+            let (op_spends, txid) = self.stack.pop()?;
+            // we do not want to visit the same transaction twice
+            if self.visited.insert(txid) {
+                // ignore paths when user filters them out
+                if let Some(item) = (self.filter_map)(op_spends, txid) {
+                    break (op_spends, txid, item);
+                }
+            }
+        };
+
+        self.populate_stack(op_spends + 1, txid);
+        Some(item)
+    }
+}
+
+fn tx_outpoint_range(txid: Txid) -> RangeInclusive<OutPoint> {
+    OutPoint::new(txid, u32::MIN)..=OutPoint::new(txid, u32::MAX)
+}
diff --git a/crates/chain/tests/common/mod.rs b/crates/chain/tests/common/mod.rs
new file mode 100644 (file)
index 0000000..e9b7a10
--- /dev/null
@@ -0,0 +1,60 @@
+#[allow(unused_macros)]
+macro_rules! h {
+    ($index:literal) => {{
+        bitcoin::hashes::Hash::hash($index.as_bytes())
+    }};
+}
+
+#[allow(unused_macros)]
+macro_rules! chain {
+    ($([$($tt:tt)*]),*) => { chain!( checkpoints: [$([$($tt)*]),*] ) };
+    (checkpoints: $($tail:tt)*) => { chain!( index: TxHeight, checkpoints: $($tail)*) };
+    (index: $ind:ty, checkpoints: [ $([$height:expr, $block_hash:expr]),* ] $(,txids: [$(($txid:expr, $tx_height:expr)),*])?) => {{
+        #[allow(unused_mut)]
+        let mut chain = bdk_chain::sparse_chain::SparseChain::<$ind>::from_checkpoints([$(($height, $block_hash).into()),*]);
+
+        $(
+            $(
+                let _ = chain.insert_tx($txid, $tx_height).expect("should succeed");
+            )*
+        )?
+
+        chain
+    }};
+}
+
+#[allow(unused_macros)]
+macro_rules! changeset {
+    (checkpoints: $($tail:tt)*) => { changeset!(index: TxHeight, checkpoints: $($tail)*) };
+    (
+        index: $ind:ty,
+        checkpoints: [ $(( $height:expr, $cp_to:expr )),* ]
+        $(,txids: [ $(( $txid:expr, $tx_to:expr )),* ])?
+    ) => {{
+        use bdk_chain::collections::BTreeMap;
+
+        #[allow(unused_mut)]
+        bdk_chain::sparse_chain::ChangeSet::<$ind> {
+            checkpoints: {
+                let mut changes = BTreeMap::default();
+                $(changes.insert($height, $cp_to);)*
+                changes
+            },
+            txids: {
+                let mut changes = BTreeMap::default();
+                $($(changes.insert($txid, $tx_to.map(|h: TxHeight| h.into()));)*)?
+                changes
+            }
+        }
+    }};
+}
+
+#[allow(unused)]
+pub fn new_tx(lt: u32) -> bitcoin::Transaction {
+    bitcoin::Transaction {
+        version: 0x00,
+        lock_time: bitcoin::PackedLockTime(lt),
+        input: vec![],
+        output: vec![],
+    }
+}
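
For orientation while reading the test files that follow: `h!` hashes a string literal into whatever hash type the call site expects (a `BlockHash`, a `Txid`, ...), `chain!` builds a `SparseChain` from `[height, hash]` checkpoint pairs plus optional `(txid, position)` entries, and `changeset!` builds the expected `sparse_chain::ChangeSet`, where, judging by the tests below, a `Some(_)` value adds or moves an entry and `None` removes it. A rough expansion of a small `chain!` invocation, as an illustrative sketch only (not part of the patch):

use bdk_chain::{sparse_chain::SparseChain, TxHeight};
use bitcoin::{hashes::Hash, BlockHash};

fn main() {
    // What `h!("A")` and `h!("B")` evaluate to: the literal's bytes hashed
    // into the inferred hash type, here BlockHash.
    let a: BlockHash = Hash::hash("A".as_bytes());
    let b: BlockHash = Hash::hash("B".as_bytes());

    // Roughly what `chain!([0, h!("A")], [1, h!("B")])` expands to.
    let chain = SparseChain::<TxHeight>::from_checkpoints([(0, a).into(), (1, b).into()]);
    assert_eq!(chain.checkpoints().len(), 2);
}
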
diff --git a/crates/chain/tests/test_chain_graph.rs b/crates/chain/tests/test_chain_graph.rs
new file mode 100644 (file)
index 0000000..e071dac
--- /dev/null
@@ -0,0 +1,653 @@
+#[macro_use]
+mod common;
+
+use bdk_chain::{
+    chain_graph::*,
+    collections::HashSet,
+    sparse_chain,
+    tx_graph::{self, TxGraph},
+    BlockId, TxHeight,
+};
+use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness};
+
+#[test]
+fn test_spent_by() {
+    let tx1 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let op = OutPoint {
+        txid: tx1.txid(),
+        vout: 0,
+    };
+
+    let tx2 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: op,
+            ..Default::default()
+        }],
+        output: vec![],
+    };
+    let tx3 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(42),
+        input: vec![TxIn {
+            previous_output: op,
+            ..Default::default()
+        }],
+        output: vec![],
+    };
+
+    let mut cg1 = ChainGraph::default();
+    let _ = cg1
+        .insert_tx(tx1, TxHeight::Unconfirmed)
+        .expect("should insert");
+    let mut cg2 = cg1.clone();
+    let _ = cg1
+        .insert_tx(tx2.clone(), TxHeight::Unconfirmed)
+        .expect("should insert");
+    let _ = cg2
+        .insert_tx(tx3.clone(), TxHeight::Unconfirmed)
+        .expect("should insert");
+
+    assert_eq!(cg1.spent_by(op), Some((&TxHeight::Unconfirmed, tx2.txid())));
+    assert_eq!(cg2.spent_by(op), Some((&TxHeight::Unconfirmed, tx3.txid())));
+}
+
+#[test]
+fn update_evicts_conflicting_tx() {
+    let cp_a = BlockId {
+        height: 0,
+        hash: h!("A"),
+    };
+    let cp_b = BlockId {
+        height: 1,
+        hash: h!("B"),
+    };
+    let cp_b2 = BlockId {
+        height: 1,
+        hash: h!("B'"),
+    };
+
+    let tx_a = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let tx_b = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_a.txid(), 0),
+            script_sig: Script::new(),
+            sequence: Sequence::default(),
+            witness: Witness::new(),
+        }],
+        output: vec![TxOut::default()],
+    };
+
+    let tx_b2 = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_a.txid(), 0),
+            script_sig: Script::new(),
+            sequence: Sequence::default(),
+            witness: Witness::new(),
+        }],
+        output: vec![TxOut::default(), TxOut::default()],
+    };
+    {
+        let mut cg1 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
+            let _ = cg
+                .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
+                .expect("should insert tx");
+            let _ = cg
+                .insert_tx(tx_b.clone(), TxHeight::Unconfirmed)
+                .expect("should insert tx");
+            cg
+        };
+        let cg2 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg
+                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
+                .expect("should insert tx");
+            cg
+        };
+
+        let changeset = ChangeSet::<TxHeight, Transaction> {
+            chain: sparse_chain::ChangeSet {
+                checkpoints: Default::default(),
+                txids: [
+                    (tx_b.txid(), None),
+                    (tx_b2.txid(), Some(TxHeight::Unconfirmed)),
+                ]
+                .into(),
+            },
+            graph: tx_graph::Additions {
+                tx: [tx_b2.clone()].into(),
+                txout: [].into(),
+            },
+        };
+        assert_eq!(
+            cg1.determine_changeset(&cg2),
+            Ok(changeset.clone()),
+            "tx should be evicted from mempool"
+        );
+
+        cg1.apply_changeset(changeset);
+    }
+
+    {
+        let cg1 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
+            let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
+            let _ = cg
+                .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
+                .expect("should insert tx");
+            let _ = cg
+                .insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
+                .expect("should insert tx");
+            cg
+        };
+        let cg2 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg
+                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
+                .expect("should insert tx");
+            cg
+        };
+        assert_eq!(
+            cg1.determine_changeset(&cg2),
+            Err(UpdateError::UnresolvableConflict(UnresolvableConflict {
+                already_confirmed_tx: (TxHeight::Confirmed(1), tx_b.txid()),
+                update_tx: (TxHeight::Unconfirmed, tx_b2.txid()),
+            })),
+            "fail if tx is evicted from valid block"
+        );
+    }
+
+    {
+        // Given 2 blocks `{A, B}`, and an update that invalidates block B with
+        // `{A, B'}`, we expect txs that exist in `B` and conflict with txs
+        // introduced by the update to be successfully evicted.
+        let mut cg1 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
+            let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
+            let _ = cg
+                .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
+                .expect("should insert tx");
+            let _ = cg
+                .insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
+                .expect("should insert tx");
+            cg
+        };
+        let cg2 = {
+            let mut cg = ChainGraph::default();
+            let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
+            let _ = cg.insert_checkpoint(cp_b2).expect("should insert cp");
+            let _ = cg
+                .insert_tx(tx_b2.clone(), TxHeight::Unconfirmed)
+                .expect("should insert tx");
+            cg
+        };
+
+        let changeset = ChangeSet::<TxHeight, Transaction> {
+            chain: sparse_chain::ChangeSet {
+                checkpoints: [(1, Some(h!("B'")))].into(),
+                txids: [
+                    (tx_b.txid(), None),
+                    (tx_b2.txid(), Some(TxHeight::Unconfirmed)),
+                ]
+                .into(),
+            },
+            graph: tx_graph::Additions {
+                tx: [tx_b2.clone()].into(),
+                txout: [].into(),
+            },
+        };
+        assert_eq!(
+            cg1.determine_changeset(&cg2),
+            Ok(changeset.clone()),
+            "tx should be evicted from B",
+        );
+
+        cg1.apply_changeset(changeset);
+    }
+}
+
+#[test]
+fn chain_graph_new_missing() {
+    let tx_a = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+    let tx_b = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let update = chain!(
+        index: TxHeight,
+        checkpoints: [[0, h!("A")]],
+        txids: [
+            (tx_a.txid(), TxHeight::Confirmed(0)),
+            (tx_b.txid(), TxHeight::Confirmed(0))
+        ]
+    );
+    let mut graph = TxGraph::default();
+
+    let mut expected_missing = HashSet::new();
+    expected_missing.insert(tx_a.txid());
+    expected_missing.insert(tx_b.txid());
+
+    assert_eq!(
+        ChainGraph::new(update.clone(), graph.clone()),
+        Err(NewError::Missing(expected_missing.clone()))
+    );
+
+    let _ = graph.insert_tx(tx_b.clone());
+    expected_missing.remove(&tx_b.txid());
+
+    assert_eq!(
+        ChainGraph::new(update.clone(), graph.clone()),
+        Err(NewError::Missing(expected_missing.clone()))
+    );
+
+    let _ = graph.insert_txout(
+        OutPoint {
+            txid: tx_a.txid(),
+            vout: 0,
+        },
+        tx_a.output[0].clone(),
+    );
+
+    assert_eq!(
+        ChainGraph::new(update.clone(), graph.clone()),
+        Err(NewError::Missing(expected_missing)),
+        "inserting an output instead of full tx doesn't satisfy constraint"
+    );
+
+    let _ = graph.insert_tx(tx_a.clone());
+
+    let new_graph = ChainGraph::new(update.clone(), graph.clone()).unwrap();
+    let expected_graph = {
+        let mut cg = ChainGraph::<TxHeight, Transaction>::default();
+        let _ = cg
+            .insert_checkpoint(update.latest_checkpoint().unwrap())
+            .unwrap();
+        let _ = cg.insert_tx(tx_a, TxHeight::Confirmed(0)).unwrap();
+        let _ = cg.insert_tx(tx_b, TxHeight::Confirmed(0)).unwrap();
+        cg
+    };
+
+    assert_eq!(new_graph, expected_graph);
+}
+
+#[test]
+fn chain_graph_new_conflicts() {
+    let tx_a = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let tx_b = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_a.txid(), 0),
+            script_sig: Script::new(),
+            sequence: Sequence::default(),
+            witness: Witness::new(),
+        }],
+        output: vec![TxOut::default()],
+    };
+
+    let tx_b2 = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_a.txid(), 0),
+            script_sig: Script::new(),
+            sequence: Sequence::default(),
+            witness: Witness::new(),
+        }],
+        output: vec![TxOut::default(), TxOut::default()],
+    };
+
+    let chain = chain!(
+        index: TxHeight,
+        checkpoints: [[5, h!("A")]],
+        txids: [
+            (tx_a.txid(), TxHeight::Confirmed(1)),
+            (tx_b.txid(), TxHeight::Confirmed(2)),
+            (tx_b2.txid(), TxHeight::Confirmed(3))
+        ]
+    );
+
+    let graph = TxGraph::new([tx_a, tx_b, tx_b2]);
+
+    assert!(matches!(
+        ChainGraph::new(chain, graph),
+        Err(NewError::Conflict { .. })
+    ));
+}
+
+#[test]
+fn test_get_tx_in_chain() {
+    let mut cg = ChainGraph::default();
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let _ = cg.insert_tx(tx.clone(), TxHeight::Unconfirmed).unwrap();
+    assert_eq!(
+        cg.get_tx_in_chain(tx.txid()),
+        Some((&TxHeight::Unconfirmed, &tx))
+    );
+}
+
+#[test]
+fn test_iterate_transactions() {
+    let mut cg = ChainGraph::default();
+    let txs = (0..3)
+        .map(|i| Transaction {
+            version: i,
+            lock_time: PackedLockTime(0),
+            input: vec![],
+            output: vec![TxOut::default()],
+        })
+        .collect::<Vec<_>>();
+    let _ = cg
+        .insert_checkpoint(BlockId {
+            height: 1,
+            hash: h!("A"),
+        })
+        .unwrap();
+    let _ = cg
+        .insert_tx(txs[0].clone(), TxHeight::Confirmed(1))
+        .unwrap();
+    let _ = cg.insert_tx(txs[1].clone(), TxHeight::Unconfirmed).unwrap();
+    let _ = cg
+        .insert_tx(txs[2].clone(), TxHeight::Confirmed(0))
+        .unwrap();
+
+    assert_eq!(
+        cg.transactions_in_chain().collect::<Vec<_>>(),
+        vec![
+            (&TxHeight::Confirmed(0), &txs[2]),
+            (&TxHeight::Confirmed(1), &txs[0]),
+            (&TxHeight::Unconfirmed, &txs[1]),
+        ]
+    );
+}
+
+/// Start with: block1, block2a, tx1, tx2a
+///   Update 1: block2a -> block2b , tx2a -> tx2b
+///   Update 2: block2b -> block2c , tx2b -> tx2a
+#[test]
+fn test_apply_changes_reintroduce_tx() {
+    let block1 = BlockId {
+        height: 1,
+        hash: h!("block 1"),
+    };
+    let block2a = BlockId {
+        height: 2,
+        hash: h!("block 2a"),
+    };
+    let block2b = BlockId {
+        height: 2,
+        hash: h!("block 2b"),
+    };
+    let block2c = BlockId {
+        height: 2,
+        hash: h!("block 2c"),
+    };
+
+    let tx1 = Transaction {
+        version: 0,
+        lock_time: PackedLockTime(1),
+        input: Vec::new(),
+        output: [TxOut {
+            value: 1,
+            script_pubkey: Script::new(),
+        }]
+        .into(),
+    };
+
+    let tx2a = Transaction {
+        version: 0,
+        lock_time: PackedLockTime('a'.into()),
+        input: [TxIn {
+            previous_output: OutPoint::new(tx1.txid(), 0),
+            ..Default::default()
+        }]
+        .into(),
+        output: [TxOut {
+            value: 0,
+            ..Default::default()
+        }]
+        .into(),
+    };
+
+    let tx2b = Transaction {
+        lock_time: PackedLockTime('b'.into()),
+        ..tx2a.clone()
+    };
+
+    // block1, block2a, tx1, tx2a
+    let mut cg = {
+        let mut cg = ChainGraph::default();
+        let _ = cg.insert_checkpoint(block1).unwrap();
+        let _ = cg.insert_checkpoint(block2a).unwrap();
+        let _ = cg.insert_tx(tx1.clone(), TxHeight::Confirmed(1)).unwrap();
+        let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap();
+        cg
+    };
+
+    // block2a -> block2b , tx2a -> tx2b
+    let update = {
+        let mut update = ChainGraph::default();
+        let _ = update.insert_checkpoint(block1).unwrap();
+        let _ = update.insert_checkpoint(block2b).unwrap();
+        let _ = update
+            .insert_tx(tx2b.clone(), TxHeight::Confirmed(2))
+            .unwrap();
+        update
+    };
+    assert_eq!(
+        cg.apply_update(update).expect("should update"),
+        ChangeSet {
+            chain: changeset! {
+                checkpoints: [(2, Some(block2b.hash))],
+                txids: [(tx2a.txid(), None), (tx2b.txid(), Some(TxHeight::Confirmed(2)))]
+            },
+            graph: tx_graph::Additions {
+                tx: [tx2b.clone()].into(),
+                ..Default::default()
+            },
+        }
+    );
+
+    // block2b -> block2c , tx2b -> tx2a
+    let update = {
+        let mut update = ChainGraph::default();
+        let _ = update.insert_checkpoint(block1).unwrap();
+        let _ = update.insert_checkpoint(block2c).unwrap();
+        let _ = update
+            .insert_tx(tx2a.clone(), TxHeight::Confirmed(2))
+            .unwrap();
+        update
+    };
+    assert_eq!(
+        cg.apply_update(update).expect("should update"),
+        ChangeSet {
+            chain: changeset! {
+                checkpoints: [(2, Some(block2c.hash))],
+                txids: [(tx2b.txid(), None), (tx2a.txid(), Some(TxHeight::Confirmed(2)))]
+            },
+            ..Default::default()
+        }
+    );
+}
+
+#[test]
+fn test_evict_descendants() {
+    let block_1 = BlockId {
+        height: 1,
+        hash: h!("block 1"),
+    };
+
+    let block_2a = BlockId {
+        height: 2,
+        hash: h!("block 2 a"),
+    };
+
+    let block_2b = BlockId {
+        height: 2,
+        hash: h!("block 2 b"),
+    };
+
+    let tx_1 = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(h!("fake tx"), 0),
+            ..Default::default()
+        }],
+        output: vec![TxOut {
+            value: 10_000,
+            script_pubkey: Script::new(),
+        }],
+        ..common::new_tx(1)
+    };
+    let tx_2 = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_1.txid(), 0),
+            ..Default::default()
+        }],
+        output: vec![
+            TxOut {
+                value: 20_000,
+                script_pubkey: Script::new(),
+            },
+            TxOut {
+                value: 30_000,
+                script_pubkey: Script::new(),
+            },
+        ],
+        ..common::new_tx(2)
+    };
+    let tx_3 = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_2.txid(), 0),
+            ..Default::default()
+        }],
+        output: vec![TxOut {
+            value: 40_000,
+            script_pubkey: Script::new(),
+        }],
+        ..common::new_tx(3)
+    };
+    let tx_4 = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_2.txid(), 1),
+            ..Default::default()
+        }],
+        output: vec![TxOut {
+            value: 40_000,
+            script_pubkey: Script::new(),
+        }],
+        ..common::new_tx(4)
+    };
+    let tx_5 = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_4.txid(), 0),
+            ..Default::default()
+        }],
+        output: vec![TxOut {
+            value: 40_000,
+            script_pubkey: Script::new(),
+        }],
+        ..common::new_tx(5)
+    };
+
+    let tx_conflict = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_1.txid(), 0),
+            ..Default::default()
+        }],
+        output: vec![TxOut {
+            value: 12345,
+            script_pubkey: Script::new(),
+        }],
+        ..common::new_tx(6)
+    };
+
+    // 1 is spent by 2, 2 is spent by 3 and 4, 4 is spent by 5
+    let _txid_1 = tx_1.txid();
+    let txid_2 = tx_2.txid();
+    let txid_3 = tx_3.txid();
+    let txid_4 = tx_4.txid();
+    let txid_5 = tx_5.txid();
+
+    // this tx conflicts with 2
+    let txid_conflict = tx_conflict.txid();
+
+    let cg = {
+        let mut cg = ChainGraph::<TxHeight>::default();
+        let _ = cg.insert_checkpoint(block_1);
+        let _ = cg.insert_checkpoint(block_2a);
+        let _ = cg.insert_tx(tx_1, TxHeight::Confirmed(1));
+        let _ = cg.insert_tx(tx_2, TxHeight::Confirmed(2));
+        let _ = cg.insert_tx(tx_3, TxHeight::Confirmed(2));
+        let _ = cg.insert_tx(tx_4, TxHeight::Confirmed(2));
+        let _ = cg.insert_tx(tx_5, TxHeight::Confirmed(2));
+        cg
+    };
+
+    let update = {
+        let mut cg = ChainGraph::<TxHeight>::default();
+        let _ = cg.insert_checkpoint(block_1);
+        let _ = cg.insert_checkpoint(block_2b);
+        let _ = cg.insert_tx(tx_conflict.clone(), TxHeight::Confirmed(2));
+        cg
+    };
+
+    assert_eq!(
+        cg.determine_changeset(&update),
+        Ok(ChangeSet {
+            chain: changeset! {
+                checkpoints: [(2, Some(block_2b.hash))],
+                txids: [(txid_2, None), (txid_3, None), (txid_4, None), (txid_5, None), (txid_conflict, Some(TxHeight::Confirmed(2)))]
+            },
+            graph: tx_graph::Additions {
+                tx: [tx_conflict.clone()].into(),
+                ..Default::default()
+            }
+        })
+    );
+
+    let err = cg
+        .insert_tx_preview(tx_conflict.clone(), TxHeight::Unconfirmed)
+        .expect_err("must fail due to conflicts");
+    assert!(matches!(err, InsertTxError::UnresolvableConflict(_)));
+}
diff --git a/crates/chain/tests/test_keychain_tracker.rs b/crates/chain/tests/test_keychain_tracker.rs
new file mode 100644 (file)
index 0000000..67eaeb4
--- /dev/null
@@ -0,0 +1,243 @@
+#![cfg(feature = "miniscript")]
+#[macro_use]
+mod common;
+use bdk_chain::{
+    keychain::{Balance, KeychainTracker},
+    miniscript::{
+        bitcoin::{secp256k1::Secp256k1, OutPoint, PackedLockTime, Transaction, TxOut},
+        Descriptor,
+    },
+    BlockId, ConfirmationTime, TxHeight,
+};
+use bitcoin::TxIn;
+
+#[test]
+fn test_insert_tx() {
+    let mut tracker = KeychainTracker::default();
+    let secp = Secp256k1::new();
+    let (descriptor, _) = Descriptor::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+    tracker.add_keychain((), descriptor.clone());
+    let txout = TxOut {
+        value: 100_000,
+        script_pubkey: descriptor.at_derivation_index(5).script_pubkey(),
+    };
+
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![txout],
+    };
+
+    let _ = tracker.txout_index.reveal_to_target(&(), 5);
+
+    let changeset = tracker
+        .insert_tx_preview(tx.clone(), ConfirmationTime::Unconfirmed)
+        .unwrap();
+    tracker.apply_changeset(changeset);
+    assert_eq!(
+        tracker
+            .chain_graph()
+            .transactions_in_chain()
+            .collect::<Vec<_>>(),
+        vec![(&ConfirmationTime::Unconfirmed, &tx)]
+    );
+
+    assert_eq!(
+        tracker
+            .txout_index
+            .txouts_of_keychain(&())
+            .collect::<Vec<_>>(),
+        vec![(
+            5,
+            OutPoint {
+                txid: tx.txid(),
+                vout: 0
+            }
+        )]
+    );
+}
+
+#[test]
+fn test_balance() {
+    use core::str::FromStr;
+    #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)]
+    enum Keychain {
+        One,
+        Two,
+    }
+    let mut tracker = KeychainTracker::<Keychain, TxHeight>::default();
+    let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
+    let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
+    tracker.add_keychain(Keychain::One, one.clone());
+    tracker.add_keychain(Keychain::Two, two.clone());
+
+    let tx1 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 13_000,
+            script_pubkey: tracker
+                .txout_index
+                .reveal_next_spk(&Keychain::One)
+                .0
+                 .1
+                .clone(),
+        }],
+    };
+
+    let tx2 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 7_000,
+            script_pubkey: tracker
+                .txout_index
+                .reveal_next_spk(&Keychain::Two)
+                .0
+                 .1
+                .clone(),
+        }],
+    };
+
+    let tx_coinbase = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn::default()],
+        output: vec![TxOut {
+            value: 11_000,
+            script_pubkey: tracker
+                .txout_index
+                .reveal_next_spk(&Keychain::Two)
+                .0
+                 .1
+                .clone(),
+        }],
+    };
+
+    assert!(tx_coinbase.is_coin_base());
+
+    let _ = tracker
+        .insert_checkpoint(BlockId {
+            height: 5,
+            hash: h!("1"),
+        })
+        .unwrap();
+
+    let should_trust = |keychain: &Keychain| match keychain {
+        &Keychain::One => false,
+        &Keychain::Two => true,
+    };
+
+    assert_eq!(tracker.balance(should_trust), Balance::default());
+
+    let _ = tracker
+        .insert_tx(tx1.clone(), TxHeight::Unconfirmed)
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            untrusted_pending: 13_000,
+            ..Default::default()
+        }
+    );
+
+    let _ = tracker
+        .insert_tx(tx2.clone(), TxHeight::Unconfirmed)
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 7_000,
+            untrusted_pending: 13_000,
+            ..Default::default()
+        }
+    );
+
+    let _ = tracker
+        .insert_tx(tx_coinbase, TxHeight::Confirmed(0))
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 7_000,
+            untrusted_pending: 13_000,
+            immature: 11_000,
+            ..Default::default()
+        }
+    );
+
+    let _ = tracker
+        .insert_tx(tx1.clone(), TxHeight::Confirmed(1))
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 7_000,
+            untrusted_pending: 0,
+            immature: 11_000,
+            confirmed: 13_000,
+        }
+    );
+
+    let _ = tracker
+        .insert_tx(tx2.clone(), TxHeight::Confirmed(2))
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 0,
+            untrusted_pending: 0,
+            immature: 11_000,
+            confirmed: 20_000,
+        }
+    );
+
+    let _ = tracker
+        .insert_checkpoint(BlockId {
+            height: 98,
+            hash: h!("98"),
+        })
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 0,
+            untrusted_pending: 0,
+            immature: 11_000,
+            confirmed: 20_000,
+        }
+    );
+
+    let _ = tracker
+        .insert_checkpoint(BlockId {
+            height: 99,
+            hash: h!("99"),
+        })
+        .unwrap();
+
+    assert_eq!(
+        tracker.balance(should_trust),
+        Balance {
+            trusted_pending: 0,
+            untrusted_pending: 0,
+            immature: 0,
+            confirmed: 31_000,
+        }
+    );
+
+    assert_eq!(tracker.balance_at(0), 0);
+    assert_eq!(tracker.balance_at(1), 13_000);
+    assert_eq!(tracker.balance_at(2), 20_000);
+    assert_eq!(tracker.balance_at(98), 20_000);
+    assert_eq!(tracker.balance_at(99), 31_000);
+    assert_eq!(tracker.balance_at(100), 31_000);
+}
diff --git a/crates/chain/tests/test_keychain_txout_index.rs b/crates/chain/tests/test_keychain_txout_index.rs
new file mode 100644 (file)
index 0000000..48be994
--- /dev/null
@@ -0,0 +1,328 @@
+#![cfg(feature = "miniscript")]
+
+#[macro_use]
+mod common;
+use bdk_chain::{
+    collections::BTreeMap,
+    keychain::{DerivationAdditions, KeychainTxOutIndex},
+};
+
+use bitcoin::{secp256k1::Secp256k1, Script, Transaction, TxOut};
+use miniscript::{Descriptor, DescriptorPublicKey};
+
+#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
+enum TestKeychain {
+    External,
+    Internal,
+}
+
+fn init_txout_index() -> (
+    bdk_chain::keychain::KeychainTxOutIndex<TestKeychain>,
+    Descriptor<DescriptorPublicKey>,
+    Descriptor<DescriptorPublicKey>,
+) {
+    let mut txout_index = bdk_chain::keychain::KeychainTxOutIndex::<TestKeychain>::default();
+
+    let secp = bdk_chain::bitcoin::secp256k1::Secp256k1::signing_only();
+    let (external_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/0/*)").unwrap();
+    let (internal_descriptor,_) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "tr([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/*)").unwrap();
+
+    txout_index.add_keychain(TestKeychain::External, external_descriptor.clone());
+    txout_index.add_keychain(TestKeychain::Internal, internal_descriptor.clone());
+
+    (txout_index, external_descriptor, internal_descriptor)
+}
+
+fn spk_at_index(descriptor: &Descriptor<DescriptorPublicKey>, index: u32) -> Script {
+    descriptor
+        .derived_descriptor(&Secp256k1::verification_only(), index)
+        .expect("must derive")
+        .script_pubkey()
+}
+
+#[test]
+fn test_set_all_derivation_indices() {
+    let (mut txout_index, _, _) = init_txout_index();
+    let derive_to: BTreeMap<_, _> =
+        [(TestKeychain::External, 12), (TestKeychain::Internal, 24)].into();
+    assert_eq!(
+        txout_index.reveal_to_target_multi(&derive_to).1.as_inner(),
+        &derive_to
+    );
+    assert_eq!(txout_index.last_revealed_indices(), &derive_to);
+    assert_eq!(
+        txout_index.reveal_to_target_multi(&derive_to).1,
+        DerivationAdditions::default(),
+        "no changes if we set to the same thing"
+    );
+}
+
+#[test]
+fn test_lookahead() {
+    let (mut txout_index, external_desc, internal_desc) = init_txout_index();
+
+    // ensure it does not break anything if lookahead is set multiple times
+    (0..=10).for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::External, lookahead));
+    (0..=20)
+        .filter(|v| v % 2 == 0)
+        .for_each(|lookahead| txout_index.set_lookahead(&TestKeychain::Internal, lookahead));
+
+    assert_eq!(txout_index.inner().all_spks().len(), 30);
+
+    // given:
+    // - external lookahead set to 10
+    // - internal lookahead set to 20
+    // when:
+    // - set the external derivation index to a value higher than the last revealed index, but within the lookahead
+    // expect:
+    // - scripts cached in spk_txout_index should increase correctly
+    // - stored scripts of external keychain should be of expected counts
+    for index in (0..20).skip_while(|i| i % 2 == 1) {
+        let (revealed_spks, revealed_additions) =
+            txout_index.reveal_to_target(&TestKeychain::External, index);
+        assert_eq!(
+            revealed_spks.collect::<Vec<_>>(),
+            vec![(index, spk_at_index(&external_desc, index))],
+        );
+        assert_eq!(
+            revealed_additions.as_inner(),
+            &[(TestKeychain::External, index)].into()
+        );
+
+        assert_eq!(
+            txout_index.inner().all_spks().len(),
+            10 /* external lookahead */ +
+            20 /* internal lookahead */ +
+            index as usize + 1 /* `derived` count */
+        );
+        assert_eq!(
+            txout_index
+                .revealed_spks_of_keychain(&TestKeychain::External)
+                .count(),
+            index as usize + 1,
+        );
+        assert_eq!(
+            txout_index
+                .revealed_spks_of_keychain(&TestKeychain::Internal)
+                .count(),
+            0,
+        );
+        assert_eq!(
+            txout_index
+                .unused_spks_of_keychain(&TestKeychain::External)
+                .count(),
+            index as usize + 1,
+        );
+        assert_eq!(
+            txout_index
+                .unused_spks_of_keychain(&TestKeychain::Internal)
+                .count(),
+            0,
+        );
+    }
+
+    // given:
+    // - internal lookahead is 20
+    // - internal derivation index is `None`
+    // when:
+    // - derivation index is set ahead of current derivation index + lookahead
+    // expect:
+    // - scripts cached in spk_txout_index should increase correctly, a.k.a. no scripts are skipped
+    let (revealed_spks, revealed_additions) =
+        txout_index.reveal_to_target(&TestKeychain::Internal, 24);
+    assert_eq!(
+        revealed_spks.collect::<Vec<_>>(),
+        (0..=24)
+            .map(|index| (index, spk_at_index(&internal_desc, index)))
+            .collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        revealed_additions.as_inner(),
+        &[(TestKeychain::Internal, 24)].into()
+    );
+    assert_eq!(
+        txout_index.inner().all_spks().len(),
+        10 /* external lookahead */ +
+        20 /* internal lookahead */ +
+        20 /* external stored index count */ +
+        25 /* internal stored index count */
+    );
+    assert_eq!(
+        txout_index
+            .revealed_spks_of_keychain(&TestKeychain::Internal)
+            .count(),
+        25,
+    );
+
+    // ensure the last revealed derivation indices are as expected for each keychain
+    let last_external_index = txout_index
+        .last_revealed_index(&TestKeychain::External)
+        .expect("already derived");
+    let last_internal_index = txout_index
+        .last_revealed_index(&TestKeychain::Internal)
+        .expect("already derived");
+    assert_eq!(last_external_index, 19);
+    assert_eq!(last_internal_index, 24);
+
+    // when:
+    // - scanning txouts with spks within stored indexes
+    // expect:
+    // - no changes to stored index counts
+    let external_iter = 0..=last_external_index;
+    let internal_iter = last_internal_index - last_external_index..=last_internal_index;
+    for (external_index, internal_index) in external_iter.zip(internal_iter) {
+        let tx = Transaction {
+            output: vec![
+                TxOut {
+                    script_pubkey: external_desc
+                        .at_derivation_index(external_index)
+                        .script_pubkey(),
+                    value: 10_000,
+                },
+                TxOut {
+                    script_pubkey: internal_desc
+                        .at_derivation_index(internal_index)
+                        .script_pubkey(),
+                    value: 10_000,
+                },
+            ],
+            ..common::new_tx(external_index)
+        };
+        assert_eq!(txout_index.scan(&tx), DerivationAdditions::default());
+        assert_eq!(
+            txout_index.last_revealed_index(&TestKeychain::External),
+            Some(last_external_index)
+        );
+        assert_eq!(
+            txout_index.last_revealed_index(&TestKeychain::Internal),
+            Some(last_internal_index)
+        );
+        assert_eq!(
+            txout_index
+                .revealed_spks_of_keychain(&TestKeychain::External)
+                .count(),
+            last_external_index as usize + 1,
+        );
+        assert_eq!(
+            txout_index
+                .revealed_spks_of_keychain(&TestKeychain::Internal)
+                .count(),
+            last_internal_index as usize + 1,
+        );
+    }
+
+    // when:
+    // - scanning txouts with spks above last stored index
+    // expect:
+    // - cached scripts count should increase as expected
+    // - last stored index should increase as expected
+    // TODO!
+}
+
+#[test]
+fn test_wildcard_derivations() {
+    let (mut txout_index, external_desc, _) = init_txout_index();
+    let external_spk_0 = external_desc.at_derivation_index(0).script_pubkey();
+    let external_spk_16 = external_desc.at_derivation_index(16).script_pubkey();
+    let external_spk_26 = external_desc.at_derivation_index(26).script_pubkey();
+    let external_spk_27 = external_desc.at_derivation_index(27).script_pubkey();
+
+    // - nothing is derived
+    // - unused list is also empty
+    //
+    // - next_index() == (0, true)
+    // - reveal_next_spk() == ((0, <spk>), DerivationAdditions)
+    // - next_unused_spk() == ((0, <spk>), DerivationAdditions::is_empty())
+    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(spk, (0_u32, &external_spk_0));
+    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(spk, (0_u32, &external_spk_0));
+    assert_eq!(changeset.as_inner(), &[].into());
+
+    // - derived up to index 25
+    // - spks up to index 15 all marked used
+    // - used list: [0..=15, 17, 20, 23]
+    // - unused list: [16, 18, 19, 21, 22, 24, 25]
+
+    // - next_index() == (26, true)
+    // - reveal_next_spk() == ((26, <spk>), DerivationAdditions)
+    // - next_unused_spk() == ((16, <spk>), DerivationAdditions::is_empty())
+    let _ = txout_index.reveal_to_target(&TestKeychain::External, 25);
+
+    (0..=15)
+        .into_iter()
+        .chain([17, 20, 23].into_iter())
+        .for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index)));
+
+    assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
+
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(spk, (26, &external_spk_26));
+
+    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 26)].into());
+
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(spk, (16, &external_spk_16));
+    assert_eq!(changeset.as_inner(), &[].into());
+
+    // - mark all derived spks up to index 26 as used
+    // - next_unused_spk() == ((27, <spk>), DerivationAdditions)
+    (0..=26).into_iter().for_each(|index| {
+        txout_index.mark_used(&TestKeychain::External, index);
+    });
+
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(spk, (27, &external_spk_27));
+    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 27)].into());
+}
+
+#[test]
+fn test_non_wildcard_derivations() {
+    let mut txout_index = KeychainTxOutIndex::<TestKeychain>::default();
+
+    let secp = bitcoin::secp256k1::Secp256k1::signing_only();
+    let (no_wildcard_descriptor, _) = Descriptor::<DescriptorPublicKey>::parse_descriptor(&secp, "wpkh([73c5da0a/86'/0'/0']xprv9xgqHN7yz9MwCkxsBPN5qetuNdQSUttZNKw1dcYTV4mkaAFiBVGQziHs3NRSWMkCzvgjEe3n9xV8oYywvM8at9yRqyaZVz6TYYhX98VjsUk/1/0)").unwrap();
+    let external_spk = no_wildcard_descriptor
+        .at_derivation_index(0)
+        .script_pubkey();
+
+    txout_index.add_keychain(TestKeychain::External, no_wildcard_descriptor);
+
+    // given:
+    // - `txout_index` with no stored scripts
+    // expect:
+    // - next derivation index should be new
+    // - when we derive a new script, script @ index 0
+    // - when we get the next unused script, script @ index 0
+    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, true));
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(spk, (0, &external_spk));
+    assert_eq!(changeset.as_inner(), &[(TestKeychain::External, 0)].into());
+
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(spk, (0, &external_spk));
+    assert_eq!(changeset.as_inner(), &[].into());
+
+    // given:
+    // - the non-wildcard descriptor already has a stored and used script
+    // expect:
+    // - next derivation index should not be new
+    // - derive new and next unused should return the old script
+    // - reveal_to_target should not panic and should return empty additions
+    assert_eq!(txout_index.next_index(&TestKeychain::External), (0, false));
+    txout_index.mark_used(&TestKeychain::External, 0);
+
+    let (spk, changeset) = txout_index.reveal_next_spk(&TestKeychain::External);
+    assert_eq!(spk, (0, &external_spk));
+    assert_eq!(changeset.as_inner(), &[].into());
+
+    let (spk, changeset) = txout_index.next_unused_spk(&TestKeychain::External);
+    assert_eq!(spk, (0, &external_spk));
+    assert_eq!(changeset.as_inner(), &[].into());
+    let (revealed_spks, revealed_additions) =
+        txout_index.reveal_to_target(&TestKeychain::External, 200);
+    assert_eq!(revealed_spks.count(), 0);
+    assert!(revealed_additions.is_empty());
+}
diff --git a/crates/chain/tests/test_sparse_chain.rs b/crates/chain/tests/test_sparse_chain.rs
new file mode 100644 (file)
index 0000000..76bb22c
--- /dev/null
@@ -0,0 +1,773 @@
+#[macro_use]
+mod common;
+
+use bdk_chain::{collections::BTreeSet, sparse_chain::*, BlockId, TxHeight};
+use bitcoin::{hashes::Hash, Txid};
+use core::ops::Bound;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub struct TestIndex(TxHeight, u32);
+
+impl ChainPosition for TestIndex {
+    fn height(&self) -> TxHeight {
+        self.0
+    }
+
+    fn max_ord_of_height(height: TxHeight) -> Self {
+        Self(height, u32::MAX)
+    }
+
+    fn min_ord_of_height(height: TxHeight) -> Self {
+        Self(height, u32::MIN)
+    }
+}
+
+impl TestIndex {
+    pub fn new<H>(height: H, ext: u32) -> Self
+    where
+        H: Into<TxHeight>,
+    {
+        Self(height.into(), ext)
+    }
+}
+
+#[test]
+fn add_first_checkpoint() {
+    let chain = SparseChain::default();
+    assert_eq!(
+        chain.determine_changeset(&chain!([0, h!("A")])),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A")))],
+            txids: []
+        },),
+        "add first tip"
+    );
+}
+
+#[test]
+fn add_second_tip() {
+    let chain = chain!([0, h!("A")]);
+    assert_eq!(
+        chain.determine_changeset(&chain!([0, h!("A")], [1, h!("B")])),
+        Ok(changeset! {
+            checkpoints: [(1, Some(h!("B")))],
+            txids: []
+        },),
+        "extend tip by one"
+    );
+}
+
+#[test]
+fn two_disjoint_chains_cannot_merge() {
+    let chain1 = chain!([0, h!("A")]);
+    let chain2 = chain!([1, h!("B")]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Err(UpdateError::NotConnected(0))
+    );
+}
+
+#[test]
+fn duplicate_chains_should_merge() {
+    let chain1 = chain!([0, h!("A")]);
+    let chain2 = chain!([0, h!("A")]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(ChangeSet::default())
+    );
+}
+
+#[test]
+fn duplicate_chains_with_txs_should_merge() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(ChangeSet::default())
+    );
+}
+
+#[test]
+fn duplicate_chains_with_different_txs_should_merge() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx1"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [],
+            txids: [(h!("tx1"), Some(TxHeight::Confirmed(0)))]
+        })
+    );
+}
+
+#[test]
+fn invalidate_first_and_only_checkpoint_without_tx_changes() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A'")))],
+            txids: []
+        },)
+    );
+}
+
+#[test]
+fn invalidate_first_and_only_checkpoint_with_tx_move_forward() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A'")],[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A'"))), (1, Some(h!("B")))],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
+        },)
+    );
+}
+
+#[test]
+fn invalidate_first_and_only_checkpoint_with_tx_move_backward() {
+    let chain1 = chain!(checkpoints: [[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A")],[1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A"))), (1, Some(h!("B'")))],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
+        },)
+    );
+}
+
+#[test]
+fn invalidate_a_checkpoint_and_try_and_move_tx_when_it_wasnt_within_invalidation() {
+    let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Err(UpdateError::TxInconsistent {
+            txid: h!("tx0"),
+            original_pos: TxHeight::Confirmed(0).into(),
+            update_pos: TxHeight::Confirmed(1).into(),
+        })
+    );
+}
+
+/// This test doesn't make much sense. We're invalidating the block at height 1 and moving the tx
+/// it contained down to height 0. It should be impossible for the tx to have been at height 1 at
+/// any point if it was at height 0 all along.
+#[test]
+fn move_invalidated_tx_into_earlier_checkpoint() {
+    let chain1 = chain!(checkpoints: [[0, h!("A")], [1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
+    let chain2 = chain!(checkpoints: [[0, h!("A")], [1, h!("B'")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(1, Some(h!("B'")))],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
+        },)
+    );
+}
+
+#[test]
+fn invalidate_first_and_only_checkpoint_with_tx_move_to_mempool() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    let chain2 = chain!(checkpoints: [[0,h!("A'")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A'")))],
+            txids: [(h!("tx0"), Some(TxHeight::Unconfirmed))]
+        },)
+    );
+}
+
+#[test]
+fn confirm_tx_without_extending_chain() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+    let chain2 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
+        },)
+    );
+}
+
+#[test]
+fn confirm_tx_backwards_while_extending_chain() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+    let chain2 = chain!(checkpoints: [[0,h!("A")],[1,h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(0))]);
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(1, Some(h!("B")))],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(0)))]
+        },)
+    );
+}
+
+#[test]
+fn confirm_tx_in_new_block() {
+    let chain1 = chain!(checkpoints: [[0,h!("A")]], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+    let chain2 = chain! {
+        checkpoints: [[0,h!("A")], [1,h!("B")]],
+        txids: [(h!("tx0"), TxHeight::Confirmed(1))]
+    };
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(1, Some(h!("B")))],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
+        },)
+    );
+}
+
+#[test]
+fn merging_mempool_of_empty_chains_doesnt_fail() {
+    let chain1 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+    let chain2 = chain!(checkpoints: [], txids: [(h!("tx1"), TxHeight::Unconfirmed)]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [],
+            txids: [(h!("tx1"), Some(TxHeight::Unconfirmed))]
+        },)
+    );
+}
+
+#[test]
+fn cannot_insert_confirmed_tx_without_checkpoints() {
+    let chain = SparseChain::default();
+    assert_eq!(
+        chain.insert_tx_preview(h!("A"), TxHeight::Confirmed(0)),
+        Err(InsertTxError::TxTooHigh {
+            txid: h!("A"),
+            tx_height: 0,
+            tip_height: None
+        })
+    );
+}
+
+#[test]
+fn empty_chain_can_add_unconfirmed_transactions() {
+    let chain1 = chain!(checkpoints: [[0, h!("A")]], txids: []);
+    let chain2 = chain!(checkpoints: [], txids: [(h!("tx0"), TxHeight::Unconfirmed)]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [],
+            txids: [ (h!("tx0"), Some(TxHeight::Unconfirmed)) ]
+        },)
+    );
+}
+
+#[test]
+fn can_update_with_shorter_chain() {
+    let chain1 = chain!(checkpoints: [[1, h!("B")],[2, h!("C")]], txids: []);
+    let chain2 = chain!(checkpoints: [[1, h!("B")]], txids: [(h!("tx0"), TxHeight::Confirmed(1))]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [],
+            txids: [(h!("tx0"), Some(TxHeight::Confirmed(1)))]
+        },)
+    )
+}
+
+#[test]
+fn can_introduce_older_checkpoints() {
+    let chain1 = chain!(checkpoints: [[2, h!("C")], [3, h!("D")]], txids: []);
+    let chain2 = chain!(checkpoints: [[1, h!("B")], [2, h!("C")]], txids: []);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(1, Some(h!("B")))],
+            txids: []
+        },)
+    );
+}
+
+#[test]
+fn fix_blockhash_before_agreement_point() {
+    let chain1 = chain!([0, h!("im-wrong")], [1, h!("we-agree")]);
+    let chain2 = chain!([0, h!("fix")], [1, h!("we-agree")]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("fix")))],
+            txids: []
+        },)
+    )
+}
+
+// TODO: Use macro
+#[test]
+fn cannot_change_ext_index_of_confirmed_tx() {
+    let chain1 = chain!(
+        index: TestIndex,
+        checkpoints: [[1, h!("A")]],
+        txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 10))]
+    );
+    let chain2 = chain!(
+        index: TestIndex,
+        checkpoints: [[1, h!("A")]],
+        txids: [(h!("tx0"), TestIndex(TxHeight::Confirmed(1), 20))]
+    );
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Err(UpdateError::TxInconsistent {
+            txid: h!("tx0"),
+            original_pos: TestIndex(TxHeight::Confirmed(1), 10),
+            update_pos: TestIndex(TxHeight::Confirmed(1), 20),
+        }),
+    )
+}
+
+#[test]
+fn can_change_index_of_unconfirmed_tx() {
+    let chain1 = chain!(
+        index: TestIndex,
+        checkpoints: [[1, h!("A")]],
+        txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 10))]
+    );
+    let chain2 = chain!(
+        index: TestIndex,
+        checkpoints: [[1, h!("A")]],
+        txids: [(h!("tx1"), TestIndex(TxHeight::Unconfirmed, 20))]
+    );
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(ChangeSet {
+            checkpoints: [].into(),
+            txids: [(h!("tx1"), Some(TestIndex(TxHeight::Unconfirmed, 20)),)].into()
+        },),
+    )
+}
+
+/// B and C are in both chain and update
+/// ```
+///        | 0 | 1 | 2 | 3 | 4
+/// chain  |     B   C
+/// update | A   B   C   D
+/// ```
+/// This should succeed, with the point of agreement being C; A (and the new tip D) should be added.
+#[test]
+fn two_points_of_agreement() {
+    let chain1 = chain!([1, h!("B")], [2, h!("C")]);
+    let chain2 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [3, h!("D")]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [(0, Some(h!("A"))), (3, Some(h!("D")))]
+        },),
+    );
+}
+
+/// The update and the chain do not connect:
+/// ```
+///        | 0 | 1 | 2 | 3 | 4
+/// chain  |     B   C
+/// update | A   B       D
+/// ```
+/// This should fail as we cannot figure out whether C & D are on the same chain
+#[test]
+fn update_and_chain_does_not_connect() {
+    let chain1 = chain!([1, h!("B")], [2, h!("C")]);
+    let chain2 = chain!([0, h!("A")], [1, h!("B")], [3, h!("D")]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Err(UpdateError::NotConnected(2)),
+    );
+}
+
+/// Transient invalidation:
+/// ```
+///        | 0 | 1 | 2 | 3 | 4 | 5
+/// chain  | A       B   C       E
+/// update | A       B'  C'  D
+/// ```
+/// This should succeed and invalidate B, C and E, with the point of agreement being A.
+/// It should also invalidate transactions at height 1.
+#[test]
+fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation() {
+    let chain1 = chain! {
+        checkpoints: [[0, h!("A")], [2, h!("B")], [3, h!("C")], [5, h!("E")]],
+        txids: [
+            (h!("a"), TxHeight::Confirmed(0)),
+            (h!("b1"), TxHeight::Confirmed(1)),
+            (h!("b2"), TxHeight::Confirmed(2)),
+            (h!("d"), TxHeight::Confirmed(3)),
+            (h!("e"), TxHeight::Confirmed(5))
+        ]
+    };
+    let chain2 = chain! {
+        checkpoints: [[0, h!("A")], [2, h!("B'")], [3, h!("C'")], [4, h!("D")]],
+        txids: [(h!("b1"), TxHeight::Confirmed(4)), (h!("b2"), TxHeight::Confirmed(3))]
+    };
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [
+                (2, Some(h!("B'"))),
+                (3, Some(h!("C'"))),
+                (4, Some(h!("D"))),
+                (5, None)
+            ],
+            txids: [
+                (h!("b1"), Some(TxHeight::Confirmed(4))),
+                (h!("b2"), Some(TxHeight::Confirmed(3))),
+                (h!("d"), Some(TxHeight::Unconfirmed)),
+                (h!("e"), Some(TxHeight::Unconfirmed))
+            ]
+        },)
+    );
+}
+
+/// Transient invalidation:
+/// ```
+///        | 0 | 1 | 2 | 3 | 4
+/// chain  |     B   C       E
+/// update |     B'  C'  D
+/// ```
+///
+/// This should succeed and invalidate B, C and E with no point of agreement
+#[test]
+fn transitive_invalidation_applies_to_checkpoints_higher_than_invalidation_no_point_of_agreement() {
+    let chain1 = chain!([1, h!("B")], [2, h!("C")], [4, h!("E")]);
+    let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [
+                (1, Some(h!("B'"))),
+                (2, Some(h!("C'"))),
+                (3, Some(h!("D"))),
+                (4, None)
+            ]
+        },)
+    )
+}
+
+/// Transient invalidation:
+/// ```
+///        | 0 | 1 | 2 | 3 | 4
+/// chain  | A   B   C       E
+/// update |     B'  C'  D
+/// ```
+///
+/// This should fail: although it tells us that B and C are invalid, it doesn't tell us whether
+/// A was invalid.
+#[test]
+fn invalidation_but_no_connection() {
+    let chain1 = chain!([0, h!("A")], [1, h!("B")], [2, h!("C")], [4, h!("E")]);
+    let chain2 = chain!([1, h!("B'")], [2, h!("C'")], [3, h!("D")]);
+
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Err(UpdateError::NotConnected(0))
+    )
+}
+
+#[test]
+fn checkpoint_limit_is_respected() {
+    let mut chain1 = SparseChain::default();
+    let _ = chain1
+        .apply_update(chain!(
+            [1, h!("A")],
+            [2, h!("B")],
+            [3, h!("C")],
+            [4, h!("D")],
+            [5, h!("E")]
+        ))
+        .unwrap();
+
+    assert_eq!(chain1.checkpoints().len(), 5);
+    chain1.set_checkpoint_limit(Some(4));
+    assert_eq!(chain1.checkpoints().len(), 4);
+
+    let _ = chain1
+        .insert_checkpoint(BlockId {
+            height: 6,
+            hash: h!("F"),
+        })
+        .unwrap();
+    assert_eq!(chain1.checkpoints().len(), 4);
+
+    let changeset = chain1.determine_changeset(&chain!([6, h!("F")], [7, h!("G")]));
+    assert_eq!(changeset, Ok(changeset!(checkpoints: [(7, Some(h!("G")))])));
+
+    chain1.apply_changeset(changeset.unwrap());
+
+    assert_eq!(chain1.checkpoints().len(), 4);
+}
+
+#[test]
+fn range_txids_by_height() {
+    let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")], [2, h!("block 2")]]);
+
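+    // The `TestIndex` used here combines a `TxHeight` with a `u32` position; using
+    // `u32::MIN` and `u32::MAX` ensures the height-only ranges below must capture every
+    // entry at a given height.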
+    let txids: [(TestIndex, Txid); 4] = [
+        (
+            TestIndex(TxHeight::Confirmed(1), u32::MIN),
+            Txid::from_inner([0x00; 32]),
+        ),
+        (
+            TestIndex(TxHeight::Confirmed(1), u32::MAX),
+            Txid::from_inner([0xfe; 32]),
+        ),
+        (
+            TestIndex(TxHeight::Confirmed(2), u32::MIN),
+            Txid::from_inner([0x01; 32]),
+        ),
+        (
+            TestIndex(TxHeight::Confirmed(2), u32::MAX),
+            Txid::from_inner([0xff; 32]),
+        ),
+    ];
+
+    // populate chain with txids
+    for (index, txid) in txids {
+        let _ = chain.insert_tx(txid, index).expect("should succeed");
+    }
+
+    // inclusive start
+    assert_eq!(
+        chain
+            .range_txids_by_height(TxHeight::Confirmed(1)..)
+            .collect::<Vec<_>>(),
+        txids.iter().collect::<Vec<_>>(),
+    );
+
+    // exclusive start
+    assert_eq!(
+        chain
+            .range_txids_by_height((Bound::Excluded(TxHeight::Confirmed(1)), Bound::Unbounded,))
+            .collect::<Vec<_>>(),
+        txids[2..].iter().collect::<Vec<_>>(),
+    );
+
+    // inclusive end
+    assert_eq!(
+        chain
+            .range_txids_by_height((Bound::Unbounded, Bound::Included(TxHeight::Confirmed(2))))
+            .collect::<Vec<_>>(),
+        txids[..4].iter().collect::<Vec<_>>(),
+    );
+
+    // exclusive end
+    assert_eq!(
+        chain
+            .range_txids_by_height(..TxHeight::Confirmed(2))
+            .collect::<Vec<_>>(),
+        txids[..2].iter().collect::<Vec<_>>(),
+    );
+}
+
+#[test]
+fn range_txids_by_index() {
+    let mut chain = chain!(index: TestIndex, checkpoints: [[1, h!("block 1")],[2, h!("block 2")]]);
+
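+    // Unlike `range_txids_by_height`, these ranges take a full `TestIndex`, so entries
+    // at the same height are distinguished by the `u32` component.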
+    let txids: [(TestIndex, Txid); 4] = [
+        (TestIndex(TxHeight::Confirmed(1), u32::MIN), h!("tx 1 min")),
+        (TestIndex(TxHeight::Confirmed(1), u32::MAX), h!("tx 1 max")),
+        (TestIndex(TxHeight::Confirmed(2), u32::MIN), h!("tx 2 min")),
+        (TestIndex(TxHeight::Confirmed(2), u32::MAX), h!("tx 2 max")),
+    ];
+
+    // populate chain with txids
+    for (index, txid) in txids {
+        let _ = chain.insert_tx(txid, index).expect("should succeed");
+    }
+
+    // inclusive start
+    assert_eq!(
+        chain
+            .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MIN)..)
+            .collect::<Vec<_>>(),
+        txids.iter().collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        chain
+            .range_txids_by_position(TestIndex(TxHeight::Confirmed(1), u32::MAX)..)
+            .collect::<Vec<_>>(),
+        txids[1..].iter().collect::<Vec<_>>(),
+    );
+
+    // exclusive start
+    assert_eq!(
+        chain
+            .range_txids_by_position((
+                Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MIN)),
+                Bound::Unbounded
+            ))
+            .collect::<Vec<_>>(),
+        txids[1..].iter().collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        chain
+            .range_txids_by_position((
+                Bound::Excluded(TestIndex(TxHeight::Confirmed(1), u32::MAX)),
+                Bound::Unbounded
+            ))
+            .collect::<Vec<_>>(),
+        txids[2..].iter().collect::<Vec<_>>(),
+    );
+
+    // inclusive end
+    assert_eq!(
+        chain
+            .range_txids_by_position((
+                Bound::Unbounded,
+                Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MIN))
+            ))
+            .collect::<Vec<_>>(),
+        txids[..3].iter().collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        chain
+            .range_txids_by_position((
+                Bound::Unbounded,
+                Bound::Included(TestIndex(TxHeight::Confirmed(2), u32::MAX))
+            ))
+            .collect::<Vec<_>>(),
+        txids[..4].iter().collect::<Vec<_>>(),
+    );
+
+    // exclusive end
+    assert_eq!(
+        chain
+            .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MIN))
+            .collect::<Vec<_>>(),
+        txids[..2].iter().collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        chain
+            .range_txids_by_position(..TestIndex(TxHeight::Confirmed(2), u32::MAX))
+            .collect::<Vec<_>>(),
+        txids[..3].iter().collect::<Vec<_>>(),
+    );
+}
+
+#[test]
+fn range_txids() {
+    let mut chain = SparseChain::default();
+
+    let txids = (0..100)
+        .map(|v| Txid::hash(v.to_string().as_bytes()))
+        .collect::<BTreeSet<Txid>>();
+
+    // populate chain
+    for txid in &txids {
+        let _ = chain
+            .insert_tx(*txid, TxHeight::Unconfirmed)
+            .expect("should succeed");
+    }
+
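+    // Every tx sits at `TxHeight::Unconfirmed`, so ranging by `(Unconfirmed, txid)` should
+    // agree with ranging the plain `BTreeSet` of txids directly.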
+    for txid in &txids {
+        assert_eq!(
+            chain
+                .range_txids((TxHeight::Unconfirmed, *txid)..)
+                .map(|(_, txid)| txid)
+                .collect::<Vec<_>>(),
+            txids.range(*txid..).collect::<Vec<_>>(),
+            "range with inclusive start should succeed"
+        );
+
+        assert_eq!(
+            chain
+                .range_txids((
+                    Bound::Excluded((TxHeight::Unconfirmed, *txid)),
+                    Bound::Unbounded,
+                ))
+                .map(|(_, txid)| txid)
+                .collect::<Vec<_>>(),
+            txids
+                .range((Bound::Excluded(*txid), Bound::Unbounded,))
+                .collect::<Vec<_>>(),
+            "range with exclusive start should succeed"
+        );
+
+        assert_eq!(
+            chain
+                .range_txids(..(TxHeight::Unconfirmed, *txid))
+                .map(|(_, txid)| txid)
+                .collect::<Vec<_>>(),
+            txids.range(..*txid).collect::<Vec<_>>(),
+            "range with exclusive end should succeed"
+        );
+
+        assert_eq!(
+            chain
+                .range_txids((
+                    Bound::Unbounded,
+                    Bound::Included((TxHeight::Unconfirmed, *txid)),
+                ))
+                .map(|(_, txid)| txid)
+                .collect::<Vec<_>>(),
+            txids
+                .range((Bound::Unbounded, Bound::Included(*txid)))
+                .collect::<Vec<_>>(),
+            "range with inclusive end should succeed"
+        );
+    }
+}
+
+#[test]
+fn invalidated_txs_move_to_unconfirmed() {
+    let chain1 = chain! {
+        checkpoints: [[0, h!("A")], [1, h!("B")], [2, h!("C")]],
+        txids: [
+            (h!("a"), TxHeight::Confirmed(0)),
+            (h!("b"), TxHeight::Confirmed(1)),
+            (h!("c"), TxHeight::Confirmed(2)),
+            (h!("d"), TxHeight::Unconfirmed)
+        ]
+    };
+
+    let chain2 = chain!([0, h!("A")], [1, h!("B'")]);
+
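+    // Replacing B with B' invalidates heights 1 and up: b and c fall back to unconfirmed,
+    // while a (confirmed at the point of agreement) and d (already unconfirmed) are untouched.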
+    assert_eq!(
+        chain1.determine_changeset(&chain2),
+        Ok(changeset! {
+            checkpoints: [
+                (1, Some(h!("B'"))),
+                (2, None)
+            ],
+            txids: [
+                (h!("b"), Some(TxHeight::Unconfirmed)),
+                (h!("c"), Some(TxHeight::Unconfirmed))
+            ]
+        },)
+    );
+}
+
+#[test]
+fn change_tx_position_from_unconfirmed_to_confirmed() {
+    let mut chain = SparseChain::<TxHeight>::default();
+    let txid = h!("txid");
+
+    let _ = chain.insert_tx(txid, TxHeight::Unconfirmed).unwrap();
+
+    assert_eq!(chain.tx_position(txid), Some(&TxHeight::Unconfirmed));
+    let _ = chain
+        .insert_checkpoint(BlockId {
+            height: 0,
+            hash: h!("0"),
+        })
+        .unwrap();
+    let _ = chain.insert_tx(txid, TxHeight::Confirmed(0)).unwrap();
+
+    assert_eq!(chain.tx_position(txid), Some(&TxHeight::Confirmed(0)));
+}
diff --git a/crates/chain/tests/test_spk_txout_index.rs b/crates/chain/tests/test_spk_txout_index.rs
new file mode 100644 (file)
index 0000000..a49df08
--- /dev/null
@@ -0,0 +1,101 @@
+use bdk_chain::SpkTxOutIndex;
+use bitcoin::{hashes::hex::FromHex, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut};
+
+#[test]
+fn spk_txout_sent_and_received() {
+    let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap();
+    let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap();
+
+    let mut index = SpkTxOutIndex::default();
+    index.insert_spk(0, spk1.clone());
+    index.insert_spk(1, spk2.clone());
+
+    let tx1 = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 42_000,
+            script_pubkey: spk1.clone(),
+        }],
+    };
+
+    assert_eq!(index.sent_and_received(&tx1), (0, 42_000));
+    assert_eq!(index.net_value(&tx1), 42_000);
+    index.scan(&tx1);
+    assert_eq!(
+        index.sent_and_received(&tx1),
+        (0, 42_000),
+        "shouldn't change after scanning"
+    );
+
+    let tx2 = Transaction {
+        version: 0x1,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint {
+                txid: tx1.txid(),
+                vout: 0,
+            },
+            ..Default::default()
+        }],
+        output: vec![
+            TxOut {
+                value: 20_000,
+                script_pubkey: spk2.clone(),
+            },
+            TxOut {
+                script_pubkey: spk1.clone(),
+                value: 30_000,
+            },
+        ],
+    };
+
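+    // tx2 spends the 42_000 output tracked for spk1 and creates 20_000 to spk2 plus
+    // 30_000 back to spk1, so sent = 42_000, received = 50_000 and the net value is +8_000.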
+    assert_eq!(index.sent_and_received(&tx2), (42_000, 50_000));
+    assert_eq!(index.net_value(&tx2), 8_000);
+}
+
+#[test]
+fn mark_used() {
+    let spk1 = Script::from_hex("001404f1e52ce2bab3423c6a8c63b7cd730d8f12542c").unwrap();
+    let spk2 = Script::from_hex("00142b57404ae14f08c3a0c903feb2af7830605eb00f").unwrap();
+
+    let mut spk_index = SpkTxOutIndex::default();
+    spk_index.insert_spk(1, spk1.clone());
+    spk_index.insert_spk(2, spk2.clone());
+
+    assert_eq!(spk_index.is_used(&1), false);
+    spk_index.mark_used(&1);
+    assert_eq!(spk_index.is_used(&1), true);
+    spk_index.unmark_used(&1);
+    assert_eq!(spk_index.is_used(&1), false);
+    spk_index.mark_used(&1);
+    assert_eq!(spk_index.is_used(&1), true);
+
+    let tx1 = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 42_000,
+            script_pubkey: spk1.clone(),
+        }],
+    };
+
+    spk_index.scan(&tx1);
+    spk_index.unmark_used(&1);
+    assert_eq!(
+        spk_index.is_used(&1),
+        true,
+        "even though we unmark_used it doesn't matter because there was a tx scanned that used it"
+    );
+}
+
+#[test]
+fn unmark_used_does_not_result_in_invalid_representation() {
+    let mut spk_index = SpkTxOutIndex::default();
+    assert_eq!(spk_index.unmark_used(&0), false);
+    assert_eq!(spk_index.unmark_used(&1), false);
+    assert_eq!(spk_index.unmark_used(&2), false);
+    assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty());
+}
diff --git a/crates/chain/tests/test_tx_graph.rs b/crates/chain/tests/test_tx_graph.rs
new file mode 100644 (file)
index 0000000..b7e333c
--- /dev/null
@@ -0,0 +1,512 @@
+#[macro_use]
+mod common;
+use bdk_chain::{
+    collections::*,
+    tx_graph::{Additions, TxGraph},
+};
+use bitcoin::{hashes::Hash, OutPoint, PackedLockTime, Script, Transaction, TxIn, TxOut, Txid};
+use core::iter;
+
+#[test]
+fn insert_txouts() {
+    let original_ops = [
+        (
+            OutPoint::new(h!("tx1"), 1),
+            TxOut {
+                value: 10_000,
+                script_pubkey: Script::new(),
+            },
+        ),
+        (
+            OutPoint::new(h!("tx1"), 2),
+            TxOut {
+                value: 20_000,
+                script_pubkey: Script::new(),
+            },
+        ),
+    ];
+
+    let update_ops = [(
+        OutPoint::new(h!("tx2"), 0),
+        TxOut {
+            value: 20_000,
+            script_pubkey: Script::new(),
+        },
+    )];
+
+    let mut graph = {
+        let mut graph = TxGraph::<Transaction>::default();
+        for (outpoint, txout) in &original_ops {
+            assert_eq!(
+                graph.insert_txout(*outpoint, txout.clone()),
+                Additions {
+                    txout: [(*outpoint, txout.clone())].into(),
+                    ..Default::default()
+                }
+            );
+        }
+        graph
+    };
+
+    let update = {
+        let mut graph = TxGraph::<Transaction>::default();
+        for (outpoint, txout) in &update_ops {
+            assert_eq!(
+                graph.insert_txout(*outpoint, txout.clone()),
+                Additions {
+                    txout: [(*outpoint, txout.clone())].into(),
+                    ..Default::default()
+                }
+            );
+        }
+        graph
+    };
+
+    let additions = graph.determine_additions(&update);
+
+    assert_eq!(
+        additions,
+        Additions {
+            tx: [].into(),
+            txout: update_ops.into(),
+        }
+    );
+
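+    // Applying the additions should leave the graph with 3 floating txouts belonging to
+    // 2 partial transactions, and still no full transactions.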
+    graph.apply_additions(additions);
+    assert_eq!(graph.all_txouts().count(), 3);
+    assert_eq!(graph.full_transactions().count(), 0);
+    assert_eq!(graph.partial_transactions().count(), 2);
+}
+
+#[test]
+fn insert_tx_graph_doesnt_count_coinbase_as_spent() {
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::null(),
+            ..Default::default()
+        }],
+        output: vec![],
+    };
+
+    let mut graph = TxGraph::default();
+    let _ = graph.insert_tx(tx);
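+    // The coinbase input's null previous_output must not be recorded as spending anything.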
+    assert!(graph.outspends(OutPoint::null()).is_empty());
+    assert!(graph.tx_outspends(Txid::all_zeros()).next().is_none());
+}
+
+#[test]
+fn insert_tx_graph_keeps_track_of_spend() {
+    let tx1 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut::default()],
+    };
+
+    let op = OutPoint {
+        txid: tx1.txid(),
+        vout: 0,
+    };
+
+    let tx2 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: op,
+            ..Default::default()
+        }],
+        output: vec![],
+    };
+
+    let mut graph1 = TxGraph::default();
+    let mut graph2 = TxGraph::default();
+
+    // insert in different order
+    let _ = graph1.insert_tx(tx1.clone());
+    let _ = graph1.insert_tx(tx2.clone());
+
+    let _ = graph2.insert_tx(tx2.clone());
+    let _ = graph2.insert_tx(tx1.clone());
+
+    assert_eq!(
+        &*graph1.outspends(op),
+        &iter::once(tx2.txid()).collect::<HashSet<_>>()
+    );
+    assert_eq!(graph2.outspends(op), graph1.outspends(op));
+}
+
+#[test]
+fn insert_tx_can_retrieve_full_tx_from_graph() {
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::null(),
+            ..Default::default()
+        }],
+        output: vec![TxOut::default()],
+    };
+
+    let mut graph = TxGraph::default();
+    let _ = graph.insert_tx(tx.clone());
+    assert_eq!(graph.get_tx(tx.txid()), Some(&tx));
+}
+
+#[test]
+fn insert_tx_displaces_txouts() {
+    let mut tx_graph = TxGraph::default();
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 42_000,
+            script_pubkey: Script::default(),
+        }],
+    };
+
+    let _ = tx_graph.insert_txout(
+        OutPoint {
+            txid: tx.txid(),
+            vout: 0,
+        },
+        TxOut {
+            value: 1_337_000,
+            script_pubkey: Script::default(),
+        },
+    );
+
+    let _ = tx_graph.insert_txout(
+        OutPoint {
+            txid: tx.txid(),
+            vout: 0,
+        },
+        TxOut {
+            value: 1_000_000_000,
+            script_pubkey: Script::default(),
+        },
+    );
+
+    let _additions = tx_graph.insert_tx(tx.clone());
+
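+    // Inserting the full tx replaces the floating txouts added above: vout 0 now reports
+    // the tx's own 42_000 value and vout 1 does not exist.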
+    assert_eq!(
+        tx_graph
+            .get_txout(OutPoint {
+                txid: tx.txid(),
+                vout: 0
+            })
+            .unwrap()
+            .value,
+        42_000
+    );
+    assert_eq!(
+        tx_graph.get_txout(OutPoint {
+            txid: tx.txid(),
+            vout: 1
+        }),
+        None
+    );
+}
+
+#[test]
+fn insert_txout_does_not_displace_tx() {
+    let mut tx_graph = TxGraph::default();
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 42_000,
+            script_pubkey: Script::default(),
+        }],
+    };
+
+    let _additions = tx_graph.insert_tx(tx.clone());
+
+    let _ = tx_graph.insert_txout(
+        OutPoint {
+            txid: tx.txid(),
+            vout: 0,
+        },
+        TxOut {
+            value: 1_337_000,
+            script_pubkey: Script::default(),
+        },
+    );
+
+    let _ = tx_graph.insert_txout(
+        OutPoint {
+            txid: tx.txid(),
+            vout: 0,
+        },
+        TxOut {
+            value: 1_000_000_000,
+            script_pubkey: Script::default(),
+        },
+    );
+
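+    // The floating txouts inserted after the full tx are ignored: vout 0 keeps the tx's
+    // 42_000 value and vout 1 still does not exist.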
+    assert_eq!(
+        tx_graph
+            .get_txout(OutPoint {
+                txid: tx.txid(),
+                vout: 0
+            })
+            .unwrap()
+            .value,
+        42_000
+    );
+    assert_eq!(
+        tx_graph.get_txout(OutPoint {
+            txid: tx.txid(),
+            vout: 1
+        }),
+        None
+    );
+}
+
+#[test]
+fn test_calculate_fee() {
+    let mut graph = TxGraph::default();
+    let intx1 = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 100,
+            ..Default::default()
+        }],
+    };
+    let intx2 = Transaction {
+        version: 0x02,
+        lock_time: PackedLockTime(0),
+        input: vec![],
+        output: vec![TxOut {
+            value: 200,
+            ..Default::default()
+        }],
+    };
+
+    let intxout1 = (
+        OutPoint {
+            txid: h!("dangling output"),
+            vout: 0,
+        },
+        TxOut {
+            value: 300,
+            ..Default::default()
+        },
+    );
+
+    let _ = graph.insert_tx(intx1.clone());
+    let _ = graph.insert_tx(intx2.clone());
+    let _ = graph.insert_txout(intxout1.0, intxout1.1);
+
+    let mut tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![
+            TxIn {
+                previous_output: OutPoint {
+                    txid: intx1.txid(),
+                    vout: 0,
+                },
+                ..Default::default()
+            },
+            TxIn {
+                previous_output: OutPoint {
+                    txid: intx2.txid(),
+                    vout: 0,
+                },
+                ..Default::default()
+            },
+            TxIn {
+                previous_output: intxout1.0,
+                ..Default::default()
+            },
+        ],
+        output: vec![TxOut {
+            value: 500,
+            ..Default::default()
+        }],
+    };
+
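+    // Inputs sum to 600 (100 + 200 + 300) and the single output is 500, so the fee is 100.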
+    assert_eq!(graph.calculate_fee(&tx), Some(100));
+
+    tx.input.remove(2);
+
+    // fee would be negative
+    assert_eq!(graph.calculate_fee(&tx), Some(-200));
+
+    // If we have an unknown outpoint, fee should return None.
+    tx.input.push(TxIn {
+        previous_output: OutPoint {
+            txid: h!("unknown_txid"),
+            vout: 0,
+        },
+        ..Default::default()
+    });
+    assert_eq!(graph.calculate_fee(&tx), None);
+}
+
+#[test]
+fn test_calculate_fee_on_coinbase() {
+    let tx = Transaction {
+        version: 0x01,
+        lock_time: PackedLockTime(0),
+        input: vec![TxIn {
+            previous_output: OutPoint::null(),
+            ..Default::default()
+        }],
+        output: vec![TxOut::default()],
+    };
+
+    let graph = TxGraph::<Transaction>::default();
+
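+    // The coinbase input spends the null outpoint, so there is nothing to look up and the
+    // fee is reported as 0.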
+    assert_eq!(graph.calculate_fee(&tx), Some(0));
+}
+
+#[test]
+fn test_conflicting_descendants() {
+    let previous_output = OutPoint::new(h!("op"), 2);
+
+    // tx_a spends previous_output
+    let tx_a = Transaction {
+        input: vec![TxIn {
+            previous_output,
+            ..TxIn::default()
+        }],
+        output: vec![TxOut::default()],
+        ..common::new_tx(0)
+    };
+
+    // tx_a2 spends previous_output and conflicts with tx_a
+    let tx_a2 = Transaction {
+        input: vec![TxIn {
+            previous_output,
+            ..TxIn::default()
+        }],
+        output: vec![TxOut::default(), TxOut::default()],
+        ..common::new_tx(1)
+    };
+
+    // tx_b spends tx_a
+    let tx_b = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_a.txid(), 0),
+            ..TxIn::default()
+        }],
+        output: vec![TxOut::default()],
+        ..common::new_tx(2)
+    };
+
+    let txid_a = tx_a.txid();
+    let txid_b = tx_b.txid();
+
+    let mut graph = TxGraph::default();
+    let _ = graph.insert_tx(tx_a);
+    let _ = graph.insert_tx(tx_b);
+
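+    // tx_a2 conflicts with tx_a directly (depth 0) and with tx_b transitively, since tx_b
+    // spends tx_a's output (depth 1).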
+    assert_eq!(
+        graph
+            .walk_conflicts(&tx_a2, |depth, txid| Some((depth, txid)))
+            .collect::<Vec<_>>(),
+        vec![(0_usize, txid_a), (1_usize, txid_b),],
+    );
+}
+
+#[test]
+fn test_descendants_no_repeat() {
+    let tx_a = Transaction {
+        output: vec![TxOut::default(), TxOut::default(), TxOut::default()],
+        ..common::new_tx(0)
+    };
+
+    let txs_b = (0..3)
+        .map(|vout| Transaction {
+            input: vec![TxIn {
+                previous_output: OutPoint::new(tx_a.txid(), vout),
+                ..TxIn::default()
+            }],
+            output: vec![TxOut::default()],
+            ..common::new_tx(1)
+        })
+        .collect::<Vec<_>>();
+
+    let txs_c = (0..2)
+        .map(|vout| Transaction {
+            input: vec![TxIn {
+                previous_output: OutPoint::new(txs_b[vout as usize].txid(), vout),
+                ..TxIn::default()
+            }],
+            output: vec![TxOut::default()],
+            ..common::new_tx(2)
+        })
+        .collect::<Vec<_>>();
+
+    let tx_d = Transaction {
+        input: vec![
+            TxIn {
+                previous_output: OutPoint::new(txs_c[0].txid(), 0),
+                ..TxIn::default()
+            },
+            TxIn {
+                previous_output: OutPoint::new(txs_c[1].txid(), 0),
+                ..TxIn::default()
+            },
+        ],
+        output: vec![TxOut::default()],
+        ..common::new_tx(3)
+    };
+
+    let tx_e = Transaction {
+        input: vec![TxIn {
+            previous_output: OutPoint::new(tx_d.txid(), 0),
+            ..TxIn::default()
+        }],
+        output: vec![TxOut::default()],
+        ..common::new_tx(4)
+    };
+
+    let txs_not_connected = (10..20)
+        .map(|v| Transaction {
+            input: vec![TxIn {
+                previous_output: OutPoint::new(h!("tx_does_not_exist"), v),
+                ..TxIn::default()
+            }],
+            output: vec![TxOut::default()],
+            ..common::new_tx(v)
+        })
+        .collect::<Vec<_>>();
+
+    let mut graph = TxGraph::default();
+    let mut expected_txids = BTreeSet::new();
+
+    // these are NOT descendants of `tx_a`
+    for tx in txs_not_connected {
+        let _ = graph.insert_tx(tx.clone());
+    }
+
+    // these are the expected descendants of `tx_a`
+    for tx in txs_b
+        .iter()
+        .chain(&txs_c)
+        .chain(core::iter::once(&tx_d))
+        .chain(core::iter::once(&tx_e))
+    {
+        let _ = graph.insert_tx(tx.clone());
+        assert!(expected_txids.insert(tx.txid()));
+    }
+
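+    // tx_d is reachable through both txs_c[0] and txs_c[1], but each descendant (including
+    // tx_d and tx_e) must be reported exactly once.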
+    let descendants = graph
+        .walk_descendants(tx_a.txid(), |_, txid| Some(txid))
+        .collect::<Vec<_>>();
+
+    assert_eq!(descendants.len(), expected_txids.len());
+
+    for txid in descendants {
+        assert!(expected_txids.remove(&txid));
+    }
+    assert!(expected_txids.is_empty());
+}