fn is_witness(&self) -> bool;
fn is_taproot(&self) -> bool;
fn get_extended_keys(&self) -> Vec<DescriptorXKey<ExtendedPubKey>>;
- fn derive_from_hd_keypaths<'s>(
+ fn derive_from_hd_keypaths(
&self,
hd_keypaths: &HdKeyPaths,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
- fn derive_from_tap_key_origins<'s>(
+ fn derive_from_tap_key_origins(
&self,
tap_key_origins: &TapKeyOrigins,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
- fn derive_from_psbt_key_origins<'s>(
+ fn derive_from_psbt_key_origins(
&self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
- fn derive_from_psbt_input<'s>(
+ fn derive_from_psbt_input(
&self,
psbt_input: &psbt::Input,
utxo: Option<TxOut>,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor>;
}
answer
}
- fn derive_from_psbt_key_origins<'s>(
+ fn derive_from_psbt_key_origins(
&self,
key_origins: BTreeMap<Fingerprint, (&DerivationPath, SinglePubKey)>,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// Ensure that deriving `xpub` with `path` yields `expected`
let verify_key = |xpub: &DescriptorXKey<ExtendedPubKey>,
path_found.map(|path| self.at_derivation_index(path))
}
- fn derive_from_hd_keypaths<'s>(
+ fn derive_from_hd_keypaths(
&self,
hd_keypaths: &HdKeyPaths,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// "Convert" an hd_keypaths map to the format required by `derive_from_psbt_key_origins`
let key_origins = hd_keypaths
self.derive_from_psbt_key_origins(key_origins, secp)
}
- fn derive_from_tap_key_origins<'s>(
+ fn derive_from_tap_key_origins(
&self,
tap_key_origins: &TapKeyOrigins,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
// "Convert" a tap_key_origins map to the format required by `derive_from_psbt_key_origins`
let key_origins = tap_key_origins
self.derive_from_psbt_key_origins(key_origins, secp)
}
- fn derive_from_psbt_input<'s>(
+ fn derive_from_psbt_input(
&self,
psbt_input: &psbt::Input,
utxo: Option<TxOut>,
- secp: &'s SecpCtx,
+ secp: &SecpCtx,
) -> Option<DerivedDescriptor> {
if let Some(derived) = self.derive_from_hd_keypaths(&psbt_input.bip32_derivation, secp) {
return Some(derived);
fn get_test_utxos() -> Vec<WeightedUtxo> {
vec![
utxo(100_000, 0, ConfirmationTime::Unconfirmed),
- utxo(FEE_AMOUNT as u64 - 40, 1, ConfirmationTime::Unconfirmed),
+ utxo(FEE_AMOUNT - 40, 1, ConfirmationTime::Unconfirmed),
utxo(200_000, 2, ConfirmationTime::Unconfirmed),
]
}
.last()
.unwrap(),
};
- let info = AddressInfo {
+ AddressInfo {
index,
address: Address::from_script(&spk, self.network)
.expect("descriptor must have address form"),
keychain,
- };
- info
+ }
}
/// Return whether or not a `script` is part of this wallet (either internal or external)
.map(|(&(keychain, derivation_index), utxo)| LocalUtxo {
outpoint: utxo.outpoint,
txout: utxo.txout,
- keychain: keychain.clone(),
+ keychain,
is_spent: false,
derivation_index,
confirmation_time: utxo.chain_position,
// - If that also fails, it will try it on the internal descriptor, if present
let desc = psbt
.get_utxo_for(n)
- .map(|txout| self.get_descriptor_for_txout(&txout))
- .flatten()
+ .and_then(|txout| self.get_descriptor_for_txout(&txout))
.or_else(|| {
self.keychain_tracker
.txout_index
}],
};
wallet
- .insert_tx(init_tx.clone(), wallet.transactions().last().unwrap().0)
+ .insert_tx(init_tx, wallet.transactions().last().unwrap().0)
.unwrap();
let addr = Address::from_str("2N1Ffz3WaNzbeLFBb51xyFMHYSEUXcbiSoX").unwrap();
}
/// Calculates the difference between self and `update` in the form of a [`ChangeSet`].
- pub fn determine_changeset<'a, T2>(
+ pub fn determine_changeset<T2>(
&self,
update: &ChainGraph<P, T2>,
) -> Result<ChangeSet<P, T>, UpdateError<P>>
// evicted, return error
return Err(UnresolvableConflict {
already_confirmed_tx: (conflicting_pos.clone(), conflicting_txid),
- update_tx: (update_pos.clone(), update_txid),
+ update_tx: (update_pos, update_txid),
});
}
TxHeight::Unconfirmed => {
f,
"missing full transactions for {}",
missing
- .into_iter()
+ .iter()
.map(|txid| txid.to_string())
.collect::<Vec<_>>()
.join(", ")
use crate::BlockId;
// Hex-encoded raw Bitcoin transactions used as fixtures (decoded via
// `tx_from_hex` below). `&str` is used instead of `&'static str`: the
// `'static` lifetime is already implied for string constants
// (clippy::redundant_static_lifetimes).
pub const RAW_TX_1: &str = "0200000000010116d6174da7183d70d0a7d4dc314d517a7d135db79ad63515028b293a76f4f9d10000000000feffffff023a21fc8350060000160014531c405e1881ef192294b8813631e258bf98ea7a1027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c024730440220591b1a172a122da49ba79a3e79f98aaa03fd7a372f9760da18890b6a327e6010022013e82319231da6c99abf8123d7c07e13cf9bd8d76e113e18dc452e5024db156d012102318a2d558b2936c52e320decd6d92a88d7f530be91b6fe0af5caf41661e77da3ef2e0100";
pub const RAW_TX_2: &str = "02000000000101a688607020cfae91a61e7c516b5ef1264d5d77f17200c3866826c6c808ebf1620000000000feffffff021027000000000000225120a60869f0dbcf1dc659c9cecbaf8050135ea9e8cdc487053f1dc6880949dc684c20fd48ff530600001600146886c525e41d4522042bd0b159dfbade2504a6bb024730440220740ff7e665cd20565d4296b549df8d26b941be3f1e3af89a0b60e50c0dbeb69a02206213ab7030cf6edc6c90d4ccf33010644261e029950a688dc0b1a9ebe6ddcc5a012102f2ac6b396a97853cb6cd62242c8ae4842024742074475023532a51e9c53194253e760100";
pub const RAW_TX_3: &str = "0200000000010135d67ee47b557e68b8c6223958f597381965ed719f1207ee2b9e20432a24a5dc0100000000feffffff021027000000000000225120a82f29944d65b86ae6b5e5cc75e294ead6c59391a1edc5e016e3498c67fc7bbb62215a5055060000160014070df7671dea67a50c4799a744b5c9be8f4bac690247304402207ebf8d29f71fd03e7e6977b3ea78ca5fcc5c49a42ae822348fc401862fdd766c02201d7e4ff0684ecb008b6142f36ead1b0b4d615524c4f58c261113d361f4427e25012103e6a75e2fab85e5ecad641afc4ffba7222f998649d9f18cac92f0fcc8618883b3ee760100";
pub const RAW_TX_4: &str = "02000000000101d00e8f76ed313e19b339ee293c0f52b0325c95e24c8f3966fa353fb2bedbcf580100000000feffffff021027000000000000225120882d74e5d0572d5a816cef0041a96b6c1de832f6f9676d9605c44d5e9a97d3dc9cda55fe53060000160014852b5864b8edd42fab4060c87f818e50780865ff0247304402201dccbb9bed7fba924b6d249c5837cc9b37470c0e3d8fbea77cb59baba3efe6fa0220700cc170916913b9bfc2bc0fefb6af776e8b542c561702f136cddc1c7aa43141012103acec3fc79dbbca745815c2a807dc4e81010c80e308e84913f59cb42a275dad97f3760100";
pub fn tx_from_hex(s: &str) -> Transaction {
let raw = Vec::from_hex(s).expect("data must be in hex");
/// Returns a reference to the internal [`TxGraph`] (which is part of the [`ChainGraph`]).
pub fn graph(&self) -> &TxGraph<T> {
- &self.chain_graph().graph()
+ self.chain_graph().graph()
}
/// Returns a reference to the internal [`SparseChain`] (which is part of the [`ChainGraph`]).
pub fn chain(&self) -> &SparseChain<P> {
- &self.chain_graph().chain()
+ self.chain_graph().chain()
}
/// Determines the changes as result of inserting `block_id` (a height and block hash) into the
use super::DerivationAdditions;
/// Maximum [BIP32](https://bips.xyz/32) derivation index (`2^31 - 1`).
///
/// The parentheses are required: `<<` binds *looser* than `-` in Rust, so the
/// unparenthesized `1 << 31 - 1` would evaluate as `1 << 30`.
pub const BIP32_MAX_INDEX: u32 = (1 << 31) - 1;
/// A convenient wrapper around [`SpkTxOutIndex`] that relates script pubkeys to miniscript public
/// [`Descriptor`]s.
pub fn set_lookahead_for_all(&mut self, lookahead: u32) {
for keychain in &self.keychains.keys().cloned().collect::<Vec<_>>() {
self.lookahead.insert(keychain.clone(), lookahead);
- self.replenish_lookahead(&keychain);
+ self.replenish_lookahead(keychain);
}
}
let mut spks = BTreeMap::new();
for (keychain, &index) in keychains {
- let (new_spks, new_additions) = self.reveal_to_target(&keychain, index);
+ let (new_spks, new_additions) = self.reveal_to_target(keychain, index);
if !new_additions.is_empty() {
spks.insert(keychain.clone(), new_spks);
additions.append(new_additions);
where
C: IntoIterator<Item = BlockId>,
{
- let mut chain = Self::default();
- chain.checkpoints = checkpoints
- .into_iter()
- .map(|block_id| block_id.into())
- .collect();
- chain
+ Self {
+ checkpoints: checkpoints
+ .into_iter()
+ .map(|block_id| block_id.into())
+ .collect(),
+ ..Default::default()
+ }
}
/// Get the checkpoint for the last known tip.
if self.outputs_in_range(index..=index).next().is_some() {
return false;
}
- return self.unused.insert(index.clone());
+ self.unused.insert(index.clone())
}
/// Returns the index associated with the script pubkey.
let input_matches = tx
.input
.iter()
- .find(|input| self.txouts.contains_key(&input.previous_output))
- .is_some();
+ .any(|input| self.txouts.contains_key(&input.previous_output));
let output_matches = tx
.output
.iter()
- .find(|output| self.spk_indices.contains_key(&output.script_pubkey))
- .is_some();
+ .any(|output| self.spk_indices.contains_key(&output.script_pubkey));
input_matches || output_matches
}
}
///
/// The [`Additions`] would be the set difference of `update` and `self` (transactions that
/// exist in `update` but not in `self`).
- pub fn determine_additions<'a, T2>(&self, update: &'a TxGraph<T2>) -> Additions<T>
+ pub fn determine_additions<T2>(&self, update: &TxGraph<T2>) -> Additions<T>
where
T2: IntoOwned<T> + Clone,
{
for (&vout, update_txout) in partial {
let outpoint = OutPoint::new(txid, vout);
- if self.get_txout(outpoint) != Some(&update_txout) {
+ if self.get_txout(outpoint) != Some(update_txout) {
additions.txout.insert(outpoint, update_txout.clone());
}
}
};
self.populate_stack(op_spends + 1, txid);
- return Some(item);
+ Some(item)
}
}
let _ = cg.insert_checkpoint(cp_a).expect("should insert cp");
let _ = cg.insert_checkpoint(cp_b).expect("should insert cp");
let _ = cg
- .insert_tx(tx_a.clone(), TxHeight::Confirmed(0))
+ .insert_tx(tx_a, TxHeight::Confirmed(0))
.expect("should insert tx");
let _ = cg
.insert_tx(tx_b.clone(), TxHeight::Confirmed(1))
.into(),
},
graph: tx_graph::Additions {
- tx: [tx_b2.clone()].into(),
+ tx: [tx_b2].into(),
txout: [].into(),
},
};
let mut cg = ChainGraph::default();
let _ = cg.insert_checkpoint(block1).unwrap();
let _ = cg.insert_checkpoint(block2a).unwrap();
- let _ = cg.insert_tx(tx1.clone(), TxHeight::Confirmed(1)).unwrap();
+ let _ = cg.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
let _ = cg.insert_tx(tx2a.clone(), TxHeight::Confirmed(2)).unwrap();
cg
};
);
let err = cg
- .insert_tx_preview(tx_conflict.clone(), TxHeight::Unconfirmed)
+ .insert_tx_preview(tx_conflict, TxHeight::Unconfirmed)
.expect_err("must fail due to conflicts");
assert!(matches!(err, InsertTxError::UnresolvableConflict(_)));
}
let mut tracker = KeychainTracker::<Keychain, TxHeight>::default();
let one = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/0/*)#rg247h69").unwrap();
let two = Descriptor::from_str("tr([73c5da0a/86'/0'/0']xpub6BgBgsespWvERF3LHQu6CnqdvfEvtMcQjYrcRzx53QJjSxarj2afYWcLteoGVky7D3UKDP9QyrLprQ3VCECoY49yfdDEHGCtMMj92pReUsQ/1/*)#ju05rz2a").unwrap();
- tracker.add_keychain(Keychain::One, one.clone());
- tracker.add_keychain(Keychain::Two, two.clone());
+ tracker.add_keychain(Keychain::One, one);
+ tracker.add_keychain(Keychain::Two, two);
let tx1 = Transaction {
version: 0x01,
})
.unwrap();
- let should_trust = |keychain: &Keychain| match keychain {
- &Keychain::One => false,
- &Keychain::Two => true,
+ let should_trust = |keychain: &Keychain| match *keychain {
+ Keychain::One => false,
+ Keychain::Two => true,
};
assert_eq!(tracker.balance(should_trust), Balance::default());
}
);
- let _ = tracker
- .insert_tx(tx1.clone(), TxHeight::Confirmed(1))
- .unwrap();
+ let _ = tracker.insert_tx(tx1, TxHeight::Confirmed(1)).unwrap();
assert_eq!(
tracker.balance(should_trust),
}
);
- let _ = tracker
- .insert_tx(tx2.clone(), TxHeight::Confirmed(2))
- .unwrap();
+ let _ = tracker.insert_tx(tx2, TxHeight::Confirmed(2)).unwrap();
assert_eq!(
tracker.balance(should_trust),
chain1.determine_changeset(&chain2),
Err(UpdateError::TxInconsistent {
txid: h!("tx0"),
- original_pos: TxHeight::Confirmed(0).into(),
- update_pos: TxHeight::Confirmed(1).into(),
+ original_pos: TxHeight::Confirmed(0),
+ update_pos: TxHeight::Confirmed(1),
})
);
}
output: vec![
TxOut {
value: 20_000,
- script_pubkey: spk2.clone(),
+ script_pubkey: spk2,
},
TxOut {
- script_pubkey: spk1.clone(),
+ script_pubkey: spk1,
value: 30_000,
},
],
let mut spk_index = SpkTxOutIndex::default();
spk_index.insert_spk(1, spk1.clone());
- spk_index.insert_spk(2, spk2.clone());
+ spk_index.insert_spk(2, spk2);
- assert_eq!(spk_index.is_used(&1), false);
+ assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1);
- assert_eq!(spk_index.is_used(&1), true);
+ assert!(spk_index.is_used(&1));
spk_index.unmark_used(&1);
- assert_eq!(spk_index.is_used(&1), false);
+ assert!(!spk_index.is_used(&1));
spk_index.mark_used(&1);
- assert_eq!(spk_index.is_used(&1), true);
+ assert!(spk_index.is_used(&1));
let tx1 = Transaction {
version: 0x02,
input: vec![],
output: vec![TxOut {
value: 42_000,
- script_pubkey: spk1.clone(),
+ script_pubkey: spk1,
}],
};
spk_index.scan(&tx1);
spk_index.unmark_used(&1);
- assert_eq!(
+ assert!(
spk_index.is_used(&1),
- true,
"even though we unmark_used it doesn't matter because there was a tx scanned that used it"
);
}
#[test]
fn unmark_used_does_not_result_in_invalid_representation() {
    let mut spk_index = SpkTxOutIndex::default();
    // Unmarking indices that were never marked used must be a no-op
    // reporting `false`...
    assert!(!spk_index.unmark_used(&0));
    assert!(!spk_index.unmark_used(&1));
    assert!(!spk_index.unmark_used(&2));
    // ...and must not invent "unused" entries for unknown indices.
    assert!(spk_index.unused_spks(..).collect::<Vec<_>>().is_empty());
}
let _ = graph1.insert_tx(tx2.clone());
let _ = graph2.insert_tx(tx2.clone());
- let _ = graph2.insert_tx(tx1.clone());
+ let _ = graph2.insert_tx(tx1);
assert_eq!(
- &*graph1.outspends(op),
+ graph1.outspends(op),
&iter::once(tx2.txid()).collect::<HashSet<_>>()
);
assert_eq!(graph2.outspends(op), graph1.outspends(op));
vout: 0,
},
TxOut {
- value: 1337_000,
+ value: 1_337_000,
script_pubkey: Script::default(),
},
);
vout: 0,
},
TxOut {
- value: 1337_000,
+ value: 1_337_000,
script_pubkey: Script::default(),
},
);
impl ElectrumExt for Client {
/// Return the current chain tip as `(height, block_hash)` by subscribing to
/// block headers and reading the first notification.
fn get_tip(&self) -> Result<(u32, BlockHash), Error> {
    // TODO: unsubscribe when added to the client, or is there a better call to use here?
    // Return the mapped Result directly instead of the needless `Ok(…?)` dance.
    self.block_headers_subscribe()
        .map(|data| (data.height as u32, data.header.block_hash()))
}
fn scan<K: Ord + Clone>(
batch_size,
) {
Err(InternalError::Reorg) => continue,
- Err(InternalError::ElectrumError(e)) => return Err(e.into()),
+ Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(mut spks) => scanned_spks.append(&mut spks),
};
}
batch_size,
) {
Err(InternalError::Reorg) => continue,
- Err(InternalError::ElectrumError(e)) => return Err(e.into()),
+ Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(spks) => scanned_spks.extend(
spks.into_iter()
.map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)),
match populate_with_txids(self, &mut update, &mut txids.iter().cloned()) {
Err(InternalError::Reorg) => continue,
- Err(InternalError::ElectrumError(e)) => return Err(e.into()),
+ Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_) => {}
}
match populate_with_outpoints(self, &mut update, &mut outpoints.iter().cloned()) {
Err(InternalError::Reorg) => continue,
- Err(InternalError::ElectrumError(e)) => return Err(e.into()),
+ Err(InternalError::ElectrumError(e)) => return Err(e),
Ok(_txs) => { /* [TODO] cache full txs to reduce bandwidth */ }
}
.into_iter()
.zip(
client
- .batch_block_header(heights.clone())?
+ .batch_block_header(heights)?
.into_iter()
.map(|bh| bh.time as u64),
)
Ok(ElectrumUpdate {
chain_update: new_update,
- last_active_indices: self.last_active_indices.clone(),
+ last_active_indices: self.last_active_indices,
})
}
}
impl From<electrum_client::Error> for InternalError {
    /// Wrap an electrum client error. The value is already the target payload
    /// type, so no `.into()` conversion is needed.
    fn from(value: electrum_client::Error) -> Self {
        Self::ElectrumError(value)
    }
}
/// Return the current chain tip as `(height, block_hash)` using the given
/// client's block-header subscription.
fn get_tip(client: &Client) -> Result<(u32, BlockHash), Error> {
    // TODO: unsubscribe when added to the client, or is there a better call to use here?
    // Return the mapped Result directly instead of wrapping with `Ok(…?)`.
    client
        .block_headers_subscribe()
        .map(|data| (data.height as u32, data.header.block_hash()))
}
/// Prepare an update sparsechain "template" based on the checkpoints of the `local_chain`.
/// parallel.
///
/// [`ChainPosition`]: bdk_chain::sparse_chain::ChainPosition
+ #[allow(clippy::result_large_err)] // FIXME
fn scan<K: Ord + Clone>(
&self,
local_chain: &BTreeMap<u32, BlockHash>,
/// Convenience method to call [`scan`] without requiring a keychain.
///
/// [`scan`]: EsploraExt::scan
+ #[allow(clippy::result_large_err)] // FIXME
fn scan_without_keychain(
&self,
local_chain: &BTreeMap<u32, BlockHash>,
let mut spks = spks.into_iter();
let mut last_active_index = None;
let mut empty_scripts = 0;
+ type IndexWithTxs = (u32, Vec<esplora_client::Tx>);
loop {
let handles = (0..parallel_requests)
.filter_map(
- |_| -> Option<
- std::thread::JoinHandle<Result<(u32, Vec<esplora_client::Tx>), _>>,
- > {
+ |_| -> Option<std::thread::JoinHandle<Result<IndexWithTxs, _>>> {
let (index, script) = spks.next()?;
let client = self.clone();
Some(std::thread::spawn(move || {
};
pub use file_store::*;
-impl<'de, K, P> PersistBackend<K, P> for KeychainStore<K, P>
+impl<K, P> PersistBackend<K, P> for KeychainStore<K, P>
where
K: Ord + Clone + core::fmt::Debug,
P: ChainPosition,
TxHeight,
};
use bdk_file_store::{FileError, IterError, KeychainStore, MAGIC_BYTES, MAGIC_BYTES_LEN};
-use serde;
use std::{
io::{Read, Write},
vec::Vec,
}
fn main() -> anyhow::Result<()> {
- let (args, keymap, mut tracker, mut db) = cli::init::<ElectrumCommands, _>()?;
+ let (args, keymap, tracker, db) = cli::init::<ElectrumCommands, _>()?;
let electrum_url = match args.network {
Network::Bitcoin => "ssl://electrum.blockstream.info:50002",
Network::Signet => "tcp://signet-electrumx.wakiyamap.dev:50001",
};
let config = electrum_client::Config::builder()
- .validate_domain(match args.network {
- Network::Bitcoin => true,
- _ => false,
- })
+ .validate_domain(matches!(args.network, Network::Bitcoin))
.build();
let client = electrum_client::Client::from_config(electrum_url, config)?;
let _txid = client.transaction_broadcast(transaction)?;
Ok(())
},
- &mut tracker,
- &mut db,
+ &tracker,
+ &db,
args.network,
&keymap,
)
.txout_index
.all_spks()
.iter()
- .map(|(k, v)| (k.clone(), v.clone()))
+ .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index);
let unused_spks = tracker
.txout_index
.unused_spks(..)
- .map(|(k, v)| (k.clone(), v.clone()))
+ .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!(
.txout_index
.all_spks()
.iter()
- .map(|(k, v)| (k.clone(), v.clone()))
+ .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(all_spks.into_iter().map(|(index, script)| {
eprintln!("scanning {:?}", index);
let unused_spks = tracker
.txout_index
.unused_spks(..)
- .map(|(k, v)| (k.clone(), v.clone()))
+ .map(|(k, v)| (*k, v.clone()))
.collect::<Vec<_>>();
spks = Box::new(spks.chain(unused_spks.into_iter().map(|(index, script)| {
eprintln!(
false => Keychain::External,
};
for (index, spk) in txout_index.revealed_spks_of_keychain(&target_keychain) {
- let address = Address::from_script(&spk, network)
+ let address = Address::from_script(spk, network)
.expect("should always be able to derive address");
println!(
"{:?} {} used:{}",
unconfirmed,
} => {
let tracker = tracker.lock().unwrap();
+ #[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> = match (unspent, spent)
{
(true, false) => Box::new(tracker.full_utxos()),
_ => Box::new(tracker.full_txouts()),
};
+ #[allow(clippy::type_complexity)] // FIXME
let txouts: Box<dyn Iterator<Item = (&(K, u32), FullTxOut<P>)>> =
match (confirmed, unconfirmed) {
(true, false) => Box::new(
}
}
+#[allow(clippy::type_complexity)] // FIXME
pub fn create_tx<P: ChainPosition>(
value: u64,
address: Address,
assert!(
requirements.signatures.sign_with_keymap(
i,
- &keymap,
+ keymap,
&sighash_prevouts,
None,
None,
{
match command {
// TODO: Make these functions return stuffs
- Commands::Address { addr_cmd } => run_address_cmd(&tracker, &store, addr_cmd, network),
+ Commands::Address { addr_cmd } => run_address_cmd(tracker, store, addr_cmd, network),
Commands::Balance => {
- run_balance_cmd(&tracker);
+ run_balance_cmd(tracker);
Ok(())
}
Commands::TxOut { txout_cmd } => {
- run_txo_cmd(txout_cmd, &tracker, network);
+ run_txo_cmd(txout_cmd, tracker, network);
Ok(())
}
Commands::Send {
// take mutable ref to construct tx -- it is only open for a short time while building it.
let tracker = &mut *tracker.lock().unwrap();
let (transaction, change_info) =
- create_tx(value, address, coin_select, tracker, &keymap)?;
+ create_tx(value, address, coin_select, tracker, keymap)?;
if let Some((change_derivation_changes, (change_keychain, index))) = change_info {
// We must first persist to disk the fact that we've got a new address from the
// We failed to broadcast so allow our change address to be used in the future
tracker.txout_index.unmark_used(&keychain, index);
}
- Err(e.into())
+ Err(e)
}
}
}
}
}
+#[allow(clippy::type_complexity)] // FIXME
pub fn init<C: clap::Subcommand, P>() -> anyhow::Result<(
Args<C>,
KeyMap,
impl BranchStrategy {
pub fn will_continue(&self) -> bool {
- match self {
- Self::Continue | Self::SkipInclusion => true,
- _ => false,
- }
+ matches!(self, Self::Continue | Self::SkipInclusion)
}
}
/// Attempt to backtrack to the previously selected node's omission branch, return false
/// otherwise (no more solutions).
pub fn backtrack(&mut self) -> bool {
- (0..self.pool_pos)
- .rev()
- .find(|&pos| {
- let (index, candidate) = self.pool[pos];
-
- if self.selection.is_selected(index) {
- // deselect last `pos`, so next round will check omission branch
- self.pool_pos = pos;
- self.selection.deselect(index);
- return true;
- } else {
- self.rem_abs += candidate.value;
- self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
- return false;
- }
- })
- .is_some()
+ (0..self.pool_pos).rev().any(|pos| {
+ let (index, candidate) = self.pool[pos];
+
+ if self.selection.is_selected(index) {
+ // deselect last `pos`, so next round will check omission branch
+ self.pool_pos = pos;
+ self.selection.deselect(index);
+ true
+ } else {
+ self.rem_abs += candidate.value;
+ self.rem_eff += candidate.effective_value(self.selection.opts.target_feerate);
+ false
+ }
+ })
}
/// Continue down this branch, skip inclusion branch if specified.
self.best_score = score;
return true;
}
- return false;
+ false
}
}
}
// check out inclusion branch first
- return (BranchStrategy::Continue, None);
+ (BranchStrategy::Continue, None)
};
// determine sum of absolute and effective values for current selection
})
})?;
- (selected - target_value) as u64
+ selected - target_value
};
let fee_without_drain = fee_without_drain.max(self.opts.min_absolute_fee);
impl core::fmt::Display for SelectionError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // A single-armed `match` on a struct is irrefutable — destructure
        // with `let` instead.
        let SelectionError {
            selected,
            missing,
            constraint,
        } = self;
        write!(
            f,
            "insufficient coins selected; selected={}, missing={}, unsatisfied_constraint={:?}",
            selected, missing, constraint
        )
    }
}
#![allow(unused)]
#![allow(missing_docs)]
+#![allow(clippy::all)] // FIXME
//! A spending plan or *plan* for short is a representation of a particular spending path on a
//! descriptor. This allows us to analayze a choice of spending path without producing any
//! signatures or other witness data for it.