Make bdk and bdk_chain work under 1.57.0
- rewrite some parts of the code to deal with older borrow checker - downgraded hashbrown
This commit is contained in:
committed by
Daniela Brozzoni
parent
3a5d727899
commit
38ef170ed1
@@ -2,6 +2,7 @@
|
||||
name = "bdk_chain"
|
||||
version = "0.3.1"
|
||||
edition = "2021"
|
||||
rust-version = "1.57"
|
||||
homepage = "https://bitcoindevkit.org"
|
||||
repository = "https://github.com/bitcoindevkit/bdk"
|
||||
documentation = "https://docs.rs/bdk_chain"
|
||||
@@ -14,8 +15,10 @@ readme = "../README.md"
|
||||
[dependencies]
|
||||
bitcoin = { version = "0.29" }
|
||||
serde_crate = { package = "serde", version = "1", optional = true, features = ["derive"] }
|
||||
|
||||
# Use hashbrown as a feature flag to have HashSet and HashMap from it.
|
||||
hashbrown = { version = "0.13.2", optional = true }
|
||||
# note: version 0.13 breaks our MSRV.
|
||||
hashbrown = { version = "0.12", optional = true, features = ["serde"] }
|
||||
miniscript = { version = "9.0.0", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
@@ -24,4 +27,4 @@ rand = "0.8"
|
||||
[features]
|
||||
default = ["std", "miniscript"]
|
||||
std = []
|
||||
serde = ["serde_crate", "bitcoin/serde"]
|
||||
serde = ["serde_crate", "bitcoin/serde" ]
|
||||
|
||||
@@ -294,7 +294,7 @@ where
|
||||
&'a self,
|
||||
tx: &'a Transaction,
|
||||
) -> impl Iterator<Item = (&'a P, Txid)> + 'a {
|
||||
self.graph.walk_conflicts(tx, |_, conflict_txid| {
|
||||
self.graph.walk_conflicts(tx, move |_, conflict_txid| {
|
||||
self.chain
|
||||
.tx_position(conflict_txid)
|
||||
.map(|conflict_pos| (conflict_pos, conflict_txid))
|
||||
@@ -309,39 +309,42 @@ where
|
||||
&self,
|
||||
changeset: &mut ChangeSet<P, T>,
|
||||
) -> Result<(), UnresolvableConflict<P>> {
|
||||
let chain_conflicts = changeset
|
||||
.chain
|
||||
.txids
|
||||
.iter()
|
||||
// we want to find new txid additions by the changeset (all txid entries in the
|
||||
// changeset with Some(position_change))
|
||||
.filter_map(|(&txid, pos_change)| pos_change.as_ref().map(|pos| (txid, pos)))
|
||||
// we don't care about txids that move, only newly added txids
|
||||
.filter(|&(txid, _)| self.chain.tx_position(txid).is_none())
|
||||
// full tx should exist (either in graph, or additions)
|
||||
.filter_map(|(txid, pos)| {
|
||||
let full_tx = self
|
||||
let mut chain_conflicts = vec![];
|
||||
|
||||
for (&txid, pos_change) in &changeset.chain.txids {
|
||||
let pos = match pos_change {
|
||||
Some(pos) => {
|
||||
// Ignore txs that are still in the chain -- we only care about new ones
|
||||
if self.chain.tx_position(txid).is_some() {
|
||||
continue;
|
||||
}
|
||||
pos
|
||||
}
|
||||
// Ignore txids that are being deleted by the change (they can't conflict)
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let mut full_tx = self.graph.get_tx(txid);
|
||||
|
||||
if full_tx.is_none() {
|
||||
full_tx = changeset
|
||||
.graph
|
||||
.get_tx(txid)
|
||||
.or_else(|| {
|
||||
changeset
|
||||
.graph
|
||||
.tx
|
||||
.iter()
|
||||
.find(|tx| tx.as_tx().txid() == txid)
|
||||
})
|
||||
.map(|tx| (txid, tx, pos));
|
||||
debug_assert!(full_tx.is_some(), "should have full tx at this point");
|
||||
full_tx
|
||||
})
|
||||
.flat_map(|(new_txid, new_tx, new_pos)| {
|
||||
self.tx_conflicts_in_chain(new_tx.as_tx()).map(
|
||||
move |(conflict_pos, conflict_txid)| {
|
||||
(new_pos.clone(), new_txid, conflict_pos, conflict_txid)
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
.tx
|
||||
.iter()
|
||||
.find(|tx| tx.as_tx().txid() == txid)
|
||||
}
|
||||
|
||||
debug_assert!(full_tx.is_some(), "should have full tx at this point");
|
||||
|
||||
let full_tx = match full_tx {
|
||||
Some(full_tx) => full_tx,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
for (conflict_pos, conflict_txid) in self.tx_conflicts_in_chain(full_tx.as_tx()) {
|
||||
chain_conflicts.push((pos.clone(), txid, conflict_pos, conflict_txid))
|
||||
}
|
||||
}
|
||||
|
||||
for (update_pos, update_txid, conflicting_pos, conflicting_txid) in chain_conflicts {
|
||||
// We have found a tx that conflicts with our update txid. Only allow this when the
|
||||
@@ -411,7 +414,7 @@ where
|
||||
pub fn transactions_in_chain(&self) -> impl DoubleEndedIterator<Item = (&P, &T)> {
|
||||
self.chain
|
||||
.txids()
|
||||
.map(|(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
|
||||
.map(move |(pos, txid)| (pos, self.graph.get_tx(*txid).expect("must exist")))
|
||||
}
|
||||
|
||||
/// Finds the transaction in the chain that spends `outpoint` given the input/output
|
||||
|
||||
@@ -23,7 +23,7 @@ pub struct Persist<K, P, B> {
|
||||
stage: keychain::KeychainChangeSet<K, P>,
|
||||
}
|
||||
|
||||
impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
|
||||
impl<K, P, B> Persist<K, P, B> {
|
||||
/// Create a new `Persist` from a [`PersistBackend`].
|
||||
pub fn new(backend: B) -> Self {
|
||||
Self {
|
||||
@@ -51,7 +51,10 @@ impl<K, P, B: PersistBackend<K, P>> Persist<K, P, B> {
|
||||
/// Commit the staged changes to the underlying persistence backend.
|
||||
///
|
||||
/// Returns a backend-defined error if this fails
|
||||
pub fn commit(&mut self) -> Result<(), B::WriteError> {
|
||||
pub fn commit(&mut self) -> Result<(), B::WriteError>
|
||||
where
|
||||
B: PersistBackend<K, P>,
|
||||
{
|
||||
self.backend.append_changeset(&self.stage)?;
|
||||
self.stage = Default::default();
|
||||
Ok(())
|
||||
|
||||
@@ -125,7 +125,7 @@ where
|
||||
pub fn full_txouts(&self) -> impl Iterator<Item = (&(K, u32), FullTxOut<P>)> + '_ {
|
||||
self.txout_index
|
||||
.txouts()
|
||||
.filter_map(|(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
|
||||
.filter_map(move |(spk_i, op, _)| Some((spk_i, self.chain_graph.full_txout(op)?)))
|
||||
}
|
||||
|
||||
/// Iterates through [`FullTxOut`]s that are unspent outputs.
|
||||
|
||||
@@ -423,7 +423,7 @@ impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
|
||||
Cow::Owned(descriptor.clone()),
|
||||
next_reveal_index..index + 1,
|
||||
),
|
||||
DerivationAdditions([(keychain.clone(), index)].into()),
|
||||
DerivationAdditions(core::iter::once((keychain.clone(), index)).collect()),
|
||||
)
|
||||
}
|
||||
None => (
|
||||
@@ -575,11 +575,17 @@ where
|
||||
.take_while(move |&index| has_wildcard || index == 0)
|
||||
// we can only iterate over non-hardened indices
|
||||
.take_while(|&index| index <= BIP32_MAX_INDEX)
|
||||
// take until failure
|
||||
.map_while(move |index| {
|
||||
descriptor
|
||||
.derived_descriptor(&secp, index)
|
||||
.map(|desc| (index, desc.script_pubkey()))
|
||||
.ok()
|
||||
})
|
||||
.map(
|
||||
move |index| -> Result<_, miniscript::descriptor::ConversionError> {
|
||||
Ok((
|
||||
index,
|
||||
descriptor
|
||||
.at_derivation_index(index)
|
||||
.derived_descriptor(&secp)?
|
||||
.script_pubkey(),
|
||||
))
|
||||
},
|
||||
)
|
||||
.take_while(Result::is_ok)
|
||||
.map(Result::unwrap)
|
||||
}
|
||||
|
||||
@@ -949,7 +949,7 @@ impl<P: ChainPosition> SparseChain<P> {
|
||||
changeset
|
||||
.txids
|
||||
.iter()
|
||||
.filter(|(&txid, pos)| {
|
||||
.filter(move |(&txid, pos)| {
|
||||
pos.is_some() /*it was not a deletion*/ &&
|
||||
self.tx_position(txid).is_none() /* we don't have the txid already */
|
||||
})
|
||||
|
||||
@@ -79,13 +79,12 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
||||
/// See [`ForEachTxout`] for the types that support this.
|
||||
///
|
||||
/// [`ForEachTxout`]: crate::ForEachTxOut
|
||||
pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<&I> {
|
||||
// let scanner = &mut SpkTxOutScanner::new(self);
|
||||
pub fn scan(&mut self, txouts: &impl ForEachTxOut) -> BTreeSet<I> {
|
||||
let mut scanned_indices = BTreeSet::new();
|
||||
|
||||
txouts.for_each_txout(|(op, txout)| {
|
||||
if let Some(spk_i) = scan_txout!(self, op, txout) {
|
||||
scanned_indices.insert(spk_i);
|
||||
scanned_indices.insert(spk_i.clone());
|
||||
}
|
||||
});
|
||||
|
||||
@@ -207,7 +206,7 @@ impl<I: Clone + Ord> SpkTxOutIndex<I> {
|
||||
{
|
||||
self.unused
|
||||
.range(range)
|
||||
.map(|index| (index, self.spk_at_index(index).expect("must exist")))
|
||||
.map(move |index| (index, self.spk_at_index(index).expect("must exist")))
|
||||
}
|
||||
|
||||
/// Returns whether the script pubkey at `index` has been used or not.
|
||||
|
||||
@@ -419,7 +419,7 @@ impl<T> TxGraph<T> {
|
||||
tx.input
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
|
||||
.filter_map(move |(vin, txin)| self.spends.get(&txin.previous_output).zip(Some(vin)))
|
||||
.flat_map(|(spends, vin)| core::iter::repeat(vin).zip(spends.iter().cloned()))
|
||||
.filter(move |(_, conflicting_txid)| *conflicting_txid != txid)
|
||||
}
|
||||
@@ -474,7 +474,7 @@ impl<T> Additions<T> {
|
||||
.output
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
|
||||
.map(move |(vout, txout)| (OutPoint::new(tx.as_tx().txid(), vout as _), txout))
|
||||
})
|
||||
.chain(self.txout.iter().map(|(op, txout)| (*op, txout)))
|
||||
}
|
||||
|
||||
@@ -253,7 +253,7 @@ fn test_wildcard_derivations() {
|
||||
|
||||
(0..=15)
|
||||
.into_iter()
|
||||
.chain([17, 20, 23].into_iter())
|
||||
.chain(vec![17, 20, 23].into_iter())
|
||||
.for_each(|index| assert!(txout_index.mark_used(&TestKeychain::External, index)));
|
||||
|
||||
assert_eq!(txout_index.next_index(&TestKeychain::External), (26, true));
|
||||
|
||||
Reference in New Issue
Block a user