use crate::{
alloc::boxed::Box,
collections::*,
miniscript::{Descriptor, DescriptorPublicKey},
spk_client::{FullScanRequestBuilder, SyncRequestBuilder},
spk_iter::BIP32_MAX_INDEX,
spk_txout::SpkTxOutIndex,
DescriptorExt, DescriptorId, Indexed, Indexer, KeychainIndexed, SpkIterator,
};
use alloc::{borrow::ToOwned, vec::Vec};
use bitcoin::{
key::Secp256k1, Amount, OutPoint, ScriptBuf, SignedAmount, Transaction, TxOut, Txid,
};
use core::{
fmt::Debug,
ops::{Bound, RangeBounds},
};
use crate::spk_txout::{CreatedTxOut, SpentTxOut};
use crate::Merge;
/// Default number of script pubkeys derived ahead of the highest revealed index (see
/// [`KeychainTxOutIndex::lookahead`]).
pub const DEFAULT_LOOKAHEAD: u32 = 25;
/// Indexes script pubkeys derived from ranged descriptors, grouped by keychain `K`.
///
/// Wraps a [`SpkTxOutIndex`] keyed by `(keychain, derivation_index)` and layers descriptor
/// management, reveal tracking, and (optionally) a persisted spk cache on top.
#[derive(Clone, Debug)]
pub struct KeychainTxOutIndex<K> {
    // Underlying spk/txout index; keys are `(keychain, derivation_index)`.
    inner: SpkTxOutIndex<(K, u32)>,
    // Keychain -> descriptor id. Invariant: forms a bijection with
    // `descriptor_id_to_keychain`, and every id present here has an entry in `descriptors`.
    keychain_to_descriptor_id: BTreeMap<K, DescriptorId>,
    // Reverse of `keychain_to_descriptor_id`.
    descriptor_id_to_keychain: HashMap<DescriptorId, K>,
    // Descriptor id -> descriptor.
    descriptors: HashMap<DescriptorId, Descriptor<DescriptorPublicKey>>,
    // Highest derivation index revealed so far, per descriptor.
    last_revealed: HashMap<DescriptorId, u32>,
    // How many spks past the last revealed index are derived ahead of time.
    lookahead: u32,
    // When true, derived spks are cached in `spk_cache` and emitted via changesets.
    persist_spks: bool,
    // Cache of already-derived spks (derivation index -> spk), per descriptor.
    spk_cache: BTreeMap<DescriptorId, HashMap<u32, ScriptBuf>>,
    // Freshly derived spks not yet drained into a `ChangeSet`
    // (see `_empty_stage_into_changeset`).
    spk_cache_stage: BTreeMap<DescriptorId, Vec<(u32, ScriptBuf)>>,
}
impl<K> Default for KeychainTxOutIndex<K> {
fn default() -> Self {
Self::new(DEFAULT_LOOKAHEAD, false)
}
}
impl<K> AsRef<SpkTxOutIndex<(K, u32)>> for KeychainTxOutIndex<K> {
    /// Borrow the wrapped [`SpkTxOutIndex`].
    fn as_ref(&self) -> &SpkTxOutIndex<(K, u32)> {
        let Self { inner, .. } = self;
        inner
    }
}
impl<K: Clone + Ord + Debug> Indexer for KeychainTxOutIndex<K> {
    type ChangeSet = ChangeSet;

    /// Index a single txout, emitting any reveal-index bumps and staged spks as a changeset.
    fn index_txout(&mut self, outpoint: OutPoint, txout: &TxOut) -> Self::ChangeSet {
        let mut changeset = ChangeSet::default();
        self._index_txout(&mut changeset, outpoint, txout);
        // Drain spks staged during replenishment into the changeset (no-op unless
        // `persist_spks` is set).
        self._empty_stage_into_changeset(&mut changeset);
        changeset
    }

    /// Index every output of `tx`. Inputs are not indexed here; only outputs can reveal
    /// new derivation indices.
    fn index_tx(&mut self, tx: &bitcoin::Transaction) -> Self::ChangeSet {
        let mut changeset = ChangeSet::default();
        let txid = tx.compute_txid();
        for (vout, txout) in tx.output.iter().enumerate() {
            self._index_txout(&mut changeset, OutPoint::new(txid, vout as u32), txout);
        }
        self._empty_stage_into_changeset(&mut changeset);
        changeset
    }

    /// Snapshot the full in-memory state (reveal indices + spk cache) as a changeset.
    fn initial_changeset(&self) -> Self::ChangeSet {
        ChangeSet {
            last_revealed: self.last_revealed.clone().into_iter().collect(),
            spk_cache: self
                .spk_cache
                .iter()
                .map(|(desc, spks)| {
                    (
                        *desc,
                        // HashMap -> BTreeMap conversion for the ordered changeset form.
                        spks.iter().map(|(i, spk)| (*i, spk.clone())).collect(),
                    )
                })
                .collect(),
        }
    }

    fn apply_changeset(&mut self, changeset: Self::ChangeSet) {
        // Resolves to the inherent `apply_changeset` (inherent methods take precedence
        // over trait methods), so this is delegation, not recursion.
        self.apply_changeset(changeset)
    }

    /// A tx is relevant if it spends or creates a tracked spk.
    fn is_tx_relevant(&self, tx: &bitcoin::Transaction) -> bool {
        self.inner.is_relevant(tx)
    }
}
impl<K> KeychainTxOutIndex<K> {
    /// Construct an empty index.
    ///
    /// `lookahead` is how many scripts beyond the highest revealed index to derive ahead
    /// of time; `persist_spks` enables caching derived spks and emitting them in
    /// changesets.
    pub fn new(lookahead: u32, persist_spks: bool) -> Self {
        // Field order mirrors the struct declaration.
        Self {
            inner: SpkTxOutIndex::default(),
            keychain_to_descriptor_id: Default::default(),
            descriptor_id_to_keychain: Default::default(),
            descriptors: Default::default(),
            last_revealed: Default::default(),
            lookahead,
            persist_spks,
            spk_cache: Default::default(),
            spk_cache_stage: Default::default(),
        }
    }

    /// Borrow the inner [`SpkTxOutIndex`].
    pub fn inner(&self) -> &SpkTxOutIndex<(K, u32)> {
        &self.inner
    }
}
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
    /// Reconstruct an index from a persisted [`ChangeSet`].
    pub fn from_changeset(lookahead: u32, use_spk_cache: bool, changeset: ChangeSet) -> Self {
        let mut out = Self::new(lookahead, use_spk_cache);
        out.apply_changeset(changeset);
        out
    }

    /// Scan one txout. If it pays to a tracked spk, advance that descriptor's
    /// `last_revealed` (monotonically) and replenish the lookahead window.
    fn _index_txout(&mut self, changeset: &mut ChangeSet, outpoint: OutPoint, txout: &TxOut) {
        if let Some((keychain, index)) = self.inner.scan_txout(outpoint, txout).cloned() {
            let did = self
                .keychain_to_descriptor_id
                .get(&keychain)
                .expect("invariant");
            // Only move the reveal index forward, never backward.
            let index_updated = match self.last_revealed.entry(*did) {
                hash_map::Entry::Occupied(mut e) if e.get() < &index => {
                    e.insert(index);
                    true
                }
                hash_map::Entry::Vacant(e) => {
                    e.insert(index);
                    true
                }
                // Occupied with an equal-or-higher index: nothing to do.
                _ => false,
            };
            if index_updated {
                changeset.last_revealed.insert(*did, index);
                self.replenish_inner_index(*did, &keychain, self.lookahead);
            }
        }
    }

    /// Drain `spk_cache_stage` into `changeset.spk_cache`. No-op when spk persistence
    /// is disabled; in that mode the stage is never populated.
    fn _empty_stage_into_changeset(&mut self, changeset: &mut ChangeSet) {
        if !self.persist_spks {
            return;
        }
        for (did, spks) in core::mem::take(&mut self.spk_cache_stage) {
            // Debug-only sanity check: every staged spk must match a fresh derivation.
            debug_assert!(
                {
                    let desc = self.descriptors.get(&did).expect("invariant");
                    spks.iter().all(|(i, spk)| {
                        let exp_spk = desc
                            .at_derivation_index(*i)
                            .expect("must derive")
                            .script_pubkey();
                        &exp_spk == spk
                    })
                },
                "all staged spks must be correct"
            );
            changeset.spk_cache.entry(did).or_default().extend(spks);
        }
    }

    /// All indexed outpoints, keyed by `(keychain, derivation_index)`.
    pub fn outpoints(&self) -> &BTreeSet<KeychainIndexed<K, OutPoint>> {
        self.inner.outpoints()
    }

    /// Iterate all indexed txouts with their keychain indices.
    pub fn txouts(
        &self,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, (OutPoint, &TxOut)>> + ExactSizeIterator
    {
        self.inner
            .txouts()
            .map(|(index, op, txout)| (index.clone(), (op, txout)))
    }

    /// Iterate the indexed txouts belonging to transaction `txid`.
    pub fn txouts_in_tx(
        &self,
        txid: Txid,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, (OutPoint, &TxOut)>> {
        self.inner
            .txouts_in_tx(txid)
            .map(|(index, op, txout)| (index.clone(), (op, txout)))
    }

    /// Look up a single indexed txout by outpoint.
    pub fn txout(&self, outpoint: OutPoint) -> Option<KeychainIndexed<K, &TxOut>> {
        self.inner
            .txout(outpoint)
            .map(|(index, txout)| (index.clone(), txout))
    }

    /// The spk stored at `(keychain, index)`, if it has been derived.
    pub fn spk_at_index(&self, keychain: K, index: u32) -> Option<ScriptBuf> {
        self.inner.spk_at_index(&(keychain.clone(), index))
    }

    /// Reverse lookup: which `(keychain, index)` derives `script`, if any.
    pub fn index_of_spk(&self, script: ScriptBuf) -> Option<&(K, u32)> {
        self.inner.index_of_spk(script)
    }

    /// Whether the spk at `(keychain, index)` has been seen used on-chain (or marked so).
    pub fn is_used(&self, keychain: K, index: u32) -> bool {
        self.inner.is_used(&(keychain, index))
    }

    /// Manually mark the spk at `(keychain, index)` as used. Returns whether the flag changed.
    pub fn mark_used(&mut self, keychain: K, index: u32) -> bool {
        self.inner.mark_used(&(keychain, index))
    }

    /// Undo a manual `mark_used`. Returns whether the flag changed.
    pub fn unmark_used(&mut self, keychain: K, index: u32) -> bool {
        self.inner.unmark_used(&(keychain, index))
    }

    /// Sum of values sent from / received to spks of keychains in `range` by `tx`.
    pub fn sent_and_received(
        &self,
        tx: &Transaction,
        range: impl RangeBounds<K>,
    ) -> (Amount, Amount) {
        self.inner
            .sent_and_received(tx, self.map_to_inner_bounds(range))
    }

    /// Iterate previously-indexed txouts that `tx` spends.
    pub fn spent_txouts<'a>(
        &'a self,
        tx: &'a Transaction,
    ) -> impl Iterator<Item = SpentTxOut<(K, u32)>> + 'a {
        self.inner.spent_txouts(tx)
    }

    /// Iterate tracked txouts that `tx` creates.
    pub fn created_txouts<'a>(
        &'a self,
        tx: &'a Transaction,
    ) -> impl Iterator<Item = CreatedTxOut<(K, u32)>> + 'a {
        self.inner.created_txouts(tx)
    }

    /// Net effect of `tx` (received minus sent) for keychains in `range`.
    pub fn net_value(&self, tx: &Transaction, range: impl RangeBounds<K>) -> SignedAmount {
        self.inner.net_value(tx, self.map_to_inner_bounds(range))
    }
}
impl<K: Clone + Ord + Debug> KeychainTxOutIndex<K> {
    /// Iterate `(keychain, descriptor)` pairs in keychain order.
    pub fn keychains(
        &self,
    ) -> impl DoubleEndedIterator<Item = (K, &Descriptor<DescriptorPublicKey>)> + ExactSizeIterator + '_
    {
        self.keychain_to_descriptor_id
            .iter()
            .map(|(k, did)| (k.clone(), self.descriptors.get(did).expect("invariant")))
    }

    /// Associate `descriptor` with `keychain`.
    ///
    /// Returns `Ok(true)` if newly inserted, `Ok(false)` if this exact pairing already
    /// exists, and an error if either side is already assigned to something else.
    /// A keychain<->descriptor pairing can never be changed once set.
    pub fn insert_descriptor(
        &mut self,
        keychain: K,
        descriptor: Descriptor<DescriptorPublicKey>,
    ) -> Result<bool, InsertDescriptorError<K>> {
        let did = descriptor.descriptor_id();
        if !self.keychain_to_descriptor_id.contains_key(&keychain)
            && !self.descriptor_id_to_keychain.contains_key(&did)
        {
            // Both sides are free: establish the bijection and pre-derive the lookahead.
            self.descriptors.insert(did, descriptor.clone());
            self.keychain_to_descriptor_id.insert(keychain.clone(), did);
            self.descriptor_id_to_keychain.insert(did, keychain.clone());
            self.replenish_inner_index(did, &keychain, self.lookahead);
            return Ok(true);
        }
        // The keychain is taken: error unless it already maps to this same descriptor.
        if let Some(existing_desc_id) = self.keychain_to_descriptor_id.get(&keychain) {
            let descriptor = self.descriptors.get(existing_desc_id).expect("invariant");
            if *existing_desc_id != did {
                return Err(InsertDescriptorError::KeychainAlreadyAssigned {
                    existing_assignment: Box::new(descriptor.clone()),
                    keychain,
                });
            }
        }
        // The descriptor is taken: error unless it already maps to this same keychain.
        if let Some(existing_keychain) = self.descriptor_id_to_keychain.get(&did) {
            let descriptor = self.descriptors.get(&did).expect("invariant").clone();
            if *existing_keychain != keychain {
                return Err(InsertDescriptorError::DescriptorAlreadyAssigned {
                    existing_assignment: existing_keychain.clone(),
                    descriptor: Box::new(descriptor),
                });
            }
        }
        // Identical pairing already present — nothing changed.
        Ok(false)
    }

    /// The descriptor assigned to `keychain`, if any.
    pub fn get_descriptor(&self, keychain: K) -> Option<&Descriptor<DescriptorPublicKey>> {
        let did = self.keychain_to_descriptor_id.get(&keychain)?;
        self.descriptors.get(did)
    }

    /// The configured lookahead (spks derived beyond the last revealed index).
    pub fn lookahead(&self) -> u32 {
        self.lookahead
    }

    /// Temporarily extend the derived-spk window of `keychain` up to (at least)
    /// `target_index`, without revealing anything.
    // NOTE(review): `target_index + 1` overflows in debug builds when
    // `target_index == u32::MAX` — presumably callers stay within BIP32 range; confirm.
    pub fn lookahead_to_target(&mut self, keychain: K, target_index: u32) -> ChangeSet {
        let mut changeset = ChangeSet::default();
        if let Some((next_index, _)) = self.next_index(keychain.clone()) {
            // How many extra spks we need beyond what's already derived; None if
            // the target is already covered.
            let temp_lookahead = (target_index + 1)
                .checked_sub(next_index)
                .filter(|&index| index > 0);
            if let Some(temp_lookahead) = temp_lookahead {
                self.replenish_inner_index_keychain(keychain, temp_lookahead);
            }
        }
        self._empty_stage_into_changeset(&mut changeset);
        changeset
    }

    /// Replenish by descriptor id (no-op if the id has no keychain).
    fn replenish_inner_index_did(&mut self, did: DescriptorId, lookahead: u32) {
        if let Some(keychain) = self.descriptor_id_to_keychain.get(&did).cloned() {
            self.replenish_inner_index(did, &keychain, lookahead);
        }
    }

    /// Replenish by keychain (no-op if the keychain has no descriptor).
    fn replenish_inner_index_keychain(&mut self, keychain: K, lookahead: u32) {
        if let Some(did) = self.keychain_to_descriptor_id.get(&keychain) {
            self.replenish_inner_index(*did, &keychain, lookahead);
        }
    }

    /// Derive and insert spks into the inner index so that the derived range covers
    /// `last_revealed + lookahead` (capped at [`BIP32_MAX_INDEX`]). Uses/updates the
    /// spk cache when persistence is enabled.
    fn replenish_inner_index(&mut self, did: DescriptorId, keychain: &K, lookahead: u32) {
        let descriptor = self.descriptors.get(&did).expect("invariant");
        // First index not yet stored in the inner index for this keychain.
        let mut next_index = self
            .inner
            .all_spks()
            .range(&(keychain.clone(), u32::MIN)..=&(keychain.clone(), u32::MAX))
            .last()
            .map_or(0, |((_, index), _)| *index + 1);
        // Exclusive end of the range to derive. Non-wildcard descriptors only ever
        // have index 0.
        let stop_index = if descriptor.has_wildcard() {
            let next_reveal_index = self.last_revealed.get(&did).map_or(0, |v| *v + 1);
            (next_reveal_index + lookahead).min(BIP32_MAX_INDEX)
        } else {
            1
        };
        if self.persist_spks {
            // Derivation closure used both for cache misses and (in debug) to verify hits.
            let derive_spk = {
                let secp = Secp256k1::verification_only();
                let _desc = &descriptor;
                move |spk_i: u32| -> ScriptBuf {
                    _desc
                        .derived_descriptor(&secp, spk_i)
                        .expect("The descriptor cannot have hardened derivation")
                        .script_pubkey()
                }
            };
            // Iterator over [next_index, stop_index): serves from `spk_cache` when
            // possible; otherwise derives, caches, and stages the new spk.
            let cached_spk_iter = core::iter::from_fn({
                let spk_cache = self.spk_cache.entry(did).or_default();
                let spk_stage = self.spk_cache_stage.entry(did).or_default();
                let _i = &mut next_index;
                move || -> Option<Indexed<ScriptBuf>> {
                    if *_i >= stop_index {
                        return None;
                    }
                    let spk_i = *_i;
                    *_i = spk_i.saturating_add(1);
                    if let Some(spk) = spk_cache.get(&spk_i) {
                        debug_assert_eq!(spk, &derive_spk(spk_i), "cached spk must equal derived");
                        return Some((spk_i, spk.clone()));
                    }
                    let spk = derive_spk(spk_i);
                    spk_stage.push((spk_i, spk.clone()));
                    spk_cache.insert(spk_i, spk.clone());
                    Some((spk_i, spk))
                }
            });
            for (new_index, new_spk) in cached_spk_iter {
                let _inserted = self
                    .inner
                    .insert_spk((keychain.clone(), new_index), new_spk);
                debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={keychain:?}, lookahead={lookahead}, next_index={next_index}");
            }
        } else {
            // No cache: derive directly via SpkIterator.
            let spk_iter = SpkIterator::new_with_range(descriptor, next_index..stop_index);
            for (new_index, new_spk) in spk_iter {
                let _inserted = self
                    .inner
                    .insert_spk((keychain.clone(), new_index), new_spk);
                debug_assert!(_inserted, "replenish lookahead: must not have existing spk: keychain={keychain:?}, lookahead={lookahead}, next_index={next_index}");
            }
        }
    }

    /// Unbounded spk iterator for `keychain`'s descriptor (derives on the fly).
    pub fn unbounded_spk_iter(
        &self,
        keychain: K,
    ) -> Option<SpkIterator<Descriptor<DescriptorPublicKey>>> {
        let descriptor = self.get_descriptor(keychain)?.clone();
        Some(SpkIterator::new(descriptor))
    }

    /// Unbounded spk iterators for every keychain.
    pub fn all_unbounded_spk_iters(
        &self,
    ) -> BTreeMap<K, SpkIterator<Descriptor<DescriptorPublicKey>>> {
        self.keychain_to_descriptor_id
            .iter()
            .map(|(k, did)| {
                (
                    k.clone(),
                    SpkIterator::new(self.descriptors.get(did).expect("invariant").clone()),
                )
            })
            .collect()
    }

    /// Iterate revealed spks for keychains in `range`, by merging the stored-spk
    /// iterator against each keychain's `last_revealed` bound.
    pub fn revealed_spks(
        &self,
        range: impl RangeBounds<K>,
    ) -> impl Iterator<Item = KeychainIndexed<K, ScriptBuf>> + '_ {
        let start = range.start_bound();
        let end = range.end_bound();
        // Per-keychain reveal bounds, walked in lockstep with the spk iterator below.
        let mut iter_last_revealed = self
            .keychain_to_descriptor_id
            .range((start, end))
            .map(|(k, did)| (k, self.last_revealed.get(did).cloned()));
        let mut iter_spks = self
            .inner
            .all_spks()
            .range(self.map_to_inner_bounds((start, end)));
        let mut current_keychain = iter_last_revealed.next();
        core::iter::from_fn(move || loop {
            let ((keychain, index), spk) = iter_spks.next()?;
            // Advance the reveal-bound cursor until it catches up with this spk's keychain.
            while current_keychain?.0 < keychain {
                current_keychain = iter_last_revealed.next();
            }
            let (current_keychain, last_revealed) = current_keychain?;
            // Yield only spks at or below the reveal bound (lookahead spks are skipped).
            // Note: `Some(i) <= None` is false, so unrevealed keychains yield nothing.
            if current_keychain == keychain && Some(*index) <= last_revealed {
                break Some(((keychain.clone(), *index), spk.clone()));
            }
        })
    }

    /// Iterate revealed spks of a single keychain.
    pub fn revealed_keychain_spks(
        &self,
        keychain: K,
    ) -> impl DoubleEndedIterator<Item = Indexed<ScriptBuf>> + '_ {
        // Exclusive end: one past the last revealed index (0 if nothing revealed).
        let end = self
            .last_revealed_index(keychain.clone())
            .map(|v| v + 1)
            .unwrap_or(0);
        self.inner
            .all_spks()
            .range((keychain.clone(), 0)..(keychain.clone(), end))
            .map(|((_, index), spk)| (*index, spk.clone()))
    }

    /// Iterate revealed-but-unused spks across all keychains.
    pub fn unused_spks(
        &self,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, ScriptBuf>> + Clone + '_ {
        self.keychain_to_descriptor_id.keys().flat_map(|keychain| {
            self.unused_keychain_spks(keychain.clone())
                .map(|(i, spk)| ((keychain.clone(), i), spk.clone()))
        })
    }

    /// Iterate revealed-but-unused spks of a single keychain.
    pub fn unused_keychain_spks(
        &self,
        keychain: K,
    ) -> impl DoubleEndedIterator<Item = Indexed<ScriptBuf>> + Clone + '_ {
        let end = match self.keychain_to_descriptor_id.get(&keychain) {
            Some(did) => self.last_revealed.get(did).map(|v| *v + 1).unwrap_or(0),
            None => 0,
        };
        self.inner
            .unused_spks((keychain.clone(), 0)..(keychain.clone(), end))
            .map(|((_, i), spk)| (*i, spk))
    }

    /// The next index to reveal for `keychain`, plus whether revealing it would
    /// actually derive a new spk (`false` when the descriptor has no wildcard or
    /// the BIP32 index space is exhausted). `None` if the keychain is unknown.
    pub fn next_index(&self, keychain: K) -> Option<(u32, bool)> {
        let did = self.keychain_to_descriptor_id.get(&keychain)?;
        let last_index = self.last_revealed.get(did).cloned();
        let descriptor = self.descriptors.get(did).expect("invariant");
        let has_wildcard = descriptor.has_wildcard();
        Some(match last_index {
            // Nothing revealed yet: index 0 is fresh.
            None => (0, true),
            // Non-wildcard descriptors only ever have index 0, already revealed.
            Some(_) if !has_wildcard => (0, false),
            Some(index) if index > BIP32_MAX_INDEX => {
                unreachable!("index is out of bounds")
            }
            // At the BIP32 ceiling: cannot reveal further.
            Some(index) if index == BIP32_MAX_INDEX => (index, false),
            Some(index) => (index + 1, true),
        })
    }

    /// Last revealed index per keychain (descriptors without a keychain are skipped).
    pub fn last_revealed_indices(&self) -> BTreeMap<K, u32> {
        self.last_revealed
            .iter()
            .filter_map(|(desc_id, index)| {
                let keychain = self.descriptor_id_to_keychain.get(desc_id)?;
                Some((keychain.clone(), *index))
            })
            .collect()
    }

    /// Last revealed index of `keychain`, if any.
    pub fn last_revealed_index(&self, keychain: K) -> Option<u32> {
        let descriptor_id = self.keychain_to_descriptor_id.get(&keychain)?;
        self.last_revealed.get(descriptor_id).cloned()
    }

    /// Reveal up to the given target index for each listed keychain; unknown
    /// keychains are silently skipped.
    pub fn reveal_to_target_multi(&mut self, keychains: &BTreeMap<K, u32>) -> ChangeSet {
        let mut changeset = ChangeSet::default();
        for (keychain, &index) in keychains {
            self._reveal_to_target(&mut changeset, keychain.clone(), index);
        }
        self._empty_stage_into_changeset(&mut changeset);
        changeset
    }

    /// Reveal spks up to and including `target_index`. Returns the newly revealed
    /// spks and the resulting changeset, or `None` if the keychain is unknown.
    #[must_use]
    pub fn reveal_to_target(
        &mut self,
        keychain: K,
        target_index: u32,
    ) -> Option<(Vec<Indexed<ScriptBuf>>, ChangeSet)> {
        let mut changeset = ChangeSet::default();
        let revealed_spks = self._reveal_to_target(&mut changeset, keychain, target_index)?;
        self._empty_stage_into_changeset(&mut changeset);
        Some((revealed_spks, changeset))
    }

    /// Shared body of the `reveal_to_target*` methods: reveal one index at a time
    /// until `target_index` is covered or no more can be revealed.
    fn _reveal_to_target(
        &mut self,
        changeset: &mut ChangeSet,
        keychain: K,
        target_index: u32,
    ) -> Option<Vec<Indexed<ScriptBuf>>> {
        let mut spks: Vec<Indexed<ScriptBuf>> = vec![];
        loop {
            let (i, new) = self.next_index(keychain.clone())?;
            if !new || i > target_index {
                break;
            }
            match self._reveal_next_spk(changeset, keychain.clone()) {
                Some(indexed_spk) => spks.push(indexed_spk),
                None => break,
            }
        }
        Some(spks)
    }

    /// Reveal the next spk of `keychain`; `None` if the keychain is unknown.
    pub fn reveal_next_spk(&mut self, keychain: K) -> Option<(Indexed<ScriptBuf>, ChangeSet)> {
        let mut changeset = ChangeSet::default();
        let indexed_spk = self._reveal_next_spk(&mut changeset, keychain)?;
        self._empty_stage_into_changeset(&mut changeset);
        Some((indexed_spk, changeset))
    }

    /// Internal reveal: bump `last_revealed` (when the index is actually new),
    /// replenish the lookahead, and return the spk at the revealed index.
    fn _reveal_next_spk(
        &mut self,
        changeset: &mut ChangeSet,
        keychain: K,
    ) -> Option<Indexed<ScriptBuf>> {
        let (next_index, new) = self.next_index(keychain.clone())?;
        if new {
            let did = self.keychain_to_descriptor_id.get(&keychain)?;
            self.last_revealed.insert(*did, next_index);
            changeset.last_revealed.insert(*did, next_index);
            self.replenish_inner_index(*did, &keychain, self.lookahead);
        }
        // The spk must exist: either it was already stored or replenish just derived it.
        let script = self
            .inner
            .spk_at_index(&(keychain.clone(), next_index))
            .expect("we just inserted it");
        Some((next_index, script))
    }

    /// Return the lowest revealed-but-unused spk, revealing a new one only if
    /// every revealed spk is already used.
    pub fn next_unused_spk(&mut self, keychain: K) -> Option<(Indexed<ScriptBuf>, ChangeSet)> {
        let mut changeset = ChangeSet::default();
        let next_unused = self
            .unused_keychain_spks(keychain.clone())
            .next()
            .map(|(i, spk)| (i, spk.to_owned()));
        let spk = next_unused.or_else(|| self._reveal_next_spk(&mut changeset, keychain))?;
        self._empty_stage_into_changeset(&mut changeset);
        Some((spk, changeset))
    }

    /// Iterate indexed outpoints of a single keychain.
    pub fn keychain_outpoints(
        &self,
        keychain: K,
    ) -> impl DoubleEndedIterator<Item = Indexed<OutPoint>> + '_ {
        self.keychain_outpoints_in_range(keychain.clone()..=keychain)
            .map(|((_, i), op)| (i, op))
    }

    /// Iterate indexed outpoints for keychains in `range`.
    pub fn keychain_outpoints_in_range<'a>(
        &'a self,
        range: impl RangeBounds<K> + 'a,
    ) -> impl DoubleEndedIterator<Item = KeychainIndexed<K, OutPoint>> + 'a {
        self.inner
            .outputs_in_range(self.map_to_inner_bounds(range))
            .map(|((k, i), op)| ((k.clone(), *i), op))
    }

    /// Translate a keychain range into the equivalent `(keychain, index)` range of
    /// the inner index (each keychain spans indices `u32::MIN..=u32::MAX`).
    fn map_to_inner_bounds(&self, bound: impl RangeBounds<K>) -> impl RangeBounds<(K, u32)> {
        let start = match bound.start_bound() {
            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MIN)),
            // Excluding a keychain at the start means starting past its last index.
            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MAX)),
            Bound::Unbounded => Bound::Unbounded,
        };
        let end = match bound.end_bound() {
            Bound::Included(keychain) => Bound::Included((keychain.clone(), u32::MAX)),
            // Excluding a keychain at the end means stopping before its first index.
            Bound::Excluded(keychain) => Bound::Excluded((keychain.clone(), u32::MIN)),
            Bound::Unbounded => Bound::Unbounded,
        };
        (start, end)
    }

    /// Highest derivation index of `keychain` that owns an indexed txout.
    pub fn last_used_index(&self, keychain: K) -> Option<u32> {
        self.keychain_outpoints(keychain).last().map(|(i, _)| i)
    }

    /// `last_used_index` for every keychain that has at least one used index.
    pub fn last_used_indices(&self) -> BTreeMap<K, u32> {
        self.keychain_to_descriptor_id
            .iter()
            .filter_map(|(keychain, _)| {
                self.last_used_index(keychain.clone())
                    .map(|index| (keychain.clone(), index))
            })
            .collect()
    }

    /// Apply a changeset: merge cached spks (if persistence is on), then take the
    /// max of stored vs incoming reveal indices and replenish accordingly.
    pub fn apply_changeset(&mut self, changeset: ChangeSet) {
        if self.persist_spks {
            for (did, spks) in changeset.spk_cache {
                self.spk_cache.entry(did).or_default().extend(spks);
            }
        }
        for (did, index) in changeset.last_revealed {
            let v = self.last_revealed.entry(did).or_default();
            // Reveal indices are monotone: never lower an existing value.
            *v = index.max(*v);
            self.replenish_inner_index_did(did, self.lookahead);
        }
    }
}
/// Error returned by [`KeychainTxOutIndex::insert_descriptor`] when an assignment
/// conflicts with an existing one (pairings are immutable once set).
#[derive(Clone, Debug, PartialEq)]
pub enum InsertDescriptorError<K> {
    /// The descriptor is already assigned to a different keychain.
    DescriptorAlreadyAssigned {
        /// The descriptor that was attempted to be re-assigned.
        descriptor: Box<Descriptor<DescriptorPublicKey>>,
        /// The keychain it is already assigned to.
        existing_assignment: K,
    },
    /// The keychain already has a different descriptor assigned.
    KeychainAlreadyAssigned {
        /// The keychain that was attempted to be re-assigned.
        keychain: K,
        /// The descriptor it is already assigned to.
        existing_assignment: Box<Descriptor<DescriptorPublicKey>>,
    },
}
impl<K: core::fmt::Debug> core::fmt::Display for InsertDescriptorError<K> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
InsertDescriptorError::DescriptorAlreadyAssigned {
existing_assignment: existing,
descriptor,
} => {
write!(
f,
"attempt to re-assign descriptor {descriptor:?} already assigned to {existing:?}"
)
}
InsertDescriptorError::KeychainAlreadyAssigned {
existing_assignment: existing,
keychain,
} => {
write!(
f,
"attempt to re-assign keychain {keychain:?} already assigned to {existing:?}"
)
}
}
}
}
// `std::error::Error` is only available with the `std` feature (crate supports no_std).
#[cfg(feature = "std")]
impl<K: core::fmt::Debug> std::error::Error for InsertDescriptorError<K> {}
/// Persistable diff of a [`KeychainTxOutIndex`]: reveal-index bumps and (optionally)
/// newly derived spks. Monoid-merged via [`Merge`].
#[derive(Clone, Debug, Default, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[must_use]
pub struct ChangeSet {
    /// Highest revealed derivation index per descriptor.
    pub last_revealed: BTreeMap<DescriptorId, u32>,
    /// Derived spks per descriptor. `serde(default)` keeps changesets serialized
    /// before this field existed deserializable.
    #[cfg_attr(feature = "serde", serde(default))]
    pub spk_cache: BTreeMap<DescriptorId, BTreeMap<u32, ScriptBuf>>,
}
impl Merge for ChangeSet {
    /// Merge `other` into `self`: reveal indices keep the maximum per descriptor,
    /// and cached spks are unioned (incoming entries win on shared indices, which
    /// must be identical anyway).
    fn merge(&mut self, other: Self) {
        for (desc_id, index) in other.last_revealed {
            // Keep the larger of the existing and incoming reveal index.
            self.last_revealed
                .entry(desc_id)
                .and_modify(|cur| {
                    if *cur < index {
                        *cur = index;
                    }
                })
                .or_insert(index);
        }
        for (did, incoming) in other.spk_cache {
            let merged = self.spk_cache.entry(did).or_default();
            // Sanity check: a (descriptor, index) pair must never map to two
            // different spks across changesets.
            debug_assert!(
                merged.iter().all(|(i, existing)| match incoming.get(i) {
                    Some(new_spk) => new_spk == existing,
                    None => true,
                }),
                "spk of the same descriptor-id and derivation index must not be different"
            );
            merged.extend(incoming);
        }
    }

    /// A changeset is empty when it carries neither reveal bumps nor cached spks.
    fn is_empty(&self) -> bool {
        let no_reveals = self.last_revealed.is_empty();
        let no_cached_spks = self.spk_cache.is_empty();
        no_reveals && no_cached_spks
    }
}
/// Convenience extension for populating a [`SyncRequestBuilder`] from a
/// [`KeychainTxOutIndex`].
pub trait SyncRequestBuilderExt<K> {
    /// Add all revealed spks of keychains within `spk_range` to the sync request.
    fn revealed_spks_from_indexer<R>(self, indexer: &KeychainTxOutIndex<K>, spk_range: R) -> Self
    where
        R: core::ops::RangeBounds<K>;
    /// Add all revealed-but-unused spks to the sync request.
    fn unused_spks_from_indexer(self, indexer: &KeychainTxOutIndex<K>) -> Self;
}
impl<K: Clone + Ord + core::fmt::Debug> SyncRequestBuilderExt<K> for SyncRequestBuilder<(K, u32)> {
    /// Feed the indexer's revealed spks (restricted to `spk_range`) into the builder.
    fn revealed_spks_from_indexer<R>(self, indexer: &KeychainTxOutIndex<K>, spk_range: R) -> Self
    where
        R: core::ops::RangeBounds<K>,
    {
        let revealed = indexer.revealed_spks(spk_range);
        self.spks_with_indexes(revealed)
    }

    /// Feed the indexer's revealed-but-unused spks into the builder.
    fn unused_spks_from_indexer(self, indexer: &KeychainTxOutIndex<K>) -> Self {
        let unused = indexer.unused_spks();
        self.spks_with_indexes(unused)
    }
}
/// Convenience extension for populating a [`FullScanRequestBuilder`] from a
/// [`KeychainTxOutIndex`].
pub trait FullScanRequestBuilderExt<K> {
    /// Add an unbounded spk iterator for every keychain of `indexer` to the request.
    fn spks_from_indexer(self, indexer: &KeychainTxOutIndex<K>) -> Self;
}
impl<K: Clone + Ord + core::fmt::Debug> FullScanRequestBuilderExt<K> for FullScanRequestBuilder<K> {
    /// Register every keychain's unbounded spk iterator with the builder.
    fn spks_from_indexer(self, indexer: &KeychainTxOutIndex<K>) -> Self {
        // Thread the builder through each keychain with a fold instead of
        // reassigning a mutable binding.
        indexer
            .all_unbounded_spk_iters()
            .into_iter()
            .fold(self, |builder, (keychain, spk_iter)| {
                builder.spks_for_keychain(keychain, spk_iter)
            })
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use bdk_testenv::utils::DESCRIPTORS;
    use bitcoin::secp256k1::Secp256k1;
    use miniscript::Descriptor;

    /// End-to-end check of the spk cache: population on insert/reveal, correctness of
    /// cached entries, round-tripping through `ChangeSet`, and that the cache stays
    /// empty when persistence is disabled.
    #[test]
    fn test_spk_cache() {
        let lookahead = 10;
        let use_cache = true;
        let mut index = KeychainTxOutIndex::new(lookahead, use_cache);
        let s = DESCRIPTORS[0];
        let desc = Descriptor::parse_descriptor(&Secp256k1::new(), s)
            .unwrap()
            .0;
        let did = desc.descriptor_id();
        let reveal_to = 2;
        // After revealing up to `reveal_to`, indices 0..=reveal_to+lookahead are derived.
        let end_index = reveal_to + lookahead;
        let _ = index.insert_descriptor(0i32, desc.clone());
        // Inserting a descriptor derives exactly `lookahead` spks ahead of reveal.
        assert_eq!(index.spk_cache.get(&did).unwrap().len() as u32, lookahead);
        assert_eq!(index.next_index(0), Some((0, true)));
        for _ in 0..=reveal_to {
            let _ = index.reveal_next_spk(0).unwrap();
        }
        assert_eq!(index.last_revealed_index(0), Some(reveal_to));
        let spk_cache = &index.spk_cache;
        assert!(!spk_cache.is_empty());
        // Every cached spk must match a fresh derivation and be present in the index.
        for (&did, cached_spks) in spk_cache {
            assert_eq!(did, desc.descriptor_id());
            for (&i, cached_spk) in cached_spks {
                let exp_spk = desc.at_derivation_index(i).unwrap().script_pubkey();
                assert_eq!(&exp_spk, cached_spk);
                assert_eq!(index.spk_at_index(0, i), Some(cached_spk.clone()));
            }
        }
        // The initial changeset carries the whole cache (indices 0..=end_index).
        let init_cs = index.initial_changeset();
        assert_eq!(
            init_cs.spk_cache.get(&did).unwrap().len() as u32,
            end_index + 1
        );
        // Round-trip: rebuilding from the changeset reproduces the cache (keychain
        // type is irrelevant since the cache is keyed by descriptor id).
        let recovered =
            KeychainTxOutIndex::<&str>::from_changeset(lookahead, use_cache, init_cs.clone());
        assert_eq!(&recovered.spk_cache, spk_cache);
        // With persistence disabled, the cache from the changeset is ignored.
        let index = KeychainTxOutIndex::<i32>::from_changeset(lookahead, false, init_cs);
        assert!(index.spk_cache.is_empty());
    }
}