Skip to main content

lwk_wollet/
update.rs

1use crate::cache::{Height, Timestamp};
2use crate::clients::try_unblind;
3use crate::descriptor::Chain;
4use crate::elements::{OutPoint, Script, Transaction, TxOutSecrets, Txid};
5use crate::error::Error;
6use crate::wollet::{update_key, WolletState};
7use crate::EC;
8use crate::{BlindingPublicKey, Wollet, WolletDescriptor};
9use base64::prelude::*;
10use elements::bitcoin::bip32::ChildNumber;
11use elements::bitcoin::hashes::Hash;
12use elements::confidential::{AssetBlindingFactor, ValueBlindingFactor};
13use elements::encode::{Decodable, Encodable};
14use elements::hash_types::TxMerkleNode;
15use elements::{BlockExtData, BlockHash, BlockHeader, TxInWitness, TxOutWitness};
16use lwk_common::SignedBalance;
17use lwk_common::{decrypt_with_nonce_prefix, encrypt_with_random_nonce};
18use std::collections::HashMap;
19use std::sync::atomic;
20
/// Transactions downloaded and unblinded
///
/// Produced during a scan and embedded in an [`Update`]; applied to the
/// wallet cache by `Wollet::apply_update_inner`.
#[derive(Default, Clone, PartialEq, Eq, Debug)]
pub struct DownloadTxResult {
    /// Transactions downloaded, keyed by their txid
    pub txs: Vec<(Txid, Transaction)>,

    /// Unblinded outputs of the downloaded transactions
    pub unblinds: Vec<(OutPoint, TxOutSecrets)>,
}
30
31impl DownloadTxResult {
32    fn is_empty(&self) -> bool {
33        self.txs.is_empty() && self.unblinds.is_empty()
34    }
35
36    fn prune(&mut self, scripts: &HashMap<Script, (Chain, ChildNumber)>) {
37        for (_, tx) in self.txs.iter_mut() {
38            for input in tx.input.iter_mut() {
39                input.witness = TxInWitness::empty();
40            }
41
42            for output in tx.output.iter_mut() {
43                if scripts.contains_key(&output.script_pubkey) {
44                    // we are keeping the rangeproof because it's needed for pset details
45                    output.witness.surjection_proof = None;
46                } else {
47                    output.witness = TxOutWitness::empty();
48                }
49            }
50        }
51    }
52}
53
/// Passing a wallet to [`crate::clients::blocking::BlockchainBackend::full_scan()`] returns this structure which
/// contains the delta of information to be applied to the wallet to reach the latest status.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Update {
    /// The version of the update
    pub version: u8,

    /// The status of the wallet this update is generated from
    ///
    /// If 0 means it has been deserialized from a V0 version
    pub wollet_status: u64,

    /// The new transactions
    pub new_txs: DownloadTxResult,

    /// The new transaction with confirmation heights (or None if not confirmed)
    pub txid_height_new: Vec<(Txid, Option<Height>)>,

    /// The transaction ids to delete, for example after a reorg or a replace by fee.
    pub txid_height_delete: Vec<Txid>,

    /// The timestamps of the transactions, more precisely the timestamp of the block containing the transaction
    pub timestamps: Vec<(Height, Timestamp)>,

    /// The script pub key with the chain, the child number and the blinding pubkey
    /// The blinding pubkey is optional for backward compatibility reasons
    pub scripts_with_blinding_pubkey: Vec<(Chain, ChildNumber, Script, Option<BlindingPublicKey>)>,

    /// The tip of the blockchain at the time the update was generated
    ///
    /// Updates built locally (e.g. from a single applied transaction) use the
    /// all-zeros sentinel returned by `default_blockheader`; when such an
    /// update is applied, the cached tip is left untouched.
    pub tip: BlockHeader,
}
85
impl Update {
    /// Whether this update only changes the tip
    ///
    /// Note: `timestamps` are intentionally not checked here; a tip-only
    /// update may still carry block timestamps (they are merged explicitly
    /// when tip-only updates are coalesced during persistence).
    pub fn only_tip(&self) -> bool {
        self.new_txs.is_empty()
            && self.txid_height_new.is_empty()
            && self.txid_height_delete.is_empty()
            && self.scripts_with_blinding_pubkey.is_empty()
    }

    /// Prune the update, removing unneeded data from transactions.
    ///
    /// Note: this function removes less data than
    /// [`Update::prune_witnesses()`] since it keeps the rangeproofs
    /// of the outputs the [`Wollet`] owns.
    pub fn prune(&mut self, wallet: &Wollet) {
        self.new_txs.prune(&wallet.cache.paths);
    }

    /// Prune witnesses from transactions
    ///
    /// Remove all input and output witnesses from transactions downloaded in
    /// this update. This reduces memory and storage usage significantly.
    ///
    /// However pruning witnesses has effects on functions that use those
    /// rangeproofs (which are part of output witness):
    /// * When building transactions, it's possible to ask for the addition of
    ///   input rangeproofs, using [`crate::TxBuilder::add_input_rangeproofs()`]
    ///   or [`crate::WolletTxBuilder::add_input_rangeproofs()`]; however if the
    ///   rangeproofs have been removed, they cannot be added to the created
    ///   PSET.
    /// * [`Wollet::unblind_utxos_with()`] cannot unblind utxos without
    ///   witnesses.
    /// * [`Wollet::reunblind()`] cannot unblind transactions without
    ///   witnesses.
    pub fn prune_witnesses(&mut self) {
        for (_, tx) in self.new_txs.txs.iter_mut() {
            for input in tx.input.iter_mut() {
                input.witness = TxInWitness::empty();
            }
            for output in tx.output.iter_mut() {
                output.witness = TxOutWitness::empty();
            }
        }
    }

    /// Serialize an [`Update`] to a byte array
    pub fn serialize(&self) -> Result<Vec<u8>, elements::encode::Error> {
        let mut vec = vec![];
        self.consensus_encode(&mut vec)?;
        Ok(vec)
    }

    /// Deserialize an [`Update`] from a byte array
    pub fn deserialize(bytes: &[u8]) -> Result<Update, elements::encode::Error> {
        Update::consensus_decode(bytes)
    }

    /// Serialize an update to a byte array, encrypted with a key derived from the descriptor. Decrypt using [`Self::deserialize_decrypted()`]
    #[allow(deprecated)]
    pub fn serialize_encrypted(&self, desc: &WolletDescriptor) -> Result<Vec<u8>, Error> {
        let plaintext = self.serialize()?;
        let mut cipher = desc.cipher();
        // The random nonce is prepended to the ciphertext by the helper,
        // matching what `decrypt_with_nonce_prefix` expects.
        let ciphertext = encrypt_with_random_nonce(&mut cipher, &plaintext)?;
        Ok(ciphertext)
    }

    /// Serialize an update to a base64 encoded string, encrypted with a key derived from the descriptor. Decrypt using [`Self::deserialize_decrypted_base64()`]
    pub fn serialize_encrypted_base64(&self, desc: &WolletDescriptor) -> Result<String, Error> {
        let vec = self.serialize_encrypted(desc)?;
        Ok(BASE64_STANDARD.encode(vec))
    }

    /// Deserialize an update from a byte array, decrypted with a key derived from the descriptor. Create the byte array using [`Self::serialize_encrypted()`]
    #[allow(deprecated)]
    pub fn deserialize_decrypted(bytes: &[u8], desc: &WolletDescriptor) -> Result<Update, Error> {
        let mut cipher = desc.cipher();
        let plaintext = decrypt_with_nonce_prefix(&mut cipher, bytes)?;
        Ok(Update::deserialize(&plaintext)?)
    }

    /// Deserialize an update from a base64 encoded string, decrypted with a key derived from the descriptor. Create the base64 using [`Self::serialize_encrypted_base64()`]
    pub fn deserialize_decrypted_base64(
        base64: &str,
        desc: &WolletDescriptor,
    ) -> Result<Update, Error> {
        let vec = BASE64_STANDARD
            .decode(base64)
            .map_err(|e| Error::Generic(e.to_string()))?;
        Self::deserialize_decrypted(&vec, desc)
    }

    /// Merge another update into this one.
    ///
    /// This is used to squash multiple sequential updates into a single update.
    ///
    /// NOTE: it's caller responsibility to ensure that the following update is the next in sequence
    /// and updates are not mixed up.
    pub(crate) fn merge(&mut self, following: Update) {
        // By construction there should not be duplicate txs,
        // even if so when merged in the hashmap they will be overridden.
        self.new_txs.txs.extend(following.new_txs.txs);

        self.new_txs.unblinds.extend(following.new_txs.unblinds);

        // When we apply an update we first delete then insert the new txs.
        // Suppose to have Um = U1.merge(U2)
        // The order would be:
        // U1.delete, U1.insert, U2.delete, U2.insert

        // Um.insert = (U1.insert \ U2.delete) ∪ U2.insert
        self.txid_height_new
            .retain(|(t, _)| !following.txid_height_delete.contains(t));
        for (txid, height) in following.txid_height_new {
            // In this case we can't extend, we want to keep the height of the newest update in case of duplicates
            self.txid_height_new.retain(|(t, _)| *t != txid);
            self.txid_height_new.push((txid, height));
        }

        // Um.delete = U1.delete ∪ U2.delete
        self.txid_height_delete.extend(following.txid_height_delete);

        // Merge timestamps and scripts
        self.timestamps.extend(following.timestamps);
        self.scripts_with_blinding_pubkey
            .extend(following.scripts_with_blinding_pubkey);

        // Update tip to other's tip
        self.tip = following.tip;

        // Update version to latest
        self.version = following.version;

        // We don't need to update the wollet status, it's right to keep the status of the first update
    }
}
221
222fn default_blockheader() -> BlockHeader {
223    BlockHeader {
224        version: 0,
225        prev_blockhash: BlockHash::all_zeros(),
226        merkle_root: TxMerkleNode::all_zeros(),
227        time: 0,
228        height: 0,
229        ext: BlockExtData::default(),
230    }
231}
232
/// Update the wallet state from blockchain data
impl Wollet {
    /// Shared implementation of [`Wollet::apply_transaction()`] and
    /// [`Wollet::apply_transaction_no_persist()`].
    ///
    /// Wraps the given transaction into a synthetic unconfirmed [`Update`]
    /// (sentinel tip, no timestamps/scripts) and applies it, returning the
    /// signed balance delta caused by the transaction.
    fn apply_transaction_inner(
        &mut self,
        tx: Transaction,
        do_persist: bool,
    ) -> Result<SignedBalance, Error> {
        let initial_balance = self.balance()?;
        let mut unblinds = vec![];
        let txid = tx.txid();
        // Unblind every output paying to a script the wallet tracks.
        for (vout, output) in tx.output.iter().enumerate() {
            if self.cache.paths.contains_key(&output.script_pubkey) {
                let outpoint = OutPoint::new(txid, vout as u32);
                match try_unblind(output, &self.descriptor) {
                    Ok(unblinded) => {
                        unblinds.push((outpoint, unblinded));
                    }
                    Err(_) => {
                        // Best-effort: an output we cannot unblind is skipped, not fatal.
                        log::info!("{outpoint} cannot unblind, ignoring (could be sender messed up with the blinding process)");
                    }
                }
            }
        }

        let update = Update {
            version: 2,
            wollet_status: self.status(),
            new_txs: DownloadTxResult {
                txs: vec![(txid, tx)],
                unblinds,
            },
            // None height: the transaction is not confirmed yet.
            txid_height_new: vec![(txid, None)],
            txid_height_delete: vec![],
            timestamps: vec![],
            scripts_with_blinding_pubkey: vec![],
            // Sentinel tip: applying this update must not move the cached tip.
            tip: default_blockheader(),
        };

        self.apply_update_inner(update, do_persist)?;
        let final_balance = self.balance()?;
        Ok(final_balance - initial_balance)
    }

    /// Apply an update containing blockchain data
    ///
    /// To update the wallet you need to first obtain the blockchain data relevant for the wallet.
    /// This can be done using [`crate::clients::blocking::BlockchainBackend::full_scan()`], which
    /// returns an [`crate::Update`] that contains new transaction and other data relevant for the
    /// wallet.
    /// The update must then be applied to the [`crate::Wollet`] so that wollet methods such as
    /// [`crate::Wollet::balance()`] or [`crate::Wollet::transactions()`] include the new data.
    ///
    /// However getting blockchain data involves network calls, so between the full scan start and
    /// when the update is applied it might elapse a significant amount of time.
    /// In that interval, applying any update, or any transaction using [`Wollet::apply_transaction()`],
    /// will cause this function to return a [`Error::UpdateOnDifferentStatus`].
    /// Callers should either avoid applying updates and transactions, or they can catch the error and wait for a new full scan to be completed and applied.
    pub fn apply_update(&mut self, update: Update) -> Result<(), Error> {
        self.apply_update_inner(update, true)
    }

    /// Same as [`Wollet::apply_update()`] but only apply the update in memory, without persisting it.
    pub fn apply_update_no_persist(&mut self, update: Update) -> Result<(), Error> {
        self.apply_update_inner(update, false)
    }

    /// Apply `update` to the in-memory cache and, when `do_persist` is true,
    /// persist it to the store.
    ///
    /// Errors with [`Error::UpdateOnDifferentStatus`] when the update was
    /// generated from a different wallet status, and with
    /// [`Error::UpdateHeightTooOld`] when its tip is older than the cached
    /// tip (beyond a one-block reorg allowance).
    fn apply_update_inner(&mut self, update: Update, do_persist: bool) -> Result<(), Error> {
        // TODO should accept &Update

        if update.wollet_status != 0 {
            // wollet status 0 means the update has been created before saving the status (v0) and we can't check
            if self.wollet_status() != update.wollet_status {
                return Err(Error::UpdateOnDifferentStatus {
                    wollet_status: self.wollet_status(),
                    update_status: update.wollet_status,
                });
            }
        }
        let descriptor = self.wollet_descriptor();
        let cache = &mut self.cache;
        // Destructure a clone: the original `update` is kept intact for persistence below.
        let Update {
            version: _,
            wollet_status: _,
            new_txs,
            txid_height_new,
            txid_height_delete,
            timestamps,
            scripts_with_blinding_pubkey,
            tip,
        } = update.clone();

        // Older updates (v<2) may lack blinding pubkeys: derive them from the descriptor.
        let scripts_with_blinding_pubkey =
            compute_blinding_pubkey_if_missing(scripts_with_blinding_pubkey, descriptor)?;

        // A sentinel tip (see `default_blockheader`) means "no tip info": skip tip handling.
        if tip != default_blockheader() {
            if tip.height + 1 < cache.tip.0 {
                // Checking we are not applying an old update while giving enough space for a single block reorg
                return Err(Error::UpdateHeightTooOld {
                    update_tip_height: tip.height,
                    cache_tip_height: cache.tip.0,
                });
            }

            cache.tip = (tip.height, tip.block_hash());
        }

        // Delete-then-insert order mirrors the semantics assumed by `Update::merge`.
        cache.unblinded.extend(new_txs.unblinds);
        cache.all_txs.extend(new_txs.txs);
        cache.heights.retain(|k, _| !txid_height_delete.contains(k));
        cache.heights.extend(txid_height_new.clone());
        cache.rebuild_sorted_txids();
        cache.timestamps.extend(timestamps);
        cache.scripts.extend(
            scripts_with_blinding_pubkey
                .clone()
                .into_iter()
                .map(|(a, b, c, d)| ((a, b), (c, d))),
        );
        cache.paths.extend(
            scripts_with_blinding_pubkey
                .clone()
                .into_iter()
                .map(|(a, b, c, _d)| (c, (a, b))),
        );
        // Track the highest derivation index seen per chain so address reuse is avoided.
        let mut last_used_internal = None;
        let mut last_used_external = None;
        // Also include deleted txids: a tx that was seen and then deleted (e.g. a phantom tx
        // applied locally but not confirmed on-chain) should still count as having used its
        // addresses so those addresses are not reused. This matters for merged updates where
        // phantom txids are removed from txid_height_new but still appear in txid_height_delete.
        let txids_for_last_used: Vec<Txid> = txid_height_new
            .into_iter()
            .map(|(t, _)| t)
            .chain(txid_height_delete.iter().copied())
            .collect();
        for txid in txids_for_last_used {
            if let Some(tx) = cache.all_txs.get(&txid) {
                for (vout, output) in tx.output.iter().enumerate() {
                    if !cache
                        .unblinded
                        .contains_key(&OutPoint::new(txid, vout as u32))
                    {
                        // Output cannot be unblinded by wallet
                        continue;
                    }
                    if let Some((ext_int, ChildNumber::Normal { index })) =
                        cache.paths.get(&output.script_pubkey)
                    {
                        match ext_int {
                            Chain::External => match last_used_external {
                                None => last_used_external = Some(index),
                                Some(last) if index > last => last_used_external = Some(index),
                                _ => {}
                            },
                            Chain::Internal => match last_used_internal {
                                None => last_used_internal = Some(index),
                                Some(last) if index > last => last_used_internal = Some(index),
                                _ => {}
                            },
                        }
                    }
                }
            }
        }
        // `fetch_max` keeps the counters monotonic even if an older update is applied.
        if let Some(last_used_external) = last_used_external {
            cache
                .last_unused_external
                .fetch_max(last_used_external + 1, atomic::Ordering::Relaxed);
        }
        if let Some(last_used_internal) = last_used_internal {
            cache
                .last_unused_internal
                .fetch_max(last_used_internal + 1, atomic::Ordering::Relaxed);
        }

        if do_persist {
            self.persist_update(update)?;
        }

        Ok(())
    }

    /// Persist an update to the store using an indexed key
    ///
    /// Consecutive tip-only updates are coalesced into a single store entry
    /// (keeping the first one's wollet status and concatenating timestamps)
    /// so that a long run of tip refreshes does not grow the store.
    fn persist_update(&self, mut update: Update) -> Result<(), Error> {
        let mut next_index = self
            .next_update_index
            .lock()
            .map_err(|_| Error::Generic("next_update_index lock poisoned".into()))?;

        // Check if we can coalesce with the previous update (both are "only tip" updates)
        if update.only_tip() && *next_index > 0 {
            let prev_key = update_key(*next_index - 1);
            if let Ok(Some(prev_bytes)) = self.store.get(&prev_key) {
                if let Ok(prev_update) = Update::deserialize(&prev_bytes) {
                    if prev_update.only_tip() {
                        // Coalesce: overwrite the previous update
                        // Keep the previous wollet status so reapplying works correctly
                        update.wollet_status = prev_update.wollet_status;
                        // Merge timestamps
                        update.timestamps = [prev_update.timestamps, update.timestamps].concat();

                        let bytes = update.serialize()?;
                        self.store
                            .put(&prev_key, &bytes)
                            .map_err(|e| Error::Generic(format!("store error: {e}")))?;
                        return Ok(());
                    }
                }
            }
        }

        // Store as a new update
        let key = update_key(*next_index);
        let bytes = update.serialize()?;
        self.store
            .put(&key, &bytes)
            .map_err(|e| Error::Generic(format!("store error: {e}")))?;
        *next_index += 1;

        // Possibly squash all persisted updates into one (see `merge_updates`).
        *next_index = self.merge_updates(*next_index)?;

        Ok(())
    }

    /// Squash all persisted updates into a single update at index 0 when
    /// their count exceeds the configured `merge_threshold`.
    ///
    /// Returns the new `next_index` (1 after a merge, unchanged otherwise).
    pub(crate) fn merge_updates(&self, next_index: usize) -> Result<usize, Error> {
        match self.merge_threshold {
            Some(threshold) if threshold < next_index => (),
            _ => return Ok(next_index), // Not merging
        };

        // Read and merge all persisted updates
        let first_bytes = self
            .store
            .get(&update_key(0))
            .map_err(|e| Error::Generic(format!("store error: {e}")))?
            .ok_or_else(|| Error::Generic("expected update 0 to exist".into()))?;
        let mut merged = Update::deserialize(&first_bytes)?;

        for i in 1..next_index {
            let bytes = self
                .store
                .get(&update_key(i))
                .map_err(|e| Error::Generic(format!("store error: {e}")))?
                .ok_or_else(|| Error::Generic(format!("expected update {i} to exist")))?;
            merged.merge(Update::deserialize(&bytes)?);
        }

        // Delete all old updates from last to first to avoid holes on crash
        for j in (0..next_index).rev() {
            self.store
                .remove(&update_key(j))
                .map_err(|e| Error::Generic(format!("failed to remove update {j}: {e}")))?;
        }
        // A crash here or during the removal loop will leave the cache empty or at an old state,
        // which is not the end of the world, the following scan will bring it back.

        // Store the merged update as update 0
        let merged_bytes = merged.serialize()?;
        self.store
            .put(&update_key(0), &merged_bytes)
            .map_err(|e| Error::Generic(format!("failed to store merged update: {e}")))?;

        let next_index = 1;
        Ok(next_index)
    }

    /// Apply a transaction to the wallet state
    ///
    /// Wallet transactions are normally obtained using [`crate::clients::blocking::BlockchainBackend::full_scan()`]
    /// and applying the resulting [`crate::Update`] with [`Wollet::apply_update()`]. However a
    /// full scan involves network calls and it can take a significant amount of time.
    ///
    /// If the caller does not want to wait for a full scan containing the transaction, it can
    /// apply the transaction to the wallet state using this function.
    ///
    /// Note: if this transaction is *not* returned by a next full scan, after [`Wollet::apply_update()`] it will disappear from the
    /// transactions list, will not be included in balance computations, and by the remaining
    /// wollet methods.
    ///
    /// Calling this method, might cause [`Wollet::apply_update()`] to fail with a
    /// [`Error::UpdateOnDifferentStatus`], make sure to either avoid it or handle the error properly.
    pub fn apply_transaction(&mut self, tx: Transaction) -> Result<SignedBalance, Error> {
        self.apply_transaction_inner(tx, true)
    }

    /// Same as [`Wollet::apply_transaction()`] but only apply the update in memory, without persisting it.
    pub fn apply_transaction_no_persist(
        &mut self,
        tx: Transaction,
    ) -> Result<SignedBalance, Error> {
        self.apply_transaction_inner(tx, false)
    }
}
526
527#[allow(clippy::type_complexity)]
528fn compute_blinding_pubkey_if_missing(
529    scripts_with_blinding_pubkey: Vec<(
530        Chain,
531        ChildNumber,
532        Script,
533        Option<elements::secp256k1_zkp::PublicKey>,
534    )>,
535    wollet_descriptor: WolletDescriptor,
536) -> Result<Vec<(Chain, ChildNumber, Script, Option<BlindingPublicKey>)>, Error> {
537    let mut result = Vec::with_capacity(scripts_with_blinding_pubkey.len());
538
539    for (chain, child_number, script_pubkey, maybe_blinding_pubkey) in scripts_with_blinding_pubkey
540    {
541        let blinding_pubkey = match maybe_blinding_pubkey {
542            Some(pubkey) => Some(pubkey),
543            None => {
544                match wollet_descriptor.ct_definite_descriptor(chain, child_number.into()) {
545                    Ok(desc) => {
546                        // TODO: derive the blinding pubkey from the descriptor blinding key and scriptpubkey
547                        //       (needs function in elements-miniscript)
548
549                        let address = desc.address(&EC, &elements::AddressParams::ELEMENTS)?; // we don't need the address, we need only the blinding pubkey, thus we can use any params
550                        Some(
551                            address
552                                .blinding_pubkey
553                                .expect("blinding pubkey is present when using ct descriptors"),
554                        )
555                    }
556                    Err(Error::UnsupportedWithoutDescriptor) => None,
557                    Err(e) => return Err(e),
558                }
559            }
560        };
561        result.push((chain, child_number, script_pubkey, blinding_pubkey));
562    }
563
564    Ok(result)
565}
566
567impl Encodable for DownloadTxResult {
568    fn consensus_encode<W: std::io::Write>(
569        &self,
570        mut w: W,
571    ) -> Result<usize, elements::encode::Error> {
572        let mut bytes_written = 0;
573
574        let txs_len = self.txs.len();
575        bytes_written += elements::encode::VarInt(txs_len as u64).consensus_encode(&mut w)?;
576        for (_txid, tx) in self.txs.iter() {
577            // Avoid serializing Txid since are re-computable from the tx
578            bytes_written += tx.consensus_encode(&mut w)?;
579        }
580
581        let unblinds_len = self.unblinds.len();
582        bytes_written += elements::encode::VarInt(unblinds_len as u64).consensus_encode(&mut w)?;
583        for (out_point, tx_out_secrets) in self.unblinds.iter() {
584            bytes_written += out_point.consensus_encode(&mut w)?;
585
586            // TODO make TxOutSecrets encodable upstream
587            let encodable_tx_out_secrets = EncodableTxOutSecrets {
588                inner: *tx_out_secrets,
589            };
590            bytes_written += encodable_tx_out_secrets.consensus_encode(&mut w)?;
591        }
592
593        Ok(bytes_written)
594    }
595}
596
597impl Decodable for DownloadTxResult {
598    fn consensus_decode<D: std::io::Read>(mut d: D) -> Result<Self, elements::encode::Error> {
599        let mut txs = vec![];
600        let txs_len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
601        for _ in 0..txs_len {
602            let tx = Transaction::consensus_decode(&mut d)?;
603            txs.push((tx.txid(), tx));
604        }
605
606        let mut unblinds = vec![];
607        let unblinds_len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
608        for _ in 0..unblinds_len {
609            let out_point = OutPoint::consensus_decode(&mut d)?;
610            let encodable_tx_out_secrets = EncodableTxOutSecrets::consensus_decode(&mut d)?;
611            unblinds.push((out_point, encodable_tx_out_secrets.inner))
612        }
613
614        Ok(DownloadTxResult { txs, unblinds })
615    }
616}
617
/// Newtype wrapper giving [`TxOutSecrets`] consensus (de)serialization,
/// since the upstream type does not implement Encodable/Decodable.
#[derive(Debug, PartialEq, Eq)]
struct EncodableTxOutSecrets {
    // The wrapped secrets (asset, asset_bf, value, value_bf)
    inner: TxOutSecrets,
}
622impl Encodable for EncodableTxOutSecrets {
623    fn consensus_encode<W: std::io::Write>(
624        &self,
625        mut w: W,
626    ) -> Result<usize, elements::encode::Error> {
627        let mut bytes_written = 0;
628        bytes_written += self.inner.asset.consensus_encode(&mut w)?;
629
630        bytes_written += self
631            .inner
632            .asset_bf
633            .into_inner()
634            .as_ref()
635            .consensus_encode(&mut w)?;
636
637        bytes_written += self.inner.value.consensus_encode(&mut w)?;
638
639        bytes_written += self
640            .inner
641            .value_bf
642            .into_inner()
643            .as_ref()
644            .consensus_encode(&mut w)?;
645
646        Ok(bytes_written)
647    }
648}
649
650impl Decodable for EncodableTxOutSecrets {
651    fn consensus_decode<D: std::io::Read>(mut d: D) -> Result<Self, elements::encode::Error> {
652        Ok(Self {
653            inner: TxOutSecrets {
654                asset: Decodable::consensus_decode(&mut d)?,
655                asset_bf: {
656                    let bytes: [u8; 32] = Decodable::consensus_decode(&mut d)?;
657                    AssetBlindingFactor::from_slice(&bytes[..]).expect("bytes length is 32")
658                },
659                value: Decodable::consensus_decode(&mut d)?,
660                value_bf: {
661                    let bytes: [u8; 32] = Decodable::consensus_decode(&mut d)?;
662                    ValueBlindingFactor::from_slice(&bytes[..]).expect("bytes length is 32")
663                },
664            },
665        })
666    }
667}
668
/// Magic prefix identifying a serialized [`Update`].
const UPDATE_MAGIC_BYTES: [u8; 4] = [0x89, 0x61, 0xb8, 0xc8];
impl Encodable for Update {
    /// Versioned wire format: magic bytes, version, then the fields in
    /// declaration order. `wollet_status` is written from version 1 on,
    /// blinding pubkeys from version 2 on.
    fn consensus_encode<W: std::io::Write>(
        &self,
        mut w: W,
    ) -> Result<usize, elements::encode::Error> {
        let mut bytes_written = 0;

        bytes_written += UPDATE_MAGIC_BYTES.consensus_encode(&mut w)?; // Magic bytes

        bytes_written += self.version.consensus_encode(&mut w)?; // Version

        // wollet_status was introduced with version 1
        if self.version >= 1 {
            bytes_written += self.wollet_status.consensus_encode(&mut w)?;
        }

        bytes_written += self.new_txs.consensus_encode(&mut w)?;

        bytes_written +=
            elements::encode::VarInt(self.txid_height_new.len() as u64).consensus_encode(&mut w)?;
        for (txid, height) in self.txid_height_new.iter() {
            bytes_written += txid.consensus_encode(&mut w)?;
            // None (unconfirmed) is encoded as u32::MAX
            bytes_written += height.unwrap_or(u32::MAX).consensus_encode(&mut w)?;
        }

        bytes_written += elements::encode::VarInt(self.txid_height_delete.len() as u64)
            .consensus_encode(&mut w)?;
        for txid in self.txid_height_delete.iter() {
            bytes_written += txid.consensus_encode(&mut w)?;
        }

        bytes_written +=
            elements::encode::VarInt(self.timestamps.len() as u64).consensus_encode(&mut w)?;
        for (height, timestamp) in self.timestamps.iter() {
            bytes_written += height.consensus_encode(&mut w)?;
            bytes_written += timestamp.consensus_encode(&mut w)?;
        }

        bytes_written += elements::encode::VarInt(self.scripts_with_blinding_pubkey.len() as u64)
            .consensus_encode(&mut w)?;
        for (chain, child_number, script, blinding_pubkey) in
            self.scripts_with_blinding_pubkey.iter()
        {
            bytes_written += script.consensus_encode(&mut w)?;
            bytes_written += match chain {
                Chain::External => 0u8,
                Chain::Internal => 1u8,
            }
            .consensus_encode(&mut w)?;
            bytes_written += u32::from(*child_number).consensus_encode(&mut w)?;
            // Blinding pubkeys were introduced with version 2; a missing
            // pubkey is encoded as 33 zero bytes.
            if self.version >= 2 {
                match blinding_pubkey {
                    Some(blinding_pubkey) => {
                        bytes_written += blinding_pubkey.serialize().consensus_encode(&mut w)?
                    }
                    None => {
                        bytes_written += [0u8; 33].consensus_encode(&mut w)?;
                    }
                }
            }
        }
        bytes_written += self.tip.consensus_encode(&mut w)?;

        Ok(bytes_written)
    }
}
735
736impl Decodable for Update {
737    fn consensus_decode<D: std::io::Read>(mut d: D) -> Result<Self, elements::encode::Error> {
738        let magic_bytes: [u8; 4] = Decodable::consensus_decode(&mut d)?;
739        if magic_bytes != UPDATE_MAGIC_BYTES {
740            return Err(elements::encode::Error::ParseFailed("Invalid magic bytes"));
741        }
742
743        let version = u8::consensus_decode(&mut d)?;
744        if version > 2 {
745            return Err(elements::encode::Error::ParseFailed("Unsupported version"));
746        }
747        let wollet_status = if version >= 1 {
748            u64::consensus_decode(&mut d)?
749        } else {
750            0
751        };
752
753        let new_txs = DownloadTxResult::consensus_decode(&mut d)?;
754
755        let txid_height_new = {
756            let len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
757            let mut vec = Vec::with_capacity(len as usize);
758            for _ in 0..len {
759                let txid = Txid::consensus_decode(&mut d)?;
760                let height = match u32::consensus_decode(&mut d)? {
761                    u32::MAX => None,
762                    x => Some(x),
763                };
764                vec.push((txid, height))
765            }
766            vec
767        };
768
769        let txid_height_delete = {
770            let len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
771            let mut vec = Vec::with_capacity(len as usize);
772            for _ in 0..len {
773                vec.push(Txid::consensus_decode(&mut d)?);
774            }
775            vec
776        };
777
778        let timestamps = {
779            let len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
780            let mut vec = Vec::with_capacity(len as usize);
781            for _ in 0..len {
782                let h = u32::consensus_decode(&mut d)?;
783                let t = u32::consensus_decode(&mut d)?;
784                vec.push((h, t));
785            }
786            vec
787        };
788
789        let scripts_with_blinding_pubkey = {
790            let len = elements::encode::VarInt::consensus_decode(&mut d)?.0;
791            let mut vec = Vec::with_capacity(len as usize);
792            for _ in 0..len {
793                let script = Script::consensus_decode(&mut d)?;
794                let chain = match u8::consensus_decode(&mut d)? {
795                    0 => Chain::External,
796                    1 => Chain::Internal,
797                    _ => return Err(elements::encode::Error::ParseFailed("Invalid chain")),
798                };
799                let child_number: ChildNumber = u32::consensus_decode(&mut d)?.into();
800                let blinding_pubkey = if version == 2 {
801                    let bytes: [u8; 33] = Decodable::consensus_decode(&mut d)?;
802                    if bytes == [0u8; 33] {
803                        None
804                    } else {
805                        Some(BlindingPublicKey::from_slice(&bytes)?)
806                    }
807                } else {
808                    None
809                };
810                vec.push((chain, child_number, script, blinding_pubkey));
811            }
812            vec
813        };
814
815        let tip = BlockHeader::consensus_decode(&mut d)?;
816
817        Ok(Self {
818            version,
819            wollet_status,
820            new_txs,
821            txid_height_new,
822            txid_height_delete,
823            timestamps,
824            scripts_with_blinding_pubkey,
825            tip,
826        })
827    }
828}
829
#[cfg(test)]
mod test {

    use elements::{
        encode::{Decodable, Encodable},
        Script,
    };

    use crate::{update::DownloadTxResult, Chain, Update, WolletBuilder, WolletDescriptor};

    use super::EncodableTxOutSecrets;

    // Builds a small but non-trivial DownloadTxResult fixture (one transaction
    // from liquid block 1 and one unblinded output) shared by the roundtrip
    // tests below.
    pub fn download_tx_result_test_vector() -> DownloadTxResult {
        // There are issues moving this into test_util.
        let tx_out_secret = lwk_test_util::tx_out_secrets_test_vector();
        let mut txs = vec![];
        let mut unblinds = vec![];
        let tx = lwk_test_util::liquid_block_1().txdata.pop().unwrap();
        unblinds.push((tx.input[0].previous_output, tx_out_secret));

        txs.push((tx.txid(), tx));

        DownloadTxResult { txs, unblinds }
    }

    // `only_tip()` must hold for an update containing nothing but the tip
    // header, and must stop holding as soon as any other field is populated.
    #[test]
    fn test_empty_update() {
        let tip = lwk_test_util::liquid_block_1().header;
        let mut update = Update {
            version: 1,
            new_txs: super::DownloadTxResult::default(),
            txid_height_new: Default::default(),
            txid_height_delete: Default::default(),
            timestamps: Default::default(),
            scripts_with_blinding_pubkey: Default::default(),
            tip,
            wollet_status: 1,
        };
        assert!(update.only_tip());
        update
            .txid_height_delete
            .push(<elements::Txid as elements::hashes::Hash>::all_zeros());
        assert!(!update.only_tip());
    }

    // Encode/decode roundtrip of EncodableTxOutSecrets, pinned against the
    // exact byte vector and length (104) from the test-util fixture.
    #[test]
    fn test_tx_out_secrets_roundtrip() {
        let secret = EncodableTxOutSecrets {
            inner: lwk_test_util::tx_out_secrets_test_vector(),
        };

        let mut vec = vec![];
        let len = secret.consensus_encode(&mut vec).unwrap();
        assert_eq!(lwk_test_util::tx_out_secrets_test_vector_bytes(), vec);
        assert_eq!(len, 104);
        assert_eq!(vec.len(), len);

        let back = EncodableTxOutSecrets::consensus_decode(&vec[..]).unwrap();
        assert_eq!(secret, back)
    }

    // Encode/decode roundtrip of DownloadTxResult, pinning the serialized
    // length so any accidental format change is caught.
    #[test]
    fn test_download_tx_result_roundtrip() {
        let result = download_tx_result_test_vector();
        let mut vec = vec![];
        let len = result.consensus_encode(&mut vec).unwrap();
        assert_eq!(len, 1325);
        assert_eq!(vec.len(), len);

        let back = DownloadTxResult::consensus_decode(&vec[..]).unwrap();
        assert_eq!(result, back)
    }

    // Full Update roundtrip for version 1, compared byte-for-byte against the
    // committed v1 test vector so the on-disk format cannot drift silently.
    #[test]
    fn test_update_roundtrip() {
        let txid = lwk_test_util::txid_test_vector();
        let new_txs = download_tx_result_test_vector();
        let scripts_with_blinding_pubkey =
            vec![(Chain::Internal, 3u32.into(), Script::default(), None)];
        // A previous version of this test was misleading: it inserted two
        // elements into a map, so only one element actually survived.

        let tip = lwk_test_util::liquid_block_1().header;
        let update = Update {
            version: 1,
            new_txs,
            txid_height_new: vec![(txid, None), (txid, Some(12))],
            txid_height_delete: vec![txid],
            timestamps: vec![(12, 44), (12, 44)],
            scripts_with_blinding_pubkey,
            tip,
            wollet_status: 1,
        };

        let mut vec = vec![];
        let len = update.consensus_encode(&mut vec).unwrap();
        // To regenerate the expected vector, uncomment the following line:
        // std::fs::write("/tmp/xx.hex", vec.to_hex()).unwrap();
        let exp_vec = lwk_test_util::update_test_vector_v1_bytes();

        assert_eq!(vec.len(), exp_vec.len());
        assert_eq!(vec, exp_vec);
        assert_eq!(len, 2850);
        assert_eq!(vec.len(), len);

        let back = Update::consensus_decode(&vec[..]).unwrap();
        assert_eq!(update, back)
    }

    #[test]
    fn test_update_backward_comp() {
        // An Update can be deserialized from a v0 or a v1 blob, but in the
        // v0 case the wollet_status defaults to 0.
        let v0 = lwk_test_util::update_test_vector_bytes();
        let v1 = lwk_test_util::update_test_vector_v1_bytes();

        let upd_from_v0 = Update::deserialize(&v0).unwrap();

        let mut upd_from_v1 = Update::deserialize(&v1).unwrap();
        assert_ne!(upd_from_v0, upd_from_v1);
        upd_from_v1.wollet_status = 0;
        upd_from_v1.version = 0; // now we save the version in the struct, thus to compare for equality we need this hack
        assert_eq!(upd_from_v0, upd_from_v1);
    }

    // Decrypting an encrypted blob with the wallet descriptor yields the same
    // Update as deserializing the plaintext vector.
    // NOTE(review): fn name has a typo ("decription" -> "decryption").
    #[test]
    fn test_update_decription() {
        let update = Update::deserialize(&lwk_test_util::update_test_vector_bytes()).unwrap();
        let desc: WolletDescriptor = lwk_test_util::wollet_descriptor_string().parse().unwrap();
        let enc_bytes = lwk_test_util::update_test_vector_encrypted_bytes();
        let update_from_enc = Update::deserialize_decrypted(&enc_bytes, &desc).unwrap();
        assert_eq!(update, update_from_enc);

        let enc_bytes2 = lwk_test_util::update_test_vector_encrypted_bytes2();
        let desc2: WolletDescriptor = lwk_test_util::wollet_descriptor_string2().parse().unwrap();
        Update::deserialize_decrypted(&enc_bytes2, &desc2).unwrap();
    }

    // Base64 encrypt/decrypt roundtrip: ciphertexts differ (random nonce) but
    // the decrypted Update is identical.
    #[test]
    fn test_update_base64() {
        let base64 = lwk_test_util::update_test_vector_encrypted_base64();
        let desc: WolletDescriptor = lwk_test_util::wollet_descriptor_string().parse().unwrap();

        let update = Update::deserialize_decrypted_base64(&base64, &desc).unwrap();
        let update_ser = update.serialize_encrypted_base64(&desc).unwrap();
        assert_ne!(base64, update_ser); // decrypted content is the same, but encryption is not deterministic

        let back = Update::deserialize_decrypted_base64(&update_ser, &desc).unwrap();
        assert_eq!(update, back)
    }

    // Pruning strips witnesses (keeping rangeproofs for wallet scripts): the
    // serialized size shrinks drastically while txs count and unblinded
    // secrets stay intact.
    #[test]
    fn test_update_prune() {
        let update_bytes = lwk_test_util::update_test_vector_2_bytes();
        let update = Update::deserialize(&update_bytes).unwrap();
        let desc: WolletDescriptor = lwk_test_util::wollet_descriptor_string().parse().unwrap();
        let wollet = WolletBuilder::new(crate::ElementsNetwork::LiquidTestnet, desc)
            .build()
            .unwrap();
        assert_eq!(update_bytes.len(), 18436);
        assert_eq!(update.serialize().unwrap().len(), 18436);
        let update_pruned = {
            let mut u = update.clone();
            u.prune(&wollet);
            u
        };
        assert_eq!(update_pruned.serialize().unwrap().len(), 1106);
        assert_eq!(update.new_txs.txs.len(), update_pruned.new_txs.txs.len());
        assert_eq!(update.new_txs.unblinds, update_pruned.new_txs.unblinds);
    }
}