spark_rust/wallet/internal_handlers/implementations/transfer.rs

use crate::common_types::types::frost::FrostSigningCommitments;
use crate::common_types::types::AbsoluteLockTime;
use crate::common_types::types::OutPoint;
use crate::common_types::types::PublicKey;
use crate::common_types::types::ScriptBuf;
use crate::common_types::types::Secp256k1;
use crate::common_types::types::Secp256k1Message;
use crate::common_types::types::SecretKey;
use crate::common_types::types::TransactionVersion;
use crate::common_types::types::TxIn;
use crate::common_types::types::Uuid;
use crate::common_types::types::Witness;
use crate::error::{validation::ValidationError, SparkSdkError, TransactionError};
use crate::signer::traits::SparkSigner;
use crate::wallet::handlers::transfer::Transfer;
use crate::wallet::internal_handlers::traits::transfer::LeafKeyTweak;
use crate::wallet::internal_handlers::traits::transfer::LeafRefundSigningData;
use crate::wallet::internal_handlers::traits::transfer::TransferInternalHandlers;
use crate::wallet::leaf_manager::SparkLeaf;
use crate::wallet::utils::bitcoin::bitcoin_tx_from_bytes;
use crate::wallet::utils::bitcoin::serialize_bitcoin_transaction;
use crate::wallet::utils::sequence::next_sequence;
use crate::SparkSdk;
use bitcoin::Sequence;
use spark_cryptography::secret_sharing::VerifiableSecretShare;
use spark_protos::common::SignatureIntent;
use spark_protos::common::SigningCommitment as ProtoSigningCommitment;
use spark_protos::spark::CancelSendTransferRequest;
use spark_protos::spark::CancelSendTransferResponse;
use spark_protos::spark::ClaimLeafKeyTweak;
use spark_protos::spark::ClaimTransferTweakKeysRequest;
use spark_protos::spark::CompleteSendTransferRequest;
use spark_protos::spark::FinalizeNodeSignaturesRequest;
use spark_protos::spark::LeafRefundTxSigningJob;
use spark_protos::spark::QueryAllTransfersRequest;
use spark_protos::spark::QueryAllTransfersResponse;
use spark_protos::spark::SendLeafKeyTweak;
use spark_protos::spark::SigningJob;
use spark_protos::spark::StartSendTransferRequest;
use spark_protos::spark::TreeNode;
use std::collections::HashMap;

// external crates
use sha256::digest;
use tonic::async_trait;

#[async_trait]
impl<S: SparkSigner + Send + Sync + Clone + 'static> TransferInternalHandlers<S> for SparkSdk<S> {
    /// Top-level transfer execution function, given the leaves to transfer. The function expects the leaves to be in [`LeafKeyTweak`] format.
    ///
    /// # Arguments
    ///
    /// * `leaves` - A vector of [`LeafKeyTweak`] objects representing the leaves to transfer.
    /// * `receiver_identity_pubkey` - The public key of the receiver.
    /// * `expiry_time` - The expiry time of the transfer, as a Unix timestamp in seconds.
    ///
    /// # Returns
    ///
    /// A [`Transfer`] object representing the transfer, converted from the protobuf object returned by the Spark API in the final step of the sender's side of the transfer.
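    ///
    /// # Example
    ///
    /// A minimal usage sketch, assuming an initialized `SparkSdk` and leaves
    /// already prepared as [`LeafKeyTweak`]s (`sdk`, `leaf_tweaks`,
    /// `receiver_pubkey`, and `expiry_unix_seconds` are placeholders):
    ///
    /// ```ignore
    /// let transfer = sdk
    ///     .start_send_transfer(&leaf_tweaks, &receiver_pubkey, expiry_unix_seconds)
    ///     .await?;
    /// println!("started transfer {}", transfer.id);
    /// ```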
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn start_send_transfer(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: &PublicKey,
        expiry_time: u64,
    ) -> Result<Transfer, SparkSdkError> {
        // Start the transfer, sign the refunds, and aggregate the refund signatures.
        let (transfer, refund_signatures) = self
            .send_transfer_sign_refunds(leaves, receiver_identity_pubkey, expiry_time)
            .await?;

        // Tweak the leaf keys and complete the transfer with each operator.
        let transfer = self
            .send_transfer_tweak_key(&transfer, leaves, &refund_signatures)
            .await?;

        Ok(transfer)
    }

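    /// Sends the leaf key tweaks to every operator to complete the sender's
    /// side of the transfer. If preparing or sending the tweaks fails, the
    /// transfer is cancelled before the original error is propagated.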
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn send_transfer_tweak_key(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<Transfer, SparkSdkError> {
        // Prepare the key tweaks for the transfer.
        let key_tweak_input_map =
            match self.prepare_send_transfer_key_tweaks(transfer, leaves, refund_signature_map) {
                Ok(map) => map,
                Err(e) => {
                    // If preparation fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) =
                        self.cancel_send_transfer(transfer.id.to_string()).await
                    {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after key tweak preparation error: {}",
                            cancel_err
                        );
                    } else {
                        // Log the successful cancellation.
                        #[cfg(feature = "telemetry")]
                        tracing::info!("Cancelled transfer {} successfully", transfer.id);
                    }

                    return Err(e);
                }
            };

        let mut updated_transfer: Option<Transfer> = None;

        for operator in &self.config.spark_config.operator_pool.operators {
            let request = CompleteSendTransferRequest {
                transfer_id: transfer.id.to_string(),
                owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
                leaves_to_send: key_tweak_input_map[&operator.frost_identifier_str()].clone(),
            };

            let complete_response: Option<Transfer> = match self
                .config
                .spark_config
                .call_with_retry(
                    request,
                    |mut client, req| {
                        Box::pin(async move { client.complete_send_transfer(req).await })
                    },
                    Some(operator.id),
                )
                .await
            {
                Ok(response) => response
                    .transfer
                    .map(|transfer| transfer.try_into())
                    .transpose()?,

                Err(status_err) => {
                    // If the request fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) =
                        self.cancel_send_transfer(transfer.id.to_string()).await
                    {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after complete_send_transfer error: {}",
                            cancel_err
                        );
                    }
                    return Err(status_err);
                }
            };

            if let Some(response) = complete_response {
                if let Some(existing) = &updated_transfer {
                    // Operators should agree on the transfer. A mismatch here is
                    // unrecoverable; at this point it is too late to cancel the
                    // transfer.
                    if existing != &response {
                        return Err(SparkSdkError::from(TransactionError::Transfer {
                            reason: "Inconsistent transfer responses from operators".into(),
                        }));
                    }
                } else {
                    updated_transfer = Some(response);
                }
            }
        }

        updated_transfer.ok_or_else(|| {
            SparkSdkError::from(TransactionError::Transfer {
                reason: "No transfer returned by the operators".into(),
            })
        })
    }

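    /// Starts the send transfer with the Spark operators, then signs the
    /// refund transactions and aggregates the sender's signatures with the
    /// operators' signatures.
    ///
    /// Returns the created [`Transfer`] together with a map from leaf (node)
    /// ID to the aggregated refund signature.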
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn send_transfer_sign_refunds(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: &PublicKey,
        expiry_time: u64,
    ) -> Result<(Transfer, HashMap<String, Vec<u8>>), SparkSdkError> {
        // Generate a new transfer ID.
        let transfer_id = Uuid::now_v7().to_string();

        // Prepare signing data for each leaf.
        let mut leaf_data_map = HashMap::new();
        for leaf_key in leaves {
            let commitments = self.signer.generate_frost_signing_commitments()?;
            let node_tx = bitcoin_tx_from_bytes(&leaf_key.leaf.node_tx)?;

            let secp = Secp256k1::new();
            let signing_public_key = leaf_key.old_signing_private_key.public_key(&secp);

            let leaf_refund_signing_data = LeafRefundSigningData {
                signing_public_key,
                receiving_pubkey: *receiver_identity_pubkey,
                commitment: frost_commitment_to_proto_commitment(&commitments)?,
                tx: node_tx,
                refund_tx: None,
                vout: leaf_key.leaf.vout,
            };
            leaf_data_map.insert(leaf_key.leaf.id.clone(), leaf_refund_signing_data);
        }

        // Create the signing jobs needed for the transfer.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaves, &mut leaf_data_map)?;

        // Send the initial transfer request as the sender.
        let request_data = StartSendTransferRequest {
            transfer_id: transfer_id.clone(),
            leaves_to_send: signing_jobs,
            owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
            receiver_identity_public_key: receiver_identity_pubkey.serialize().to_vec(),
            expiry_time: Some(prost_types::Timestamp {
                seconds: expiry_time as i64,
                nanos: 0,
            }),
            key_tweak_proofs: Default::default(),
        };

        let response = self
            .config
            .spark_config
            .call_with_retry(
                request_data,
                |mut client, req| Box::pin(async move { client.start_send_transfer(req).await }),
                None,
            )
            .await?;

        let transfer: Transfer = response
            .transfer
            .ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Missing transfer in StartSendTransferResponse".to_string(),
                })
            })?
            .try_into()?;

        // Sign the refunds and aggregate your signatures with the Spark
        // operators' signatures. This yields the refund signatures that are
        // handed to the receiver.
        let signing_results = response.signing_results;
        let signatures =
            match self
                .signer
                .sign_transfer_refunds(&leaf_data_map, &signing_results, vec![])
            {
                Ok(sigs) => sigs,
                Err(e) => {
                    // If signing fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) =
                        self.cancel_send_transfer(transfer.id.to_string()).await
                    {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after signing error: {}",
                            cancel_err
                        );
                    }
                    return Err(e);
                }
            };

        // Create a map of node IDs to refund signatures.
        let mut signature_map = HashMap::new();
        for leaf_signature in signatures {
            signature_map.insert(leaf_signature.node_id, leaf_signature.refund_tx_signature);
        }

        Ok((transfer, signature_map))
    }

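    /// Prepares the key tweaks for every leaf in the transfer, grouped by
    /// operator (FROST identifier), so that each operator receives its own
    /// set of [`SendLeafKeyTweak`]s.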
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_send_transfer_key_tweaks(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<HashMap<String, Vec<SendLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();
        for leaf in leaves {
            // Get the refund signature for this leaf.
            let leaf_refund_signature = refund_signature_map.get(&leaf.leaf.id).cloned();

            // Prepare the key tweak data for this leaf.
            let leaf_tweaks = self.prepare_single_send_transfer_key_tweak(
                &transfer.id.to_string(),
                leaf,
                &transfer.receiver_identity_public_key,
                leaf_refund_signature,
            )?;

            // Add the tweaks to the map, grouped by operator identifier.
            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

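    /// Builds the per-operator key tweak for a single leaf: derives the
    /// tweaked key, splits it with verifiable secret sharing, encrypts the
    /// new signing key to the receiver with ECIES, and signs the payload with
    /// the sender's identity key.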
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_single_send_transfer_key_tweak(
        &self,
        transfer_id: &str,
        leaf: &LeafKeyTweak,
        receiver_pubkey: &PublicKey,
        refund_signature: Option<Vec<u8>>,
    ) -> Result<HashMap<String, SendLeafKeyTweak>, SparkSdkError> {
        let refund_signature = refund_signature.unwrap_or_default();

        let secp = Secp256k1::new();
        let publickey = &leaf.old_signing_private_key.public_key(&secp);
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            publickey,
            &leaf.new_signing_public_key,
            true,
        )?;

        // Split the secret key that belongs to the tweaked public key.
        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                &tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.operator_pool.operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.operator_pool.operators {
            let share = find_share(&shares, operator.id.into())?;

            let share_scalar = SecretKey::from_slice(&share.secret_share.share.to_bytes())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier_str(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        // Encrypt the new signing private key to the receiver using ECIES.
        let secret_cipher = self
            .signer
            .encrypt_secret_key_with_ecies(receiver_pubkey, &leaf.new_signing_public_key)?;

        // Create the payload (matching the Go implementation) by concatenating:
        // 1. the leaf ID
        // 2. the transfer ID
        // 3. the encrypted secret (secret_cipher)
        let payload = [
            leaf.leaf.id.as_bytes(), // leaf ID bytes
            transfer_id.as_bytes(),  // transfer ID bytes
            &secret_cipher,          // encrypted secret bytes
        ]
        .concat();

        // Sign the payload using ECDSA with our identity private key.
        let network = self.config.spark_config.network.to_bitcoin_network();
        let signature = self
            .signer
            .sign_message_ecdsa_with_identity_key(payload, true, network)?;

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.operator_pool.operators {
            let share = find_share(&shares, operator.id.into())?;
            leaf_tweaks_map.insert(
                operator.frost_identifier_str(),
                SendLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                    secret_cipher: secret_cipher.clone(),
                    signature: signature.serialize_der().to_vec(),
                    refund_signature: refund_signature.to_vec(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

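    /// Builds one [`LeafRefundTxSigningJob`] per leaf for the Spark operators
    /// to sign, storing each newly created refund transaction back into
    /// `leaf_data_map`.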
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_refund_so_signing_jobs(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        leaf_data_map: &mut HashMap<String, LeafRefundSigningData>,
    ) -> Result<Vec<LeafRefundTxSigningJob>, SparkSdkError> {
        let mut signing_jobs = Vec::new();

        for leaf in leaves {
            let refund_signing_data = leaf_data_map.get_mut(&leaf.leaf.id).ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Leaf data not found".to_string(),
                })
            })?;

            let signing_pubkey = &refund_signing_data.signing_public_key;
            let refund_tx =
                self.create_refund_tx(&leaf.leaf, &refund_signing_data.receiving_pubkey)?;
            let refund_bytes = serialize_bitcoin_transaction(&refund_tx)?;

            refund_signing_data.refund_tx = Some(refund_tx);

            let refund_commitment_proto = refund_signing_data.commitment.clone();

            signing_jobs.push(LeafRefundTxSigningJob {
                leaf_id: leaf.leaf.id.clone(),
                refund_tx_signing_job: Some(SigningJob {
                    signing_public_key: signing_pubkey.serialize().to_vec(),
                    raw_tx: refund_bytes,
                    signing_nonce_commitment: Some(refund_commitment_proto),
                }),
            });
        }

        Ok(signing_jobs)
    }

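    /// Creates a new refund transaction for a leaf. The transaction spends
    /// the first output of the leaf's node transaction to a P2TR address for
    /// `receiving_pubkey`, with the relative timelock decremented via
    /// [`next_sequence`].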
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn create_refund_tx(
        &self,
        leaf: &TreeNode,
        receiving_pubkey: &PublicKey,
    ) -> Result<bitcoin::Transaction, SparkSdkError> {
        let node_tx = bitcoin_tx_from_bytes(&leaf.node_tx)?;
        let refund_tx = bitcoin_tx_from_bytes(&leaf.refund_tx)?;

        let mut new_refund_tx = bitcoin::Transaction {
            version: TransactionVersion::TWO,
            lock_time: AbsoluteLockTime::ZERO,
            input: vec![],
            output: vec![],
        };

        let old_sequence = refund_tx.input[0].sequence.0;
        let sequence = Sequence(next_sequence(old_sequence));

        new_refund_tx.input.push(TxIn {
            previous_output: OutPoint {
                txid: node_tx.compute_txid(),
                vout: 0,
            },
            script_sig: ScriptBuf::default(),
            sequence,
            witness: Witness::default(),
        });

        let secp = Secp256k1::new();
        let addr = bitcoin::Address::p2tr(
            &secp,
            receiving_pubkey.x_only_public_key().0,
            None,
            self.config.spark_config.network.to_bitcoin_network(),
        );

        new_refund_tx.output.push(bitcoin::TxOut {
            value: node_tx.output[0].value,
            script_pubkey: addr.script_pubkey(),
        });

        Ok(new_refund_tx)
    }

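    /// Claims an incoming transfer end to end: tweaks the leaf keys, signs
    /// the refund transactions, finalizes the node signatures, and stores the
    /// updated leaves in the leaf manager.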
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_finalize_incoming_transfer(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        // First, tweak the keys.
        self.claim_transfer_tweak_keys(transfer, leaves).await?;

        // Then sign the refunds.
        let signatures = self.claim_transfer_sign_refunds(transfer, leaves).await?;

        // Finally, finalize the transfer.
        self.finalize_transfer(&signatures).await?;

        let mut leaf_nodes = vec![];
        for (leaf, _) in leaves.iter().zip(signatures.iter()) {
            // Create a new refund transaction with a decremented timelock.
            let new_refund_tx = self.create_refund_tx(&leaf.leaf, &leaf.new_signing_public_key)?;
            let new_refund_tx_bytes = serialize_bitcoin_transaction(&new_refund_tx)?;

            let mut updated_leaf = leaf.leaf.clone();
            updated_leaf.refund_tx = new_refund_tx_bytes;
            leaf_nodes.push(SparkLeaf::Bitcoin(updated_leaf));
        }

        self.leaf_manager.insert_leaves(leaf_nodes, false)?;

        Ok(())
    }

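    /// Sends the receiver's key tweaks for the claimed leaves to every
    /// operator.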
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_transfer_tweak_keys(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        let leaves_tweaks_map = self.prepare_claim_leaves_key_tweaks(leaves)?;

        for operator in &self.config.spark_config.operator_pool.operators {
            let request_data = ClaimTransferTweakKeysRequest {
                transfer_id: transfer.id.to_string(),
                owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
                leaves_to_receive: leaves_tweaks_map[&operator.frost_identifier_str()].clone(),
            };

            self.config
                .spark_config
                .call_with_retry(
                    request_data,
                    |mut client, req| {
                        Box::pin(async move { client.claim_transfer_tweak_keys(req).await })
                    },
                    Some(operator.id),
                )
                .await?;
        }

        Ok(())
    }

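    /// Prepares the claim-side key tweaks for every leaf, grouped by operator
    /// (FROST identifier).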
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_claim_leaves_key_tweaks(
        &self,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<HashMap<String, Vec<ClaimLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();

        for leaf in leaves {
            let leaf_tweaks = self.prepare_claim_leaf_key_tweaks(leaf)?;

            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

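    /// Builds the per-operator claim key tweak for a single leaf by deriving
    /// the tweaked key and splitting it with verifiable secret sharing.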
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_claim_leaf_key_tweaks(
        &self,
        leaf: &LeafKeyTweak,
    ) -> Result<HashMap<String, ClaimLeafKeyTweak>, SparkSdkError> {
        let secp = Secp256k1::new();
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            &leaf.old_signing_private_key.public_key(&secp),
            &leaf.new_signing_public_key,
            true,
        )?;

        // Split the secret key that belongs to the tweaked public key.
        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                &tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.operator_pool.operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.operator_pool.operators {
            let share = find_share(&shares, operator.id.into())?;

            // Convert the k256 share scalar into a secp256k1 secret key to
            // derive the public tweak share for this operator.
            let share_scalar = SecretKey::from_slice(&share.secret_share.share.to_bytes())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier_str(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.operator_pool.operators {
            let share = find_share(&shares, operator.id.into())?;

            leaf_tweaks_map.insert(
                operator.frost_identifier_str(),
                ClaimLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

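    /// Signs the refund transactions for the claimed leaves using the new
    /// signing keys and returns the aggregated node signatures.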
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_transfer_sign_refunds(
        &self,
        transfer: &Transfer,
        leaf_keys: &Vec<LeafKeyTweak>,
    ) -> Result<Vec<spark_protos::spark::NodeSignatures>, SparkSdkError> {
        // Create a map to store refund signing data for each leaf.
        let mut leaf_data_map = HashMap::new();

        for leaf_key in leaf_keys {
            // For each leaf key, create the signing data using the new signing key.
            let commitments = self.signer.generate_frost_signing_commitments()?;

            // Deserialize the transaction from raw bytes.
            let tx = match bitcoin::consensus::deserialize(&leaf_key.leaf.node_tx) {
                Ok(tx) => tx,
                Err(e) => {
                    return Err(SparkSdkError::from(ValidationError::InvalidInput {
                        field: format!("Failed to deserialize transaction: {}", e),
                    }));
                }
            };

            // Store all the necessary data for signing.
            leaf_data_map.insert(
                leaf_key.leaf.id.clone(),
                LeafRefundSigningData {
                    signing_public_key: leaf_key.new_signing_public_key,
                    receiving_pubkey: leaf_key.new_signing_public_key,
                    commitment: frost_commitment_to_proto_commitment(&commitments)?,
                    tx,
                    refund_tx: None,
                    vout: leaf_key.leaf.vout,
                },
            );
        }

        // Prepare the signing jobs for each leaf.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaf_keys, &mut leaf_data_map)?;

        // Request signing of the refunds.
        let request = spark_protos::spark::ClaimTransferSignRefundsRequest {
            transfer_id: transfer.id.to_string(),
            owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
            signing_jobs,
            key_tweak_proofs: Default::default(),
        };

        let response = self
            .config
            .spark_config
            .call_with_retry(
                request,
                |mut client, req| {
                    Box::pin(async move { client.claim_transfer_sign_refunds(req).await })
                },
                None,
            )
            .await?;

        // Process the signing results and generate the final signatures.
        self.signer
            .sign_transfer_refunds(&leaf_data_map, &response.signing_results, vec![])
    }

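    /// Submits the aggregated node signatures to finalize the transfer.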
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn finalize_transfer(
        &self,
        signatures: &[spark_protos::spark::NodeSignatures],
    ) -> Result<(), SparkSdkError> {
        let request = FinalizeNodeSignaturesRequest {
            intent: SignatureIntent::Transfer as i32,
            node_signatures: signatures.to_vec(),
        };

        self.config
            .spark_config
            .call_with_retry(
                request,
                |mut client, req| {
                    Box::pin(async move { client.finalize_node_signatures(req).await })
                },
                None,
            )
            .await?;

        Ok(())
    }

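    /// Verifies a pending incoming transfer by checking the sender's ECDSA
    /// signature over each leaf's payload and decrypting each leaf's secret
    /// cipher with the wallet's identity key.
    ///
    /// Returns a map from leaf ID to the decrypted leaf signing key.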
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn verify_pending_transfer(
        &self,
        transfer: &Transfer,
    ) -> Result<HashMap<String, SecretKey>, SparkSdkError> {
        // Map from leaf ID to the decrypted leaf private key.
        let mut leaf_privkey_map = HashMap::new();

        // Secp256k1 context used for signature verification.
        let secp = Secp256k1::new();

        // For each leaf, verify the sender's signature, then decrypt the secret.
        for leaf in &transfer.leaves {
            // Build the message to verify: leaf ID || transfer ID || secret cipher.
            let mut payload = leaf
                .leaf
                .as_ref()
                .map(|l| l.id.to_string())
                .unwrap_or_default()
                .into_bytes();
            payload.extend_from_slice(transfer.id.to_string().as_bytes());
            payload.extend_from_slice(&leaf.secret_cipher);

            // Hash the payload.
            let payload_hash_hex = digest(&payload);
            let payload_hash_bytes = hex::decode(payload_hash_hex).map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("Failed to decode hex payload hash: {e}"),
                })
            })?;

            // Build the secp256k1 message and verify the ECDSA signature.
            let msg = Secp256k1Message::from_digest_slice(&payload_hash_bytes).map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("Failed to create message for signature verify: {e}"),
                })
            })?;

            let signature =
                leaf.signature
                    .ok_or(SparkSdkError::from(ValidationError::InvalidInput {
                        field: "Missing signature".to_string(),
                    }))?;

            secp.verify_ecdsa(&msg, &signature, &transfer.sender_identity_public_key)
                .map_err(|e| {
                    SparkSdkError::from(ValidationError::InvalidInput {
                        field: format!("Failed to verify signature: {e}"),
                    })
                })?;

            // Decrypt the secret cipher with our identity key (the SparkSigner
            // performs the ECIES decryption).
            let network = self.config.spark_config.network.to_bitcoin_network();
            let leaf_secret = self
                .signer
                .decrypt_secret_key_with_ecies(&leaf.secret_cipher, network)?;

            // Record the decrypted leaf secret (private key) in the map.
            if let Some(leaf_node) = &leaf.leaf {
                leaf_privkey_map.insert(leaf_node.id.to_string(), leaf_secret);
            }
        }

        Ok(leaf_privkey_map)
    }

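    /// Queries all transfers for this wallet's identity, paginated by `limit`
    /// and `offset`.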
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn query_all_transfers(
        &self,
        limit: u32,
        offset: u32,
    ) -> Result<QueryAllTransfersResponse, SparkSdkError> {
        let identity_public_key = self.get_spark_address()?;
        let request = QueryAllTransfersRequest {
            limit: limit as i64,
            offset: offset as i64,
            identity_public_key: identity_public_key.serialize().to_vec(),
        };

        let response = self
            .config
            .spark_config
            .call_with_retry(
                request,
                |mut client, req| Box::pin(async move { client.query_all_transfers(req).await }),
                None,
            )
            .await?;

        Ok(response)
    }

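    /// Cancels an in-flight send transfer by broadcasting the cancellation to
    /// all operators concurrently, returning the transfer from the first
    /// successful response, if any.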
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn cancel_send_transfer(
        &self,
        transfer_id: String,
    ) -> Result<Option<Transfer>, SparkSdkError> {
        let request_data = CancelSendTransferRequest {
            transfer_id,
            sender_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
        };

        let futures = self
            .config
            .spark_config
            .operator_pool
            .operators
            .iter()
            .map(|operator| {
                let operator_id = operator.id;
                let request_clone = request_data.clone();

                async move {
                    let response = self
                        .config
                        .spark_config
                        .call_with_retry(
                            request_clone,
                            |mut client, req| {
                                Box::pin(async move { client.cancel_send_transfer(req).await })
                            },
                            Some(operator_id),
                        )
                        .await?;

                    Ok(response)
                }
            });

        let results: Vec<Result<CancelSendTransferResponse, SparkSdkError>> =
            futures::future::join_all(futures).await;

        // Return the first successful response, if any.
        if let Some(result) = results.into_iter().flatten().next() {
            return result.transfer.map(|tr| tr.try_into()).transpose();
        }

        Ok(None)
    }
}

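/// Finds the verifiable secret share whose index corresponds to the given
/// operator ID. Share indices start at 1, so the share for `operator_id` has
/// index `operator_id + 1`.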
fn find_share(
    shares: &[VerifiableSecretShare],
    operator_id: u64,
) -> Result<VerifiableSecretShare, SparkSdkError> {
    let target_index = k256::Scalar::from(operator_id + 1);
    shares
        .iter()
        .find(|s| s.secret_share.index == target_index)
        .cloned()
        .ok_or_else(|| {
            SparkSdkError::from(ValidationError::InvalidInput {
                field: "Share not found".to_string(),
            })
        })
}

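/// Converts FROST signing commitments into the protobuf representation by
/// serializing the hiding and binding nonce commitments.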
fn frost_commitment_to_proto_commitment(
    commitments: &FrostSigningCommitments,
) -> Result<ProtoSigningCommitment, SparkSdkError> {
    let hiding = commitments.hiding().serialize().map_err(|e| {
        SparkSdkError::from(ValidationError::InvalidInput {
            field: format!("Failed to serialize hiding commitment: {e}"),
        })
    })?;
    let binding = commitments.binding().serialize().map_err(|e| {
        SparkSdkError::from(ValidationError::InvalidInput {
            field: format!("Failed to serialize binding commitment: {e}"),
        })
    })?;

    Ok(ProtoSigningCommitment { hiding, binding })
}