// spark_rust/wallet/internal_handlers/implementations/transfer.rs

use crate::common_types::types::frost::FrostSigningCommitments;
use crate::common_types::types::AbsoluteLockTime;
use crate::common_types::types::EcdsaSignature;
use crate::common_types::types::OutPoint;
use crate::common_types::types::PublicKey;
use crate::common_types::types::ScriptBuf;
use crate::common_types::types::Secp256k1;
use crate::common_types::types::Secp256k1Message;
use crate::common_types::types::SecretKey;
use crate::common_types::types::TransactionVersion;
use crate::common_types::types::TxIn;
use crate::common_types::types::Uuid;
use crate::common_types::types::Witness;
use crate::error::{
    network::NetworkError, transaction::TransactionError, validation::ValidationError,
    SparkSdkError,
};
use crate::signer::traits::SparkSigner;
use crate::wallet::internal_handlers::traits::transfer::LeafKeyTweak;
use crate::wallet::internal_handlers::traits::transfer::LeafRefundSigningData;
use crate::wallet::internal_handlers::traits::transfer::TransferInternalHandlers;
use crate::wallet::leaf_manager::SparkLeaf;
use crate::wallet::utils::bitcoin::bitcoin_tx_from_bytes;
use crate::wallet::utils::bitcoin::parse_public_key;
use crate::wallet::utils::bitcoin::serialize_bitcoin_transaction;
use crate::wallet::utils::sequence::next_sequence;
use crate::SparkSdk;
use bitcoin::Sequence;
use spark_cryptography::secret_sharing::secret_sharing::VerifiableSecretShare;
use spark_protos::common::SignatureIntent;
use spark_protos::common::SigningCommitment as ProtoSigningCommitment;
use spark_protos::spark::CancelSendTransferRequest;
use spark_protos::spark::ClaimLeafKeyTweak;
use spark_protos::spark::ClaimTransferTweakKeysRequest;
use spark_protos::spark::CompleteSendTransferRequest;
use spark_protos::spark::FinalizeNodeSignaturesRequest;
use spark_protos::spark::LeafRefundTxSigningJob;
use spark_protos::spark::QueryAllTransfersRequest;
use spark_protos::spark::QueryAllTransfersResponse;
use spark_protos::spark::SendLeafKeyTweak;
use spark_protos::spark::SigningJob;
use spark_protos::spark::StartSendTransferRequest;
use spark_protos::spark::Transfer;
use spark_protos::spark::TreeNode;
use std::collections::HashMap;

// external crates
use sha256::digest;
use tonic::async_trait;

#[async_trait]
impl<S: SparkSigner + Send + Sync + Clone + 'static> TransferInternalHandlers<S> for SparkSdk<S> {
    /// Top-level transfer execution function for the given leaves, which must be provided in [`LeafKeyTweak`] format.
    ///
    /// # Arguments
    ///
    /// * `leaves` - A vector of [`LeafKeyTweak`] objects representing the leaves to transfer.
    /// * `receiver_identity_pubkey` - The identity public key of the receiver.
    /// * `expiry_time` - The expiry time of the transfer, as a Unix timestamp in seconds.
    ///
    /// # Returns
    ///
    /// A [`Transfer`] object representing the transfer. This is an auto-converted protobuf object returned by the Spark API as the last step of the transfer for the sender.
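    ///
    /// # Example
    ///
    /// A minimal sketch of the send flow; `sdk`, `leaf_tweaks`, `receiver_pubkey`,
    /// and `expiry` are placeholders the caller is assumed to have prepared:
    ///
    /// ```ignore
    /// let transfer = sdk
    ///     .start_send_transfer(&leaf_tweaks, &receiver_pubkey, expiry)
    ///     .await?;
    /// println!("started transfer {}", transfer.id);
    /// ```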
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn start_send_transfer(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: &PublicKey,
        expiry_time: u64,
    ) -> Result<Transfer, SparkSdkError> {
        // Start the transfer and sign the refunds, aggregating the operators' signature shares with our own.
        let (transfer, refund_signatures) = self
            .send_transfer_sign_refunds(leaves, receiver_identity_pubkey, expiry_time)
            .await?;

        // Send the key tweaks to the operators to complete the transfer.
        let transfer = self
            .send_transfer_tweak_key(transfer, leaves, &refund_signatures)
            .await?;

        Ok(transfer)
    }

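    /// Completes the send by delivering each operator its share of the key tweaks,
    /// cancelling the transfer if preparation or any operator call fails.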
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn send_transfer_tweak_key(
        &self,
        transfer: Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<Transfer, SparkSdkError> {
        // Prepare the key tweaks for the transfer.
        let key_tweak_input_map =
            match self.prepare_send_transfer_key_tweaks(&transfer, leaves, refund_signature_map) {
                Ok(map) => map,
                Err(e) => {
                    // If preparation fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) = self.cancel_send_transfer(transfer.id.clone()).await {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after key tweak preparation error: {}",
                            cancel_err
                        );
                    }
                    return Err(e);
                }
            };

        let mut updated_transfer: Option<Transfer> = None;

        for operator in &self.config.spark_config.spark_operators {
            let mut spark_client = self
                .config
                .spark_config
                .get_spark_connection(Some(operator.id))
                .await?;

            let mut request = tonic::Request::new(CompleteSendTransferRequest {
                transfer_id: transfer.id.clone(),
                owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
                leaves_to_send: key_tweak_input_map[&operator.frost_identifier_str()].clone(),
            });

            self.add_authorization_header_to_request(&mut request, Some(operator.id));

            let response = match spark_client.complete_send_transfer(request).await {
                Ok(response) => response.into_inner(),
                Err(status) => {
                    // If the request fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) = self.cancel_send_transfer(transfer.id.clone()).await {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after complete_send_transfer error: {}",
                            cancel_err
                        );
                    }
                    return Err(SparkSdkError::from(NetworkError::Status(status)));
                }
            };

            match &updated_transfer {
                None => updated_transfer = response.transfer,
                Some(existing) => {
                    // Inconsistent responses should never happen; by this point it is too late to cancel the transfer.
                    let response_transfer = response.transfer.as_ref().ok_or_else(|| {
                        SparkSdkError::from(TransactionError::Transfer {
                            reason: "Operator returned an empty transfer".into(),
                        })
                    })?;
                    if !self.compare_transfers(existing, response_transfer) {
                        return Err(SparkSdkError::from(TransactionError::Transfer {
                            reason: "Inconsistent transfer responses from operators".into(),
                        }));
                    }
                }
            }
        }

        updated_transfer.ok_or_else(|| {
            SparkSdkError::from(TransactionError::Transfer {
                reason: "No transfer returned by the operators".into(),
            })
        })
    }

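    /// Starts the send: registers the transfer with the coordinator, has every leaf's
    /// refund transaction signed, and returns the transfer plus a map of node ID to
    /// aggregated refund signature.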
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn send_transfer_sign_refunds(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: &PublicKey,
        expiry_time: u64,
    ) -> Result<(Transfer, HashMap<String, Vec<u8>>), SparkSdkError> {
        // Generate a new transfer ID.
        let transfer_id = Uuid::now_v7().to_string();

        // Prepare signing data for each leaf.
        let mut leaf_data_map = HashMap::new();
        for leaf_key in leaves {
            let commitments = self.signer.new_frost_signing_noncepair()?;
            let node_tx = bitcoin_tx_from_bytes(&leaf_key.leaf.node_tx)?;

            let secp = Secp256k1::new();
            let signing_public_key = leaf_key.old_signing_private_key.public_key(&secp);

            let leaf_refund_signing_data = LeafRefundSigningData {
                signing_public_key,
                receiving_pubkey: *receiver_identity_pubkey,
                commitment: frost_commitment_to_proto_commitment(&commitments)?,
                tx: node_tx,
                refund_tx: None,
                vout: leaf_key.leaf.vout,
            };
            leaf_data_map.insert(leaf_key.leaf.id.clone(), leaf_refund_signing_data);
        }

        // Create the signing jobs needed for the transfer.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaves, &mut leaf_data_map)?;

        // Send the first transfer request as the sender.
        let mut client = self.config.spark_config.get_spark_connection(None).await?;
        let mut request = tonic::Request::new(StartSendTransferRequest {
            transfer_id: transfer_id.clone(),
            leaves_to_send: signing_jobs,
            owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
            receiver_identity_public_key: receiver_identity_pubkey.serialize().to_vec(),
            expiry_time: Some(prost_types::Timestamp {
                seconds: expiry_time as i64,
                nanos: 0,
            }),
            key_tweak_proofs: Default::default(),
        });

        self.add_authorization_header_to_request(&mut request, None);
        let response = client
            .start_send_transfer(request)
            .await
            .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?
            .into_inner();
        let transfer = response.transfer.ok_or_else(|| {
            SparkSdkError::from(TransactionError::Transfer {
                reason: "Spark API returned no transfer when starting the send".into(),
            })
        })?;

        // Sign the refunds and aggregate our signature shares with the Spark operators'.
        // This yields the final signatures for the refund transactions handed to the receiver.
        let signing_results = response.signing_results;
        let signatures =
            match self
                .signer
                .sign_transfer_refunds(&leaf_data_map, &signing_results, vec![])
            {
                Ok(sigs) => sigs,
                Err(e) => {
                    // If signing fails, cancel the transfer and propagate the error.
                    if let Err(cancel_err) = self.cancel_send_transfer(transfer.id).await {
                        // Log the cancellation error but return the original error.
                        #[cfg(feature = "telemetry")]
                        tracing::error!(
                            "Failed to cancel transfer after signing error: {}",
                            cancel_err
                        );
                    }
                    return Err(e);
                }
            };

        // Create a map of node IDs to signatures.
        let mut signature_map = HashMap::new();
        for leaf_signature in signatures {
            signature_map.insert(leaf_signature.node_id, leaf_signature.refund_tx_signature);
        }

        Ok((transfer, signature_map))
    }

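    /// Groups the per-leaf [`SendLeafKeyTweak`]s by operator identifier so that each
    /// operator only receives its own shares.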
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_send_transfer_key_tweaks(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<HashMap<String, Vec<SendLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();
        for leaf in leaves {
            // Get the refund signature.
            let leaf_refund_signature = refund_signature_map.get(&leaf.leaf.id).cloned();

            // Get the tweaks for this leaf by preparing the key tweak data.
            let leaf_tweaks = self.prepare_single_send_transfer_key_tweak(
                &transfer.id,
                leaf,
                &PublicKey::from_slice(&transfer.receiver_identity_public_key)?,
                leaf_refund_signature,
            )?;

            // Add the tweaks to the map, grouped by operator identifier.
            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

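    /// Builds the [`SendLeafKeyTweak`] for a single leaf: computes the key tweak
    /// (old key minus new key), splits it into verifiable secret shares for the
    /// operators, encrypts the new signing key to the receiver with ECIES, and
    /// signs the (leaf ID || transfer ID || ciphertext) payload with the identity key.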
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_single_send_transfer_key_tweak(
        &self,
        transfer_id: &str,
        leaf: &LeafKeyTweak,
        receiver_pubkey: &PublicKey,
        refund_signature: Option<Vec<u8>>,
    ) -> Result<HashMap<String, SendLeafKeyTweak>, SparkSdkError> {
        let refund_signature = refund_signature.unwrap_or_default();

        let secp = Secp256k1::new();
        let public_key = &leaf.old_signing_private_key.public_key(&secp);
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            public_key,
            &leaf.new_signing_public_key,
            true,
        )?;

        // Split the secret key that belongs to the tweaked public key.
        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                &tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.spark_operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            let share_scalar = SecretKey::from_slice(&share.secret_share.share.to_bytes())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier_str(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        // Generate the signature over the payload.
        // First, encrypt the new signing private key to the receiver using ECIES.
        let secret_cipher = self
            .signer
            .encrypt_secret_key_with_ecies(receiver_pubkey, &leaf.new_signing_public_key)?;

        // Build the payload (mirroring the Go implementation) by concatenating:
        // 1. the leaf ID
        // 2. the transfer ID
        // 3. the encrypted secret (secret_cipher)
        let payload = [
            leaf.leaf.id.as_bytes(), // leaf ID bytes
            transfer_id.as_bytes(),  // transfer ID bytes
            &secret_cipher,          // encrypted secret bytes
        ]
        .concat();

        // Sign the payload using ECDSA with our identity private key.
        // Note: the raw hash bytes are signed directly; no hex encoding is needed.
        let network = self.config.spark_config.network.to_bitcoin_network();
        let signature = self
            .signer
            .sign_message_ecdsa_with_identity_key(payload, true, network)?;

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;
            leaf_tweaks_map.insert(
                operator.frost_identifier_str(),
                SendLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                    secret_cipher: secret_cipher.clone(),
                    signature: signature.serialize_der().to_vec(),
                    refund_signature: refund_signature.to_vec(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

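    /// Creates one [`LeafRefundTxSigningJob`] per leaf, building the new refund
    /// transaction and recording it in `leaf_data_map` alongside its FROST nonce
    /// commitment.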
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_refund_so_signing_jobs(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        leaf_data_map: &mut HashMap<String, LeafRefundSigningData>,
    ) -> Result<Vec<LeafRefundTxSigningJob>, SparkSdkError> {
        let mut signing_jobs = Vec::new();

        for leaf in leaves {
            let refund_signing_data = leaf_data_map.get_mut(&leaf.leaf.id).ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Leaf data not found".to_string(),
                })
            })?;

            let signing_pubkey = &refund_signing_data.signing_public_key;
            let refund_tx =
                self.create_refund_tx(&leaf.leaf, &refund_signing_data.receiving_pubkey)?;
            let refund_bytes = serialize_bitcoin_transaction(&refund_tx)?;

            refund_signing_data.refund_tx = Some(refund_tx);

            let refund_commitment_proto = refund_signing_data.commitment.clone();

            signing_jobs.push(LeafRefundTxSigningJob {
                leaf_id: leaf.leaf.id.clone(),
                refund_tx_signing_job: Some(SigningJob {
                    signing_public_key: signing_pubkey.serialize().to_vec(),
                    raw_tx: refund_bytes,
                    signing_nonce_commitment: Some(refund_commitment_proto),
                }),
            });
        }

        Ok(signing_jobs)
    }

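    /// Builds the next refund transaction for a leaf: it spends output 0 of the
    /// leaf's node transaction to a P2TR output for `receiving_pubkey`, with the
    /// relative timelock decremented from the previous refund via `next_sequence`.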
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn create_refund_tx(
        &self,
        leaf: &TreeNode,
        receiving_pubkey: &PublicKey,
    ) -> Result<bitcoin::Transaction, SparkSdkError> {
        let node_tx = bitcoin_tx_from_bytes(&leaf.node_tx)?;
        let refund_tx = bitcoin_tx_from_bytes(&leaf.refund_tx)?;

        let mut new_refund_tx = bitcoin::Transaction {
            version: TransactionVersion::TWO,
            lock_time: AbsoluteLockTime::ZERO,
            input: vec![],
            output: vec![],
        };

        // Derive the next (decremented) relative timelock from the previous refund tx.
        let old_sequence = refund_tx.input[0].sequence.0;
        let sequence = Sequence(next_sequence(old_sequence));

        new_refund_tx.input.push(TxIn {
            previous_output: OutPoint {
                txid: node_tx.compute_txid(),
                vout: 0,
            },
            script_sig: ScriptBuf::default(),
            sequence,
            witness: Witness::default(),
        });

        // Pay the full leaf value to a taproot address for the receiving key.
        let secp = Secp256k1::new();
        let addr = bitcoin::Address::p2tr(
            &secp,
            receiving_pubkey.x_only_public_key().0,
            None,
            self.config.spark_config.network.to_bitcoin_network(),
        );

        new_refund_tx.output.push(bitcoin::TxOut {
            value: node_tx.output[0].value,
            script_pubkey: addr.script_pubkey(),
        });

        Ok(new_refund_tx)
    }

    // Helper methods
    fn compare_transfers(&self, t1: &Transfer, t2: &Transfer) -> bool {
        t1.id == t2.id
            && t1.receiver_identity_public_key == t2.receiver_identity_public_key
            && t1.status == t2.status
            && t1.total_value == t2.total_value
            && t1.expiry_time.as_ref().map(|t| t.seconds)
                == t2.expiry_time.as_ref().map(|t| t.seconds)
            && t1.leaves.len() == t2.leaves.len()
    }

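    /// Claims an incoming transfer end to end: tweaks the leaf keys, signs the new
    /// refund transactions, finalizes the node signatures, and stores the updated
    /// leaves in the leaf manager.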
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_finalize_incoming_transfer(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        // First, tweak the keys.
        self.claim_transfer_tweak_keys(transfer, leaves).await?;

        // Then sign the refunds.
        let signatures = self.claim_transfer_sign_refunds(transfer, leaves).await?;

        // Finally, finalize the transfer.
        self.finalize_transfer(&signatures).await?;

        let mut leaf_nodes = vec![];
        for (leaf, _) in leaves.iter().zip(signatures.iter()) {
            // Create a new refund transaction with a decremented timelock.
            let new_refund_tx = self.create_refund_tx(&leaf.leaf, &leaf.new_signing_public_key)?;
            let new_refund_tx_bytes = serialize_bitcoin_transaction(&new_refund_tx)?;

            let mut updated_leaf = leaf.leaf.clone();
            updated_leaf.refund_tx = new_refund_tx_bytes;
            leaf_nodes.push(SparkLeaf::Bitcoin(updated_leaf));
        }

        self.leaf_manager.insert_leaves(leaf_nodes, false)?;

        Ok(())
    }

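    /// Sends each operator its share of the claim key tweaks for the incoming
    /// transfer.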
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_transfer_tweak_keys(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        let leaves_tweaks_map = self.prepare_claim_leaves_key_tweaks(leaves)?;

        for operator in &self.config.spark_config.spark_operators {
            let mut spark_client = self
                .config
                .spark_config
                .get_spark_connection(Some(operator.id))
                .await?;

            let request = ClaimTransferTweakKeysRequest {
                transfer_id: transfer.id.clone(),
                owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
                leaves_to_receive: leaves_tweaks_map[&operator.frost_identifier_str()].clone(),
            };
            let mut tonic_request = tonic::Request::new(request);
            self.add_authorization_header_to_request(&mut tonic_request, Some(operator.id));

            let _ = spark_client
                .claim_transfer_tweak_keys(tonic_request)
                .await
                .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?;
        }

        Ok(())
    }

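    /// Groups the per-leaf [`ClaimLeafKeyTweak`]s by operator identifier, mirroring
    /// the send-side grouping.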
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_claim_leaves_key_tweaks(
        &self,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<HashMap<String, Vec<ClaimLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();

        for leaf in leaves {
            let leaf_tweaks = self.prepare_claim_leaf_key_tweaks(leaf)?;

            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

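    /// Builds the [`ClaimLeafKeyTweak`] for a single leaf: computes the key tweak
    /// (old key minus new key) and splits it into verifiable secret shares, one per
    /// operator, together with the matching public key shares.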
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn prepare_claim_leaf_key_tweaks(
        &self,
        leaf: &LeafKeyTweak,
    ) -> Result<HashMap<String, ClaimLeafKeyTweak>, SparkSdkError> {
        let secp = Secp256k1::new();
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            &leaf.old_signing_private_key.public_key(&secp),
            &leaf.new_signing_public_key,
            true,
        )?;

        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                &tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.spark_operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            // Derive the public key tweak from this operator's secret share scalar.
            let share_scalar = SecretKey::from_slice(&share.secret_share.share.to_bytes())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier_str(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            leaf_tweaks_map.insert(
                operator.frost_identifier_str(),
                ClaimLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

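    /// Signs the refund transactions for the claimed leaves with the new signing
    /// keys and returns the resulting node signatures.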
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_transfer_sign_refunds(
        &self,
        transfer: &Transfer,
        leaf_keys: &Vec<LeafKeyTweak>,
    ) -> Result<Vec<spark_protos::spark::NodeSignatures>, SparkSdkError> {
        // Create a map to store refund signing data for each leaf.
        let mut leaf_data_map = HashMap::new();

        for leaf_key in leaf_keys {
            // For each leaf key, create the signing data using the new private key.
            let commitments = self.signer.new_frost_signing_noncepair()?;

            // Deserialize the transaction from raw bytes.
            let tx = match bitcoin::consensus::deserialize(&leaf_key.leaf.node_tx) {
                Ok(tx) => tx,
                Err(e) => {
                    return Err(SparkSdkError::from(ValidationError::InvalidInput {
                        field: format!("Failed to deserialize transaction: {}", e),
                    }));
                }
            };

            // Store all the data necessary for signing. The receiving pubkey is our
            // own new signing key, since we are claiming the leaf to ourselves.
            leaf_data_map.insert(
                leaf_key.leaf.id.clone(),
                LeafRefundSigningData {
                    signing_public_key: leaf_key.new_signing_public_key,
                    receiving_pubkey: leaf_key.new_signing_public_key,
                    commitment: frost_commitment_to_proto_commitment(&commitments)?,
                    tx,
                    refund_tx: None,
                    vout: leaf_key.leaf.vout,
                },
            );
        }

        // Prepare the signing jobs for each leaf.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaf_keys, &mut leaf_data_map)?;

        // Request signing of the refunds.
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let request = spark_protos::spark::ClaimTransferSignRefundsRequest {
            transfer_id: transfer.id.clone(),
            owner_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
            signing_jobs,
            key_tweak_proofs: Default::default(),
        };
        let mut tonic_request = tonic::Request::new(request);
        self.add_authorization_header_to_request(&mut tonic_request, None);

        let response = spark_client
            .claim_transfer_sign_refunds(tonic_request)
            .await
            .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?
            .into_inner();

        // Process the signing results and generate the final signatures.
        self.signer
            .sign_transfer_refunds(&leaf_data_map, &response.signing_results, vec![])
    }

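    /// Submits the aggregated node signatures to the coordinator to finalize the
    /// transfer.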
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn finalize_transfer(
        &self,
        signatures: &[spark_protos::spark::NodeSignatures],
    ) -> Result<(), SparkSdkError> {
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let request = FinalizeNodeSignaturesRequest {
            intent: SignatureIntent::Transfer as i32,
            node_signatures: signatures.to_vec(),
        };
        let mut tonic_request = tonic::Request::new(request);
        self.add_authorization_header_to_request(&mut tonic_request, None);

        // TODO: Validate the response contents rather than only checking for a successful status.
        let _response = spark_client
            .finalize_node_signatures(tonic_request)
            .await
            .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?;

        Ok(())
    }

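    /// Verifies a pending incoming transfer: checks the sender's ECDSA signature
    /// over each leaf's (leaf ID || transfer ID || ciphertext) payload, then
    /// decrypts each leaf's secret cipher with our identity key. Returns a map of
    /// leaf ID to decrypted leaf private key.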
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn verify_pending_transfer(
        &self,
        transfer: &spark_protos::spark::Transfer,
    ) -> Result<HashMap<String, SecretKey>, SparkSdkError> {
        // Create the map for leaf ID -> decrypted leaf private key.
        let mut leaf_privkey_map = HashMap::new();

        // 1) Parse the sender's public key.
        let secp = Secp256k1::new();
        let sender_pubkey = parse_public_key(&transfer.sender_identity_public_key)?;

        // 2) For each leaf, verify the signature, then decrypt the secret.
        for leaf in &transfer.leaves {
            // Parse the signature from DER.
            let signature = EcdsaSignature::from_der(&leaf.signature).map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("Failed to parse DER signature: {e}"),
                })
            })?;

            // Build the payload to verify: leaf ID || transfer ID || secret cipher.
            let mut payload = leaf
                .leaf
                .as_ref()
                .map(|l| l.id.clone())
                .unwrap_or_default()
                .into_bytes();
            payload.extend_from_slice(transfer.id.as_bytes());
            payload.extend_from_slice(&leaf.secret_cipher);

            // Hash the payload.
            let payload_hash_hex = digest(&payload);
            let payload_hash_bytes = hex::decode(payload_hash_hex).map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("Failed to decode hex payload hash: {e}"),
                })
            })?;

            // Build the secp256k1 message and verify the ECDSA signature.
            let msg = Secp256k1Message::from_digest_slice(&payload_hash_bytes).map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("Failed to create message for signature verify: {e}"),
                })
            })?;

            secp.verify_ecdsa(&msg, &signature, &sender_pubkey)
                .map_err(|e| {
                    SparkSdkError::from(ValidationError::InvalidInput {
                        field: format!("Failed to verify signature: {e}"),
                    })
                })?;

            // Decrypt the secret cipher with our identity key (the SparkSigner performs the ECIES decryption).
            let network = self.config.spark_config.network.to_bitcoin_network();
            let leaf_secret = self
                .signer
                .decrypt_secret_key_with_ecies(&leaf.secret_cipher, network)?;

            // Record the decrypted leaf secret (private key) in the map.
            if let Some(leaf_node) = &leaf.leaf {
                leaf_privkey_map.insert(leaf_node.id.clone(), leaf_secret);
            }
        }

        Ok(leaf_privkey_map)
    }

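    /// Queries all transfers for this wallet's identity key, paginated by `limit`
    /// and `offset`.
    ///
    /// ```ignore
    /// // Fetch the first 20 transfers (illustrative values).
    /// let transfers = sdk.query_all_transfers(20, 0).await?;
    /// ```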
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn query_all_transfers(
        &self,
        limit: u32,
        offset: u32,
    ) -> Result<QueryAllTransfersResponse, SparkSdkError> {
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let identity_public_key = self.get_spark_address()?;
        let mut request = tonic::Request::new(QueryAllTransfersRequest {
            limit: limit as i64,
            offset: offset as i64,
            identity_public_key: identity_public_key.serialize().to_vec(),
        });
        self.add_authorization_header_to_request(&mut request, None);

        let response = spark_client
            .query_all_transfers(request)
            .await
            .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?;

        Ok(response.into_inner())
    }

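    /// Requests cancellation of a pending send from every operator concurrently and
    /// returns the first successful response, if any.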
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn cancel_send_transfer(
        &self,
        transfer_id: String,
    ) -> Result<Option<Transfer>, SparkSdkError> {
        let cancel_transfer_request = CancelSendTransferRequest {
            transfer_id,
            sender_identity_public_key: self.get_spark_address()?.serialize().to_vec(),
        };

        let futures = self
            .config
            .spark_config
            .spark_operators
            .iter()
            .map(|operator| {
                let operator_id = operator.id;
                let config = self.config.clone();
                let cancel_request = cancel_transfer_request.clone();
                let self_clone = self.clone();

                async move {
                    let mut spark_client = config
                        .spark_config
                        .get_spark_connection(Some(operator_id))
                        .await?;

                    let mut request = tonic::Request::new(cancel_request);
                    self_clone.add_authorization_header_to_request(&mut request, Some(operator_id));

                    let response = spark_client
                        .cancel_send_transfer(request)
                        .await
                        .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?;
                    Ok::<_, SparkSdkError>(response.into_inner())
                }
            });

        let results = futures::future::join_all(futures).await;

        // Return the first successful response, if any.
        if let Some(result) = results.into_iter().flatten().next() {
            return Ok(result.transfer);
        }

        Ok(None)
    }
}

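/// Looks up the verifiable secret share for an operator; operator `id` maps to
/// share index `id + 1`, since share indices are 1-based.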
fn find_share(
    shares: &[VerifiableSecretShare],
    operator_id: u64,
) -> Result<VerifiableSecretShare, SparkSdkError> {
    let target_index = k256::Scalar::from(operator_id + 1);
    shares
        .iter()
        .find(|s| s.secret_share.index == target_index)
        .cloned()
        .ok_or_else(|| {
            SparkSdkError::from(ValidationError::InvalidInput {
                field: "Share not found".to_string(),
            })
        })
}

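/// Converts FROST signing commitments into the protobuf [`ProtoSigningCommitment`]
/// by serializing the hiding and binding nonce commitments.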
fn frost_commitment_to_proto_commitment(
    commitments: &FrostSigningCommitments,
) -> Result<ProtoSigningCommitment, SparkSdkError> {
    let hiding = commitments.hiding().serialize().map_err(|e| {
        SparkSdkError::from(ValidationError::InvalidInput {
            field: format!("Failed to serialize hiding commitment: {e}"),
        })
    })?;
    let binding = commitments.binding().serialize().map_err(|e| {
        SparkSdkError::from(ValidationError::InvalidInput {
            field: format!("Failed to serialize binding commitment: {e}"),
        })
    })?;

    Ok(ProtoSigningCommitment { hiding, binding })
}