1use std::str::FromStr;
2
3use crate::with_handler_lock;
4use crate::{
5 constants::spark::DEFAULT_TRANSFER_EXPIRY,
6 error::{SparkSdkError, ValidationError},
7 signer::traits::SparkSigner,
8 wallet::{
9 internal_handlers::traits::{
10 leaves::LeavesInternalHandlers,
11 transfer::{LeafKeyTweak, TransferInternalHandlers},
12 },
13 leaf_manager::SparkNodeStatus,
14 utils::bitcoin::bitcoin_tx_from_bytes,
15 },
16 SparkNetwork, SparkSdk,
17};
18use bitcoin::{
19 secp256k1::{ecdsa::Signature, PublicKey},
20 Transaction,
21};
22use spark_cryptography::derivation_path::SparkKeyType;
23use spark_protos::spark::{
24 query_pending_transfers_request::Participant, QueryPendingTransfersRequest, TransferStatus,
25};
26use uuid::Uuid;
27
/// A node in a Spark tree.
///
/// SDK-side mirror of [`spark_protos::spark::TreeNode`], with the proto's
/// raw byte fields decoded into structured types (UUIDs, transactions,
/// secp256k1 keys).
#[derive(Debug, Clone)]
pub struct TreeNode {
    // Unique identifier of this node.
    pub id: Uuid,
    // Identifier of the tree this node belongs to.
    pub tree_id: Uuid,
    // Value carried by this node, in satoshis (proto field `value`).
    pub value_sats: u64,
    // Parent node id; `None` for a tree root.
    pub parent_node_id: Option<Uuid>,
    // Transaction creating this node's output (decoded from proto bytes).
    pub node_tx: Transaction,
    // Refund transaction for this node (decoded from proto bytes).
    pub refund_tx: Transaction,
    // Output index of this node within `node_tx`.
    pub vout: u32,
    // Verifying public key for this node (decoded from proto bytes).
    pub verifying_public_key: PublicKey,
    // Identity public key of the node's current owner.
    pub owner_identity_public_key: PublicKey,
    // (owner identifiers, threshold) from the proto `SigningKeyshare`,
    // when one is attached.
    pub signing_keyshares: Option<(Vec<String>, u32)>,
    // Raw status string as reported by the operator.
    pub status: String,
    // Network this node lives on.
    pub network: SparkNetwork,
}
43
44impl TryFrom<spark_protos::spark::TreeNode> for TreeNode {
45 type Error = SparkSdkError;
46
47 fn try_from(value: spark_protos::spark::TreeNode) -> Result<Self, Self::Error> {
48 let id = Uuid::from_str(&value.id)
49 .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
50 let tree_id = Uuid::from_str(&value.tree_id)
51 .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
52 let value_sats = value.value;
53 let parent_node_id = value
54 .parent_node_id
55 .map(|parent_node_id| {
56 Uuid::from_str(&parent_node_id)
57 .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))
58 })
59 .transpose()?;
60 let node_tx = bitcoin_tx_from_bytes(&value.node_tx)?;
61 let refund_tx = bitcoin_tx_from_bytes(&value.refund_tx)?;
62 let vout = value.vout;
63 let verifying_public_key = PublicKey::from_slice(&value.verifying_public_key)?;
64 let owner_identity_public_key = PublicKey::from_slice(&value.owner_identity_public_key)?;
65 let signing_keyshares = value.signing_keyshare.map(|signing_keyshare| {
66 (
67 signing_keyshare.owner_identifiers,
68 signing_keyshare.threshold,
69 )
70 });
71 let status = value.status;
72 let network = value.network.try_into()?;
73
74 Ok(TreeNode {
75 id,
76 tree_id,
77 value_sats,
78 parent_node_id,
79 node_tx,
80 refund_tx,
81 vout,
82 verifying_public_key,
83 owner_identity_public_key,
84 signing_keyshares,
85 status,
86 network,
87 })
88 }
89}
90
91impl From<TreeNode> for spark_protos::spark::TreeNode {
92 fn from(value: TreeNode) -> Self {
93 spark_protos::spark::TreeNode {
94 id: value.id.to_string(),
95 tree_id: value.tree_id.to_string(),
96 value: value.value_sats,
97 parent_node_id: value.parent_node_id.map(|id| id.to_string()),
98 node_tx: bitcoin::consensus::serialize(&value.node_tx),
99 refund_tx: bitcoin::consensus::serialize(&value.refund_tx),
100 vout: value.vout,
101 verifying_public_key: value.verifying_public_key.serialize().to_vec(),
102 owner_identity_public_key: value.owner_identity_public_key.serialize().to_vec(),
103 signing_keyshare: value.signing_keyshares.map(|(identifiers, threshold)| {
104 spark_protos::spark::SigningKeyshare {
105 owner_identifiers: identifiers,
106 threshold,
107 }
108 }),
109 status: value.status,
110 network: value.network.marshal_proto(),
111 }
112 }
113}
114
/// A single leaf included in a [`Transfer`].
#[derive(Debug, Clone)]
pub struct TransferLeaf {
    // The tree node being transferred; `None` if the proto omitted it.
    pub leaf: Option<TreeNode>,
    // Encrypted secret for this leaf, opaque bytes from the proto.
    pub secret_cipher: Vec<u8>,
    // DER signature from the proto, or `None` when the proto field was empty.
    pub signature: Option<Signature>,
    // Intermediate refund transaction (decoded from proto bytes).
    pub intermediate_refund_tx: Transaction,
}
122
123impl TryFrom<spark_protos::spark::TransferLeaf> for TransferLeaf {
124 type Error = SparkSdkError;
125
126 fn try_from(value: spark_protos::spark::TransferLeaf) -> Result<Self, Self::Error> {
127 let leaf = value.leaf.map(TreeNode::try_from).transpose()?;
128 let secret_cipher = value.secret_cipher;
129 let signature = if !value.signature.is_empty() {
130 Some(Signature::from_der(&value.signature)?)
131 } else {
132 None
133 };
134 let intermediate_refund_tx = bitcoin_tx_from_bytes(&value.intermediate_refund_tx)?;
135
136 Ok(TransferLeaf {
137 leaf,
138 secret_cipher,
139 signature,
140 intermediate_refund_tx,
141 })
142 }
143}
144
/// A transfer of one or more leaves between two Spark identities.
#[derive(Debug)]
pub struct Transfer {
    // Unique transfer identifier.
    pub id: Uuid,
    // Identity public key of the sending wallet.
    pub sender_identity_public_key: PublicKey,
    // Identity public key of the receiving wallet.
    pub receiver_identity_public_key: PublicKey,
    // Current lifecycle status, as the proto enum.
    pub status: spark_protos::spark::TransferStatus,
    // Total value of all leaves in this transfer, in satoshis.
    pub total_value_sats: u64,
    // Unix-epoch expiry in seconds, when the proto supplied one.
    pub expiry_time_seconds: Option<u64>,
    // Leaves carried by this transfer.
    pub leaves: Vec<TransferLeaf>,
}
164
impl PartialEq for Transfer {
    // Structural comparison of transfers.
    //
    // NOTE(review): this skips `sender_identity_public_key` entirely and
    // compares `leaves` only by length (`TransferLeaf` derives no
    // `PartialEq`). Two transfers with identical id/receiver/status/value
    // but different leaf contents therefore compare equal — confirm this
    // is acceptable for all call sites.
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
            && self.receiver_identity_public_key == other.receiver_identity_public_key
            && self.status == other.status
            && self.total_value_sats == other.total_value_sats
            && self.expiry_time_seconds == other.expiry_time_seconds
            && self.leaves.len() == other.leaves.len()
    }
}
175
176impl TryFrom<spark_protos::spark::Transfer> for Transfer {
177 type Error = SparkSdkError;
178
179 fn try_from(value: spark_protos::spark::Transfer) -> Result<Self, Self::Error> {
180 let id = Uuid::from_str(&value.id)
181 .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
182 let sender_identity_public_key = PublicKey::from_slice(&value.sender_identity_public_key)?;
183 let receiver_identity_public_key =
184 PublicKey::from_slice(&value.receiver_identity_public_key)?;
185 let status = spark_protos::spark::TransferStatus::try_from(value.status).map_err(|_| {
186 SparkSdkError::from(ValidationError::InvalidInput {
187 field: "Invalid TransferStatus".to_string(),
188 })
189 })?;
190 let total_value = value.total_value;
191 let expiry_time_seconds = value.expiry_time.map(|time| time.seconds as u64);
192 let leaves = value
193 .leaves
194 .into_iter()
195 .map(TransferLeaf::try_from)
196 .collect::<Result<Vec<TransferLeaf>, Self::Error>>()?;
197
198 Ok(Transfer {
199 id,
200 sender_identity_public_key,
201 receiver_identity_public_key,
202 status,
203 total_value_sats: total_value,
204 expiry_time_seconds,
205 leaves,
206 })
207 }
208}
209
/// Decoded result of a paginated "query all transfers" call.
pub struct GetAllTransfersResponse {
    // Transfers in this page.
    pub transfers: Vec<Transfer>,
    // Offset for the next page; `None` when the server returned a negative
    // offset (interpreted as "no further pages").
    pub offset: Option<u32>,
}
216
217impl TryFrom<spark_protos::spark::QueryAllTransfersResponse> for GetAllTransfersResponse {
218 type Error = SparkSdkError;
219
220 fn try_from(
221 value: spark_protos::spark::QueryAllTransfersResponse,
222 ) -> Result<Self, Self::Error> {
223 let transfers = value
224 .transfers
225 .into_iter()
226 .map(Transfer::try_from)
227 .collect::<Result<Vec<Transfer>, Self::Error>>()?;
228 let offset = Some(value.offset)
229 .filter(|offset| *offset >= 0)
230 .map(|offset| offset as u32);
231
232 Ok(GetAllTransfersResponse { transfers, offset })
233 }
234}
235
236impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
237 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
262 pub async fn query_pending_transfers(&self) -> Result<Vec<Transfer>, SparkSdkError> {
263 with_handler_lock!(self, async {
264 self.query_pending_transfers_internal().await
265 })
266 .await
267 }
268
269 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
270 pub(crate) async fn query_pending_transfers_internal(
271 &self,
272 ) -> Result<Vec<Transfer>, SparkSdkError> {
273 let request_data = QueryPendingTransfersRequest {
274 transfer_ids: vec![],
275 participant: Some(Participant::ReceiverIdentityPublicKey(
276 self.get_spark_address()?.serialize().to_vec(),
277 )),
278 };
279
280 let response = self
281 .config
282 .spark_config
283 .call_with_retry(
284 request_data,
285 |mut client, req| {
286 Box::pin(async move { client.query_pending_transfers(req).await })
287 },
288 None,
289 )
290 .await?;
291
292 response
294 .transfers
295 .into_iter()
296 .map(Transfer::try_from)
297 .collect()
298 }
299
300 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
355 pub async fn transfer(
356 &self,
357 amount: u64,
358 receiver_spark_address: &PublicKey,
359 ) -> Result<String, SparkSdkError> {
360 with_handler_lock!(self, async {
361 let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;
362
363 self.refresh_timelock_nodes(None).await?;
364
365 let leaf_selection_response = self.prepare_leaves_for_amount(amount).await?;
368 let unlocking_id = leaf_selection_response.unlocking_id.unwrap();
369
370 let selected_leaves = leaf_selection_response.leaves;
372 let leaf_ids = selected_leaves
373 .iter()
374 .map(|leaf| leaf.get_id().clone())
375 .collect::<Vec<String>>();
376
377 let mut leaves_to_transfer = Vec::new();
378 for leaf in selected_leaves {
379 let new_signing_public_key = self.signer.new_ephemeral_keypair()?;
381
382 let keypair = self.signer.derive_spark_key(
384 leaf.get_id().clone(),
385 0,
386 SparkKeyType::BaseSigning,
387 self.config.spark_config.network.to_bitcoin_network(),
388 )?;
389 let old_signing_private_key = keypair.secret_key();
390
391 leaves_to_transfer.push(LeafKeyTweak {
392 leaf: leaf.get_tree_node()?,
393 old_signing_private_key,
394 new_signing_public_key,
395 });
396 }
397
398 let transfer = self
400 .start_send_transfer(&leaves_to_transfer, receiver_spark_address, expiry_time)
401 .await?;
402
403 self.leaf_manager
405 .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;
406
407 Ok(transfer.id.to_string())
408 })
409 .await
410 }
411
412 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
413 pub async fn transfer_leaf_ids(
414 &self,
415 leaf_ids: Vec<String>,
416 receiver_identity_pubkey: &PublicKey,
417 ) -> Result<String, SparkSdkError> {
418 with_handler_lock!(self, async {
419 self.transfer_leaf_ids_internal(leaf_ids, receiver_identity_pubkey)
420 .await
421 })
422 .await
423 }
424
425 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
426 pub(crate) async fn transfer_leaf_ids_internal(
427 &self,
428 leaf_ids: Vec<String>,
429 receiver_identity_pubkey: &PublicKey,
430 ) -> Result<String, SparkSdkError> {
431 let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;
432
433 let leaf_selection_response = self
434 .leaf_manager
435 .lock_leaf_ids(&leaf_ids, SparkNodeStatus::Transfer)?;
436
437 let unlocking_id = leaf_selection_response.unlocking_id.unwrap();
438
439 let selected_leaves = leaf_selection_response.leaves;
440
441 let mut leaves_to_transfer = Vec::new();
442 for leaf in selected_leaves {
443 let new_signing_public_key = self.signer.new_ephemeral_keypair()?;
445
446 let network = self.config.spark_config.network.to_bitcoin_network();
447 let old_signing_pubkey = self.signer.get_deposit_signing_key(network)?;
448 let old_signing_private_key = self
449 .signer
450 .sensitive_expose_secret_key_from_pubkey(&old_signing_pubkey, false)?;
451
452 leaves_to_transfer.push(LeafKeyTweak {
453 leaf: leaf.get_tree_node()?,
454 old_signing_private_key,
455 new_signing_public_key,
456 });
457 }
458
459 let transfer = self
461 .start_send_transfer(&leaves_to_transfer, receiver_identity_pubkey, expiry_time)
462 .await?;
463
464 self.leaf_manager
466 .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;
467
468 Ok(transfer.id.to_string())
469 }
470
471 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
516 pub async fn claim_transfer(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
517 with_handler_lock!(self, async { self.claim_transfer_internal(transfer).await }).await
518 }
519
520 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
521 async fn claim_transfer_internal(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
522 for transfer_leaf in &transfer.leaves {
524 if transfer_leaf.leaf.is_none() {
525 return Err(SparkSdkError::from(ValidationError::InvalidInput {
526 field: "Transfer leaf is not found".to_string(),
527 }));
528 }
529 }
530
531 let leaves = transfer
533 .leaves
534 .iter()
535 .map(|leaf| {
536 leaf.leaf
537 .clone()
538 .ok_or(SparkSdkError::from(ValidationError::InvalidInput {
539 field: "Transfer leaf not found.".to_string(),
540 }))
541 })
542 .collect::<Result<Vec<TreeNode>, SparkSdkError>>()?;
543
544 if transfer.status != TransferStatus::SenderKeyTweaked {
545 return Err(SparkSdkError::from(ValidationError::InvalidInput {
546 field: "Transfer is not in the correct status".to_string(),
547 }));
548 }
549
550 let mut leaves_to_claim = Vec::new();
551 for leaf in leaves {
552 let leaf_private_key_map = self.verify_pending_transfer(&transfer).await?;
553
554 let leaf_id = leaf.id.to_string();
555 let new_pubkey = self.signer.derive_spark_key(
556 leaf_id.clone(),
557 0,
558 SparkKeyType::BaseSigning,
559 self.config.spark_config.network.to_bitcoin_network(),
560 )?;
561
562 self.signer
563 .insert_secp256k1_keypair_from_secret_key(&leaf_private_key_map[&leaf_id])
564 .unwrap();
565
566 let claim_node = LeafKeyTweak {
567 leaf: leaf.into(),
568 old_signing_private_key: leaf_private_key_map[&leaf_id],
569 new_signing_public_key: new_pubkey.public_key(),
570 };
571
572 leaves_to_claim.push(claim_node);
573 }
574
575 self.claim_finalize_incoming_transfer(&transfer, &leaves_to_claim)
576 .await?;
577
578 self.refresh_timelock_nodes(None).await?;
580
581 Ok(())
582 }
583
584 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
585 pub async fn claim_transfers(&self) -> Result<(), SparkSdkError> {
586 with_handler_lock!(self, async { self.claim_transfers_internal().await }).await
587 }
588
589 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
590 pub async fn claim_transfers_internal(&self) -> Result<(), SparkSdkError> {
591 let pending = self.query_pending_transfers_internal().await?;
592
593 let pending_len = pending.len();
594 let claim_futures = pending
595 .into_iter()
596 .map(|transfer| self.claim_transfer_internal(transfer));
597 futures::future::try_join_all(claim_futures).await?;
598
599 #[cfg(feature = "telemetry")]
600 tracing::debug!("Claimed {:?} pending transfers", pending_len);
601
602 Ok(())
603 }
604
605 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
606 pub async fn get_all_transfers(
607 &self,
608 limit: Option<u32>,
609 offset: Option<u32>,
610 ) -> Result<GetAllTransfersResponse, SparkSdkError> {
611 let limit = limit.unwrap_or(20);
612 let offset = offset.unwrap_or(0);
613
614 self.query_all_transfers(limit, offset).await?.try_into()
615 }
616}
617
#[cfg(test)]
mod tests {
    use chrono::Utc;
    use std::str::FromStr;
    use uuid::Uuid;

    use super::GetAllTransfersResponse;

    const TRANSFER_ID: &str = "40145208-5af3-4b86-8389-1f73c38643aa";
    const SENDER_PUBLIC_KEY: &str =
        "0234d1da9ec5c83e9b69d8fd3b60ee8294961075bd82ebdbeb6b09eedd09d539be";
    const RECEIVER_PUBLIC_KEY: &str =
        "03af6765f8668cb1d9bb34d698ad9211c9ea844a762680e25e1bb520bf32977c2c";

    /// Builds the raw proto transfer fixture shared by the tests below
    /// (previously duplicated verbatim in both tests).
    fn raw_transfer() -> spark_protos::spark::Transfer {
        spark_protos::spark::Transfer {
            id: TRANSFER_ID.to_string(),
            sender_identity_public_key: hex::decode(SENDER_PUBLIC_KEY).unwrap(),
            receiver_identity_public_key: hex::decode(RECEIVER_PUBLIC_KEY).unwrap(),
            status: 0,
            total_value: 100_000,
            expiry_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp() + 5 * 60,
                nanos: 0,
            }),
            leaves: vec![],
            created_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            updated_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            r#type: 0,
        }
    }

    /// Asserts the invariants shared by both tests: exactly one transfer,
    /// with the expected id and identity keys.
    fn assert_single_expected_transfer(response: &GetAllTransfersResponse) {
        assert_eq!(response.transfers.len(), 1);

        let transfer = &response.transfers[0];
        assert_eq!(transfer.id, Uuid::from_str(TRANSFER_ID).unwrap());
        assert_eq!(
            transfer.sender_identity_public_key.to_string(),
            SENDER_PUBLIC_KEY
        );
        assert_eq!(
            transfer.receiver_identity_public_key.to_string(),
            RECEIVER_PUBLIC_KEY
        );
    }

    #[test]
    fn test_get_all_transfers_response() {
        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer()],
            offset: 5,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_single_expected_transfer(&response);
        // A non-negative server offset is passed through as-is.
        assert_eq!(response.offset, Some(5));
    }

    #[test]
    fn test_get_all_transfers_response_no_offset() {
        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer()],
            offset: -1,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_single_expected_transfer(&response);
        // A negative server offset decodes to `None` ("no next page").
        assert_eq!(response.offset, None);
    }
}