// spark_rust/wallet/handlers/transfer.rs

1use std::str::FromStr;
2
3use crate::with_handler_lock;
4use crate::{
5    constants::spark::DEFAULT_TRANSFER_EXPIRY,
6    error::{SparkSdkError, ValidationError},
7    signer::traits::{derivation_path::SparkKeyType, SparkSigner},
8    wallet::{
9        internal_handlers::traits::{
10            leaves::LeavesInternalHandlers,
11            transfer::{LeafKeyTweak, TransferInternalHandlers},
12        },
13        leaf_manager::SparkNodeStatus,
14        utils::bitcoin::bitcoin_tx_from_bytes,
15    },
16    SparkNetwork, SparkSdk,
17};
18use bitcoin::{
19    secp256k1::{ecdsa::Signature, PublicKey},
20    Transaction,
21};
22use spark_protos::spark::{
23    query_pending_transfers_request::Participant, QueryPendingTransfersRequest, TransferStatus,
24};
25use uuid::Uuid;
26
/// In-memory representation of a Spark tree node (a value-bearing "leaf" on the
/// Spark tree), converted from its protobuf counterpart.
#[derive(Debug, Clone)]
pub struct TreeNode {
    // Unique identifier of this node.
    pub id: Uuid,
    // Identifier of the tree this node belongs to.
    pub tree_id: Uuid,
    // Value carried by this node, in satoshis.
    pub value_sats: u64,
    // Parent node id; `None` presumably marks a tree root — TODO confirm.
    pub parent_node_id: Option<Uuid>,
    // The Bitcoin transaction for this node (deserialized from wire bytes).
    pub node_tx: Transaction,
    // The refund transaction associated with this node.
    pub refund_tx: Transaction,
    // Output index reported for this node; assumed to index into `node_tx` — TODO confirm.
    pub vout: u32,
    // Public key used to verify signatures for this node.
    pub verifying_public_key: PublicKey,
    // Identity public key of the node's owner.
    pub owner_identity_public_key: PublicKey,
    // Signing keyshare data as `(owner_identifiers, threshold)`, when present.
    pub signing_keyshares: Option<(Vec<String>, u32)>,
    // Raw status string as reported by the Spark operators.
    pub status: String,
    // Network this node lives on.
    pub network: SparkNetwork,
}
42
43impl TryFrom<spark_protos::spark::TreeNode> for TreeNode {
44    type Error = SparkSdkError;
45
46    fn try_from(value: spark_protos::spark::TreeNode) -> Result<Self, Self::Error> {
47        let id = Uuid::from_str(&value.id)
48            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
49        let tree_id = Uuid::from_str(&value.tree_id)
50            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
51        let value_sats = value.value;
52        let parent_node_id = value
53            .parent_node_id
54            .map(|parent_node_id| {
55                Uuid::from_str(&parent_node_id)
56                    .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))
57            })
58            .transpose()?;
59        let node_tx = bitcoin_tx_from_bytes(&value.node_tx)?;
60        let refund_tx = bitcoin_tx_from_bytes(&value.refund_tx)?;
61        let vout = value.vout;
62        let verifying_public_key = PublicKey::from_slice(&value.verifying_public_key)?;
63        let owner_identity_public_key = PublicKey::from_slice(&value.owner_identity_public_key)?;
64        let signing_keyshares = value.signing_keyshare.map(|signing_keyshare| {
65            (
66                signing_keyshare.owner_identifiers,
67                signing_keyshare.threshold,
68            )
69        });
70        let status = value.status;
71        let network = value.network.try_into()?;
72
73        Ok(TreeNode {
74            id,
75            tree_id,
76            value_sats,
77            parent_node_id,
78            node_tx,
79            refund_tx,
80            vout,
81            verifying_public_key,
82            owner_identity_public_key,
83            signing_keyshares,
84            status,
85            network,
86        })
87    }
88}
89
90impl From<TreeNode> for spark_protos::spark::TreeNode {
91    fn from(value: TreeNode) -> Self {
92        spark_protos::spark::TreeNode {
93            id: value.id.to_string(),
94            tree_id: value.tree_id.to_string(),
95            value: value.value_sats,
96            parent_node_id: value.parent_node_id.map(|id| id.to_string()),
97            node_tx: bitcoin::consensus::serialize(&value.node_tx),
98            refund_tx: bitcoin::consensus::serialize(&value.refund_tx),
99            vout: value.vout,
100            verifying_public_key: value.verifying_public_key.serialize().to_vec(),
101            owner_identity_public_key: value.owner_identity_public_key.serialize().to_vec(),
102            signing_keyshare: value.signing_keyshares.map(|(identifiers, threshold)| {
103                spark_protos::spark::SigningKeyshare {
104                    owner_identifiers: identifiers,
105                    threshold,
106                }
107            }),
108            status: value.status,
109            network: value.network.marshal_proto(),
110        }
111    }
112}
113
/// A single leaf participating in a transfer, as exchanged with the Spark
/// operators.
#[derive(Debug, Clone)]
pub struct TransferLeaf {
    // The tree node being transferred, when included in the response.
    pub leaf: Option<TreeNode>,
    // Encrypted secret for the leaf; presumably decrypted by the receiver
    // during the claim flow — TODO confirm against verify_pending_transfer.
    pub secret_cipher: Vec<u8>,
    // ECDSA signature parsed from DER; `None` when the wire field was empty.
    pub signature: Option<Signature>,
    // Intermediate refund transaction for this leaf.
    pub intermediate_refund_tx: Transaction,
}
121
122impl TryFrom<spark_protos::spark::TransferLeaf> for TransferLeaf {
123    type Error = SparkSdkError;
124
125    fn try_from(value: spark_protos::spark::TransferLeaf) -> Result<Self, Self::Error> {
126        let leaf = value.leaf.map(TreeNode::try_from).transpose()?;
127        let secret_cipher = value.secret_cipher;
128        let signature = if !value.signature.is_empty() {
129            Some(Signature::from_der(&value.signature)?)
130        } else {
131            None
132        };
133        let intermediate_refund_tx = bitcoin_tx_from_bytes(&value.intermediate_refund_tx)?;
134
135        Ok(TransferLeaf {
136            leaf,
137            secret_cipher,
138            signature,
139            intermediate_refund_tx,
140        })
141    }
142}
143
#[derive(Debug)]
/// This structure represents a Spark transfer. It is returned when a transfer is initiated, or
/// when querying pending transfers.
///
/// Note: `==` uses a manual [`PartialEq`] impl that compares only a subset of
/// fields (it skips the sender key and compares leaves by count only).
pub struct Transfer {
    /// The unique ID of the transfer.
    pub id: Uuid,
    /// The public key of the sender.
    pub sender_identity_public_key: PublicKey,
    /// The public key of the receiver.
    pub receiver_identity_public_key: PublicKey,
    /// The current status of the transfer.
    pub status: spark_protos::spark::TransferStatus,
    /// The total value of the transfer, in satoshis.
    pub total_value_sats: u64,
    /// The expiry time of the transfer, in seconds since the Unix epoch.
    pub expiry_time_seconds: Option<u64>,
    /// The leaves involved in the transfer.
    pub leaves: Vec<TransferLeaf>,
}
163
impl PartialEq for Transfer {
    // Structural equality over the cheaply comparable fields.
    //
    // NOTE(review): `sender_identity_public_key` is not compared, and `leaves`
    // are compared by length only (likely because `TransferLeaf` does not
    // implement `PartialEq`) — confirm this is intentional before relying on
    // `==` for full equality.
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
            && self.receiver_identity_public_key == other.receiver_identity_public_key
            && self.status == other.status
            && self.total_value_sats == other.total_value_sats
            && self.expiry_time_seconds == other.expiry_time_seconds
            && self.leaves.len() == other.leaves.len()
    }
}
174
175impl TryFrom<spark_protos::spark::Transfer> for Transfer {
176    type Error = SparkSdkError;
177
178    fn try_from(value: spark_protos::spark::Transfer) -> Result<Self, Self::Error> {
179        let id = Uuid::from_str(&value.id)
180            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
181        let sender_identity_public_key = PublicKey::from_slice(&value.sender_identity_public_key)?;
182        let receiver_identity_public_key =
183            PublicKey::from_slice(&value.receiver_identity_public_key)?;
184        let status = spark_protos::spark::TransferStatus::try_from(value.status).map_err(|_| {
185            SparkSdkError::from(ValidationError::InvalidInput {
186                field: "Invalid TransferStatus".to_string(),
187            })
188        })?;
189        let total_value = value.total_value;
190        let expiry_time_seconds = value.expiry_time.map(|time| time.seconds as u64);
191        let leaves = value
192            .leaves
193            .into_iter()
194            .map(TransferLeaf::try_from)
195            .collect::<Result<Vec<TransferLeaf>, Self::Error>>()?;
196
197        Ok(Transfer {
198            id,
199            sender_identity_public_key,
200            receiver_identity_public_key,
201            status,
202            total_value_sats: total_value,
203            expiry_time_seconds,
204            leaves,
205        })
206    }
207}
208
209pub struct GetAllTransfersResponse {
210    /// The list of transfers returned by the query.
211    pub transfers: Vec<Transfer>,
212    /// The offset of the next page of results. If [`None`], then no next page is available.
213    pub offset: Option<u32>,
214}
215
216impl TryFrom<spark_protos::spark::QueryAllTransfersResponse> for GetAllTransfersResponse {
217    type Error = SparkSdkError;
218
219    fn try_from(
220        value: spark_protos::spark::QueryAllTransfersResponse,
221    ) -> Result<Self, Self::Error> {
222        let transfers = value
223            .transfers
224            .into_iter()
225            .map(Transfer::try_from)
226            .collect::<Result<Vec<Transfer>, Self::Error>>()?;
227        let offset = Some(value.offset)
228            .filter(|offset| *offset >= 0)
229            .map(|offset| offset as u32);
230
231        Ok(GetAllTransfersResponse { transfers, offset })
232    }
233}
234
235impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
236    /// Queries all pending transfers where the current user is the receiver.
237    ///
238    /// This function retrieves all pending transfers that are waiting to be accepted by the current user.
239    /// A pending transfer represents funds that have been sent to the user but have not yet been claimed.
240    /// The transfers remain in a pending state until the receiver claims them, at which point the funds
241    /// become available in their wallet.
242    ///
243    /// # Returns
244    ///
245    /// * `Ok(Vec<Transfer>)` - A vector of pending [`Transfer`] objects if successful
246    /// * `Err(SparkSdkError)` - If there was an error querying the transfers
247    ///
248    /// # Example
249    ///
250    /// ```
251    /// # use spark_rust::SparkSdk;
252    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
253    /// let pending = sdk.query_pending_transfers().await?;
254    /// for transfer in pending {
255    ///     println!("Pending transfer: {:?} satoshis", transfer.total_value);
256    /// }
257    /// # Ok(())
258    /// # }
259    /// ```
260    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
261    pub async fn query_pending_transfers(&self) -> Result<Vec<Transfer>, SparkSdkError> {
262        with_handler_lock!(self, async {
263            self.query_pending_transfers_internal().await
264        })
265        .await
266    }
267
268    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
269    pub(crate) async fn query_pending_transfers_internal(
270        &self,
271    ) -> Result<Vec<Transfer>, SparkSdkError> {
272        let request_data = QueryPendingTransfersRequest {
273            transfer_ids: vec![],
274            participant: Some(Participant::ReceiverIdentityPublicKey(
275                self.get_spark_address()?.serialize().to_vec(),
276            )),
277            network: self.config.spark_config.network.marshal_proto(),
278        };
279
280        let response = self
281            .config
282            .spark_config
283            .call_with_retry(
284                request_data,
285                |mut client, req| {
286                    Box::pin(async move { client.query_pending_transfers(req).await })
287                },
288                None,
289            )
290            .await?;
291
292        // Ok(response.transfers)
293        response
294            .transfers
295            .into_iter()
296            .map(Transfer::try_from)
297            .collect()
298    }
299
300    /// Initiates a transfer of funds to another user.
301    ///
302    /// This function handles the process of transferring funds from the current user's wallet to another user,
303    /// identified by their public key. The transfer process involves several steps:
304    ///
305    /// 1. Selecting appropriate leaves (UTXOs) that contain sufficient funds for the transfer
306    /// 2. Locking the selected leaves to prevent concurrent usage
307    /// 3. Generating new signing keys for the transfer
308    /// 4. Creating and signing the transfer transaction
309    /// 5. Removing the used leaves from the wallet
310    ///
311    /// The transfer remains in a pending state until the receiver claims it. The expiry time is set to
312    /// 30 days by default (see `DEFAULT_TRANSFER_EXPIRY`).
313    ///
314    /// # Arguments
315    ///
316    /// * `amount` - The amount to transfer in satoshis. Must be greater than the dust limit and the wallet
317    ///             must have a leaf with exactly this amount.
318    /// * `receiver_spark_address` - The Spark address identifying the receiver of the transfer. This should
319    ///                               be the receiver's Spark address, not a regular Bitcoin public key.
320    ///
321    /// # Returns
322    ///
323    /// * `Ok(String)` - The transfer ID if successful. This ID can be used to track the transfer status.
324    /// * `Err(SparkSdkError)` - If the transfer fails. Common error cases include:
325    ///   - No leaf with exact amount available
326    ///   - Failed to lock leaves
327    ///   - Failed to generate new signing keys
328    ///   - Network errors when communicating with Spark operators
329    ///
330    /// # Example
331    ///
332    /// ```
333    /// # use spark_rust::SparkSdk;
334    /// # use bitcoin::secp256k1::PublicKey;
335    /// # use std::str::FromStr;
336    /// # use uuid::Uuid;
337    /// # async fn example(sdk: &mut SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
338    /// let amount = 100_000;
339    ///
340    /// // Currently, a user's Spark address is their public key.
341    /// let receiver_spark_address = PublicKey::from_str("02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7").unwrap();
342    ///
343    /// let transfer_id_string = sdk.transfer(amount, &receiver_spark_address).await?;
344    /// let transfer_id = Uuid::parse_str(&transfer_id_string).unwrap();
345    /// println!("Transfer ID is {}", transfer_id);
346    /// # Ok(())
347    /// # }
348    /// ```
349    ///
350    /// # Notes
351    ///
352    /// Currently, the leaf selection algorithm only supports selecting a single leaf with the exact
353    /// transfer amount. Future versions will support combining multiple leaves and handling change outputs.
354    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
355    pub async fn transfer(
356        &self,
357        amount: u64,
358        receiver_spark_address: &PublicKey,
359    ) -> Result<String, SparkSdkError> {
360        with_handler_lock!(self, async {
361            let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;
362
363            self.refresh_timelock_nodes(None).await?;
364
365            // do leaf selection
366            // if not sufficient leaves are found, a swap with the SSP will be requested
367            let leaf_selection_response = self.prepare_leaves_for_amount(amount).await?;
368            let unlocking_id = leaf_selection_response.unlocking_id.unwrap();
369
370            // TODO: expect that at this point, leaf_selection_response.total_value == amount, because a swap should happen between the SSP and the wallet.
371            let selected_leaves = leaf_selection_response.leaves;
372            let leaf_ids = selected_leaves
373                .iter()
374                .map(|leaf| leaf.get_id().clone())
375                .collect::<Vec<String>>();
376
377            let mut leaves_to_transfer = Vec::new();
378            for leaf in selected_leaves {
379                // get new
380                let new_signing_public_key = self.signer.new_ephemeral_keypair()?;
381
382                // get old
383                let old_signing_private_key = self.signer.expose_leaf_secret_key_for_transfer(
384                    leaf.get_id().clone(),
385                    SparkKeyType::BaseSigning,
386                    0,
387                    self.config.spark_config.network.to_bitcoin_network(),
388                )?;
389
390                leaves_to_transfer.push(LeafKeyTweak {
391                    leaf: leaf.get_tree_node()?,
392                    old_signing_private_key,
393                    new_signing_public_key,
394                });
395            }
396
397            // TODO - when add actual leaf selection, this might be an array of length > 1.
398            let transfer = self
399                .start_send_transfer(&leaves_to_transfer, receiver_spark_address, expiry_time)
400                .await?;
401
402            // unlock and remove leaves from the leaf manager
403            self.leaf_manager
404                .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;
405
406            Ok(transfer.id.to_string())
407        })
408        .await
409    }
410
411    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
412    pub async fn transfer_leaf_ids(
413        &self,
414        leaf_ids: Vec<String>,
415        receiver_identity_pubkey: &PublicKey,
416    ) -> Result<String, SparkSdkError> {
417        with_handler_lock!(self, async {
418            self.transfer_leaf_ids_internal(leaf_ids, receiver_identity_pubkey)
419                .await
420        })
421        .await
422    }
423
424    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
425    pub(crate) async fn transfer_leaf_ids_internal(
426        &self,
427        leaf_ids: Vec<String>,
428        receiver_identity_pubkey: &PublicKey,
429    ) -> Result<String, SparkSdkError> {
430        let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;
431
432        let leaf_selection_response = self
433            .leaf_manager
434            .lock_leaf_ids(&leaf_ids, SparkNodeStatus::Transfer)?;
435
436        let unlocking_id = leaf_selection_response.unlocking_id.unwrap();
437
438        let selected_leaves = leaf_selection_response.leaves;
439
440        let mut leaves_to_transfer = Vec::new();
441        for leaf in selected_leaves {
442            // get new
443            let new_signing_public_key = self.signer.new_ephemeral_keypair()?;
444
445            let network = self.config.spark_config.network.to_bitcoin_network();
446            let old_signing_pubkey = self.signer.get_deposit_signing_key(network)?;
447            let old_signing_private_key = self
448                .signer
449                .sensitive_expose_secret_key_from_pubkey(&old_signing_pubkey, false)?;
450
451            leaves_to_transfer.push(LeafKeyTweak {
452                leaf: leaf.get_tree_node()?,
453                old_signing_private_key,
454                new_signing_public_key,
455            });
456        }
457
458        // TODO - when add actual leaf selection, this might be an array of length > 1.
459        let transfer = self
460            .start_send_transfer(&leaves_to_transfer, receiver_identity_pubkey, expiry_time)
461            .await?;
462
463        // unlock and remove leaves from the leaf manager
464        self.leaf_manager
465            .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;
466
467        Ok(transfer.id.to_string())
468    }
469
470    /// Claims a pending transfer that was sent to this wallet.
471    ///
472    /// This function processes a pending transfer and claims the funds into the wallet. It performs the following steps:
473    /// 1. Verifies the transfer is in the correct state (SenderKeyTweaked)
474    /// 2. Verifies and decrypts the leaf private keys using the wallet's identity key
475    /// 3. Generates new signing keys for the claimed leaves
476    /// 4. Finalizes the transfer by:
477    ///    - Tweaking the leaf keys
478    ///    - Signing refund transactions
479    ///    - Submitting the signatures to the Spark network
480    ///    - Storing the claimed leaves in the wallet's database
481    ///
482    /// # Arguments
483    ///
484    /// * `transfer` - The pending transfer to claim, must be in SenderKeyTweaked status
485    ///
486    /// # Returns
487    ///
488    /// * `Ok(())` - If the transfer was successfully claimed
489    /// * `Err(SparkSdkError)` - If there was an error during the claim process
490    ///
491    /// # Errors
492    ///
493    /// Returns [`SparkSdkError::InvalidInput`] if:
494    /// - The transfer is not in SenderKeyTweaked status
495    ///
496    /// May also return other `SparkSdkError` variants for network, signing or storage errors.
497    ///
498    /// # Example
499    ///
500    /// ```
501    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
502    ///
503    /// async fn example() {
504    ///     let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
505    ///     let network = SparkNetwork::Regtest;
506    ///     let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
507    ///     let sdk = SparkSdk::new(network, signer).await.unwrap();
508    ///     let pending = sdk.query_pending_transfers().await.unwrap();
509    ///     for transfer in pending {
510    ///         sdk.claim_transfer(transfer).await.unwrap();
511    ///     }
512    /// }
513    /// ```
514    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
515    pub async fn claim_transfer(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
516        with_handler_lock!(self, async { self.claim_transfer_internal(transfer).await }).await
517    }
518
519    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
520    async fn claim_transfer_internal(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
521        // Validate the request
522        for transfer_leaf in &transfer.leaves {
523            if transfer_leaf.leaf.is_none() {
524                return Err(SparkSdkError::from(ValidationError::InvalidInput {
525                    field: "Transfer leaf is not found".to_string(),
526                }));
527            }
528        }
529
530        // Ensure all leaves are specified in the transfer.
531        let leaves = transfer
532            .leaves
533            .iter()
534            .map(|leaf| {
535                leaf.leaf
536                    .clone()
537                    .ok_or(SparkSdkError::from(ValidationError::InvalidInput {
538                        field: "Transfer leaf not found.".to_string(),
539                    }))
540            })
541            .collect::<Result<Vec<TreeNode>, SparkSdkError>>()?;
542
543        if transfer.status != TransferStatus::SenderKeyTweaked {
544            return Err(SparkSdkError::from(ValidationError::InvalidInput {
545                field: "Transfer is not in the correct status".to_string(),
546            }));
547        }
548
549        let mut leaves_to_claim = Vec::new();
550        for leaf in leaves {
551            let leaf_private_key_map = self.verify_pending_transfer(&transfer).await?;
552
553            let leaf_id = leaf.id.to_string();
554            let new_pubkey = self.signer.new_secp256k1_keypair(
555                leaf_id.clone(),
556                SparkKeyType::BaseSigning,
557                0,
558                self.config.spark_config.network.to_bitcoin_network(),
559            )?;
560
561            self.signer
562                .insert_secp256k1_keypair_from_secret_key(&leaf_private_key_map[&leaf_id])
563                .unwrap();
564
565            let claim_node = LeafKeyTweak {
566                leaf: leaf.into(),
567                old_signing_private_key: leaf_private_key_map[&leaf_id],
568                new_signing_public_key: new_pubkey,
569            };
570
571            leaves_to_claim.push(claim_node);
572        }
573
574        self.claim_finalize_incoming_transfer(&transfer, &leaves_to_claim)
575            .await?;
576
577        // refresh timelock nodes
578        self.refresh_timelock_nodes(None).await?;
579
580        Ok(())
581    }
582
583    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
584    pub async fn claim_transfers(&self) -> Result<(), SparkSdkError> {
585        with_handler_lock!(self, async { self.claim_transfers_internal().await }).await
586    }
587
588    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
589    pub async fn claim_transfers_internal(&self) -> Result<(), SparkSdkError> {
590        let pending = self.query_pending_transfers_internal().await?;
591
592        let pending_len = pending.len();
593        let claim_futures = pending
594            .into_iter()
595            .map(|transfer| self.claim_transfer_internal(transfer));
596        futures::future::try_join_all(claim_futures).await?;
597
598        #[cfg(feature = "telemetry")]
599        tracing::debug!("Claimed {:?} pending transfers", pending_len);
600
601        Ok(())
602    }
603
604    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
605    pub async fn get_all_transfers(
606        &self,
607        limit: Option<u32>,
608        offset: Option<u32>,
609    ) -> Result<GetAllTransfersResponse, SparkSdkError> {
610        let limit = limit.unwrap_or(20);
611        let offset = offset.unwrap_or(0);
612
613        self.query_all_transfers(limit, offset).await?.try_into()
614    }
615}
616
#[cfg(test)]
mod tests {
    use chrono::Utc;
    use std::str::FromStr;
    use uuid::Uuid;

    use super::GetAllTransfersResponse;

    const TRANSFER_ID: &str = "40145208-5af3-4b86-8389-1f73c38643aa";
    const SENDER_PUBLIC_KEY: &str =
        "0234d1da9ec5c83e9b69d8fd3b60ee8294961075bd82ebdbeb6b09eedd09d539be";
    const RECEIVER_PUBLIC_KEY: &str =
        "03af6765f8668cb1d9bb34d698ad9211c9ea844a762680e25e1bb520bf32977c2c";

    /// Builds the minimal raw proto transfer shared by the tests below
    /// (previously duplicated verbatim in both tests).
    fn raw_transfer() -> spark_protos::spark::Transfer {
        spark_protos::spark::Transfer {
            id: TRANSFER_ID.to_string(),
            sender_identity_public_key: hex::decode(SENDER_PUBLIC_KEY).unwrap(),
            receiver_identity_public_key: hex::decode(RECEIVER_PUBLIC_KEY).unwrap(),
            status: 0,
            total_value: 100_000,
            expiry_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp() + 5 * 60,
                nanos: 0,
            }),
            leaves: vec![],
        }
    }

    /// Asserts the converted-transfer fields common to both tests.
    fn assert_single_converted_transfer(response: &GetAllTransfersResponse) {
        assert_eq!(response.transfers.len(), 1);

        let transfer = &response.transfers[0];
        assert_eq!(transfer.id, Uuid::from_str(TRANSFER_ID).unwrap());
        assert_eq!(
            transfer.sender_identity_public_key.to_string(),
            SENDER_PUBLIC_KEY
        );
        assert_eq!(
            transfer.receiver_identity_public_key.to_string(),
            RECEIVER_PUBLIC_KEY
        );
    }

    #[test]
    fn test_get_all_transfers_response() {
        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer()],
            offset: 5,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_single_converted_transfer(&response);
        // A non-negative wire offset is surfaced as the next-page offset.
        assert_eq!(response.offset, Some(5));
    }

    #[test]
    fn test_get_all_transfers_response_no_offset() {
        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer()],
            offset: -1,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_single_converted_transfer(&response);
        // A negative wire offset means there is no next page.
        assert_eq!(response.offset, None);
    }
}