spark_rust/wallet/handlers/transfer.rs

use std::str::FromStr;

use crate::with_handler_lock;
use crate::{
    constants::spark::DEFAULT_TRANSFER_EXPIRY,
    error::{SparkSdkError, ValidationError},
    signer::traits::SparkSigner,
    wallet::{
        internal_handlers::traits::{
            leaves::LeavesInternalHandlers,
            transfer::{LeafKeyTweak, TransferInternalHandlers},
        },
        leaf_manager::SparkNodeStatus,
        utils::bitcoin::bitcoin_tx_from_bytes,
    },
    SparkNetwork, SparkSdk,
};
use bitcoin::{
    secp256k1::{ecdsa::Signature, PublicKey},
    Transaction,
};
use spark_cryptography::derivation_path::SparkKeyType;
use spark_protos::spark::{
    query_pending_transfers_request::Participant, QueryPendingTransfersRequest, TransferStatus,
};
use uuid::Uuid;

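/// A node in a Spark tree as tracked by the wallet, together with its node and refund
/// transactions. Mirrors `spark_protos::spark::TreeNode` with parsed, strongly typed fields.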
#[derive(Debug, Clone)]
pub struct TreeNode {
    pub id: Uuid,
    pub tree_id: Uuid,
    pub value_sats: u64,
    pub parent_node_id: Option<Uuid>,
    pub node_tx: Transaction,
    pub refund_tx: Transaction,
    pub vout: u32,
    pub verifying_public_key: PublicKey,
    pub owner_identity_public_key: PublicKey,
    pub signing_keyshares: Option<(Vec<String>, u32)>,
    pub status: String,
    pub network: SparkNetwork,
}

impl TryFrom<spark_protos::spark::TreeNode> for TreeNode {
    type Error = SparkSdkError;

    fn try_from(value: spark_protos::spark::TreeNode) -> Result<Self, Self::Error> {
        let id = Uuid::from_str(&value.id)
            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
        let tree_id = Uuid::from_str(&value.tree_id)
            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
        let value_sats = value.value;
        let parent_node_id = value
            .parent_node_id
            .map(|parent_node_id| {
                Uuid::from_str(&parent_node_id)
                    .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))
            })
            .transpose()?;
        let node_tx = bitcoin_tx_from_bytes(&value.node_tx)?;
        let refund_tx = bitcoin_tx_from_bytes(&value.refund_tx)?;
        let vout = value.vout;
        let verifying_public_key = PublicKey::from_slice(&value.verifying_public_key)?;
        let owner_identity_public_key = PublicKey::from_slice(&value.owner_identity_public_key)?;
        let signing_keyshares = value.signing_keyshare.map(|signing_keyshare| {
            (
                signing_keyshare.owner_identifiers,
                signing_keyshare.threshold,
            )
        });
        let status = value.status;
        let network = value.network.try_into()?;

        Ok(TreeNode {
            id,
            tree_id,
            value_sats,
            parent_node_id,
            node_tx,
            refund_tx,
            vout,
            verifying_public_key,
            owner_identity_public_key,
            signing_keyshares,
            status,
            network,
        })
    }
}

impl From<TreeNode> for spark_protos::spark::TreeNode {
    fn from(value: TreeNode) -> Self {
        spark_protos::spark::TreeNode {
            id: value.id.to_string(),
            tree_id: value.tree_id.to_string(),
            value: value.value_sats,
            parent_node_id: value.parent_node_id.map(|id| id.to_string()),
            node_tx: bitcoin::consensus::serialize(&value.node_tx),
            refund_tx: bitcoin::consensus::serialize(&value.refund_tx),
            vout: value.vout,
            verifying_public_key: value.verifying_public_key.serialize().to_vec(),
            owner_identity_public_key: value.owner_identity_public_key.serialize().to_vec(),
            signing_keyshare: value.signing_keyshares.map(|(identifiers, threshold)| {
                spark_protos::spark::SigningKeyshare {
                    owner_identifiers: identifiers,
                    threshold,
                }
            }),
            status: value.status,
            network: value.network.marshal_proto(),
        }
    }
}

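/// A leaf included in a transfer, together with the encrypted leaf secret, the sender's
/// signature (if present), and the intermediate refund transaction.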
#[derive(Debug, Clone)]
pub struct TransferLeaf {
    pub leaf: Option<TreeNode>,
    pub secret_cipher: Vec<u8>,
    pub signature: Option<Signature>,
    pub intermediate_refund_tx: Transaction,
}

impl TryFrom<spark_protos::spark::TransferLeaf> for TransferLeaf {
    type Error = SparkSdkError;

    fn try_from(value: spark_protos::spark::TransferLeaf) -> Result<Self, Self::Error> {
        let leaf = value.leaf.map(TreeNode::try_from).transpose()?;
        let secret_cipher = value.secret_cipher;
        let signature = if !value.signature.is_empty() {
            Some(Signature::from_der(&value.signature)?)
        } else {
            None
        };
        let intermediate_refund_tx = bitcoin_tx_from_bytes(&value.intermediate_refund_tx)?;

        Ok(TransferLeaf {
            leaf,
            secret_cipher,
            signature,
            intermediate_refund_tx,
        })
    }
}

/// This structure represents a Spark transfer. It is returned when a transfer is initiated, or
/// when querying pending transfers.
#[derive(Debug)]
pub struct Transfer {
    /// The unique ID of the transfer.
    pub id: Uuid,
    /// The public key of the sender.
    pub sender_identity_public_key: PublicKey,
    /// The public key of the receiver.
    pub receiver_identity_public_key: PublicKey,
    /// The current status of the transfer.
    pub status: spark_protos::spark::TransferStatus,
    /// The total value of the transfer, in satoshis.
    pub total_value_sats: u64,
    /// The expiry time of the transfer, in seconds since the Unix epoch.
    pub expiry_time_seconds: Option<u64>,
    /// The leaves involved in the transfer.
    pub leaves: Vec<TransferLeaf>,
}

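// Equality compares the transfer's id, receiver, status, value, expiry, and the number of
// leaves; leaves are not compared element by element.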
impl PartialEq for Transfer {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
            && self.receiver_identity_public_key == other.receiver_identity_public_key
            && self.status == other.status
            && self.total_value_sats == other.total_value_sats
            && self.expiry_time_seconds == other.expiry_time_seconds
            && self.leaves.len() == other.leaves.len()
    }
}

impl TryFrom<spark_protos::spark::Transfer> for Transfer {
    type Error = SparkSdkError;

    fn try_from(value: spark_protos::spark::Transfer) -> Result<Self, Self::Error> {
        let id = Uuid::from_str(&value.id)
            .map_err(|err| SparkSdkError::from(ValidationError::InvalidUuid(err)))?;
        let sender_identity_public_key = PublicKey::from_slice(&value.sender_identity_public_key)?;
        let receiver_identity_public_key =
            PublicKey::from_slice(&value.receiver_identity_public_key)?;
        let status = spark_protos::spark::TransferStatus::try_from(value.status).map_err(|_| {
            SparkSdkError::from(ValidationError::InvalidInput {
                field: "Invalid TransferStatus".to_string(),
            })
        })?;
        let total_value = value.total_value;
        let expiry_time_seconds = value.expiry_time.map(|time| time.seconds as u64);
        let leaves = value
            .leaves
            .into_iter()
            .map(TransferLeaf::try_from)
            .collect::<Result<Vec<TransferLeaf>, Self::Error>>()?;

        Ok(Transfer {
            id,
            sender_identity_public_key,
            receiver_identity_public_key,
            status,
            total_value_sats: total_value,
            expiry_time_seconds,
            leaves,
        })
    }
}

/// A page of transfers returned by [`SparkSdk::get_all_transfers`].
pub struct GetAllTransfersResponse {
    /// The list of transfers returned by the query.
    pub transfers: Vec<Transfer>,
    /// The offset of the next page of results. If [`None`], then no next page is available.
    pub offset: Option<u32>,
}

impl TryFrom<spark_protos::spark::QueryAllTransfersResponse> for GetAllTransfersResponse {
    type Error = SparkSdkError;

    fn try_from(
        value: spark_protos::spark::QueryAllTransfersResponse,
    ) -> Result<Self, Self::Error> {
        let transfers = value
            .transfers
            .into_iter()
            .map(Transfer::try_from)
            .collect::<Result<Vec<Transfer>, Self::Error>>()?;
        // A negative offset in the response means there is no next page.
        let offset = Some(value.offset)
            .filter(|offset| *offset >= 0)
            .map(|offset| offset as u32);

        Ok(GetAllTransfersResponse { transfers, offset })
    }
}

impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
    /// Queries all pending transfers where the current user is the receiver.
    ///
    /// This function retrieves all pending transfers that are waiting to be accepted by the current user.
    /// A pending transfer represents funds that have been sent to the user but have not yet been claimed.
    /// The transfers remain in a pending state until the receiver claims them, at which point the funds
    /// become available in their wallet.
    ///
    /// # Returns
    ///
    /// * `Ok(Vec<Transfer>)` - A vector of pending [`Transfer`] objects if successful
    /// * `Err(SparkSdkError)` - If there was an error querying the transfers
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let pending = sdk.query_pending_transfers().await?;
    /// for transfer in pending {
    ///     println!("Pending transfer: {:?} satoshis", transfer.total_value_sats);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn query_pending_transfers(&self) -> Result<Vec<Transfer>, SparkSdkError> {
        with_handler_lock!(self, async {
            self.query_pending_transfers_internal().await
        })
        .await
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub(crate) async fn query_pending_transfers_internal(
        &self,
    ) -> Result<Vec<Transfer>, SparkSdkError> {
        let request_data = QueryPendingTransfersRequest {
            transfer_ids: vec![],
            participant: Some(Participant::ReceiverIdentityPublicKey(
                self.get_spark_address()?.serialize().to_vec(),
            )),
        };

        let response = self
            .config
            .spark_config
            .call_with_retry(
                request_data,
                |mut client, req| {
                    Box::pin(async move { client.query_pending_transfers(req).await })
                },
                None,
            )
            .await?;

        // Convert the proto transfers into the SDK's `Transfer` type.
        response
            .transfers
            .into_iter()
            .map(Transfer::try_from)
            .collect()
    }

    /// Initiates a transfer of funds to another user.
    ///
    /// This function handles the process of transferring funds from the current user's wallet to another user,
    /// identified by their public key. The transfer process involves several steps:
    ///
    /// 1. Selecting appropriate leaves (UTXOs) that contain sufficient funds for the transfer
    /// 2. Locking the selected leaves to prevent concurrent usage
    /// 3. Generating new signing keys for the transfer
    /// 4. Creating and signing the transfer transaction
    /// 5. Removing the used leaves from the wallet
    ///
    /// The transfer remains in a pending state until the receiver claims it. The expiry time is set to
    /// 30 days by default (see `DEFAULT_TRANSFER_EXPIRY`).
    ///
    /// # Arguments
    ///
    /// * `amount` - The amount to transfer in satoshis. Must be greater than the dust limit and the wallet
    ///             must have a leaf with exactly this amount.
    /// * `receiver_spark_address` - The Spark address identifying the receiver of the transfer. This should
    ///                               be the receiver's Spark address, not a regular Bitcoin public key.
    ///
    /// # Returns
    ///
    /// * `Ok(String)` - The transfer ID if successful. This ID can be used to track the transfer status.
    /// * `Err(SparkSdkError)` - If the transfer fails. Common error cases include:
    ///   - No leaf with exact amount available
    ///   - Failed to lock leaves
    ///   - Failed to generate new signing keys
    ///   - Network errors when communicating with Spark operators
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # use bitcoin::secp256k1::PublicKey;
    /// # use std::str::FromStr;
    /// # use uuid::Uuid;
    /// # async fn example(sdk: &mut SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let amount = 100_000;
    ///
    /// // Currently, a user's Spark address is their public key.
    /// let receiver_spark_address = PublicKey::from_str("02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7").unwrap();
    ///
    /// let transfer_id_string = sdk.transfer(amount, &receiver_spark_address).await?;
    /// let transfer_id = Uuid::parse_str(&transfer_id_string).unwrap();
    /// println!("Transfer ID is {}", transfer_id);
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// # Notes
    ///
    /// Currently, the leaf selection algorithm only supports selecting a single leaf with the exact
    /// transfer amount. Future versions will support combining multiple leaves and handling change outputs.
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn transfer(
        &self,
        amount: u64,
        receiver_spark_address: &PublicKey,
    ) -> Result<String, SparkSdkError> {
        with_handler_lock!(self, async {
            let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;

            self.refresh_timelock_nodes(None).await?;

            // Select leaves covering the requested amount. If the wallet does not hold
            // sufficient leaves, a swap with the SSP is requested.
            let leaf_selection_response = self.prepare_leaves_for_amount(amount).await?;
            let unlocking_id = leaf_selection_response.unlocking_id.unwrap();

            // TODO: at this point leaf_selection_response.total_value should equal `amount`,
            // because a swap should have happened between the SSP and the wallet.
            let selected_leaves = leaf_selection_response.leaves;
            let leaf_ids = selected_leaves
                .iter()
                .map(|leaf| leaf.get_id().clone())
                .collect::<Vec<String>>();

            let mut leaves_to_transfer = Vec::new();
            for leaf in selected_leaves {
                // Generate a new ephemeral signing public key for the transfer.
                let new_signing_public_key = self.signer.new_ephemeral_keypair()?;

                // Derive the leaf's current (old) signing key.
                let keypair = self.signer.derive_spark_key(
                    leaf.get_id().clone(),
                    0,
                    SparkKeyType::BaseSigning,
                    self.config.spark_config.network.to_bitcoin_network(),
                )?;
                let old_signing_private_key = keypair.secret_key();

                leaves_to_transfer.push(LeafKeyTweak {
                    leaf: leaf.get_tree_node()?,
                    old_signing_private_key,
                    new_signing_public_key,
                });
            }

            // TODO: once real leaf selection is added, this may be an array of length > 1.
            let transfer = self
                .start_send_transfer(&leaves_to_transfer, receiver_spark_address, expiry_time)
                .await?;

            // Unlock and remove the used leaves from the leaf manager.
            self.leaf_manager
                .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;

            Ok(transfer.id.to_string())
        })
        .await
    }

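    /// Transfers specific leaves, identified by their IDs, to another user.
    ///
    /// This is a lower-level variant of [`SparkSdk::transfer`]: instead of selecting leaves for an
    /// amount, the caller names the exact leaves to send. The leaves are locked for the duration
    /// of the transfer and removed from the leaf manager once the transfer has been initiated.
    ///
    /// # Example
    ///
    /// An illustrative sketch; the leaf ID below is a placeholder, not a real leaf.
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # use bitcoin::secp256k1::PublicKey;
    /// # use std::str::FromStr;
    /// # async fn example(sdk: &SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let leaf_ids = vec!["00000000-0000-0000-0000-000000000000".to_string()];
    /// let receiver = PublicKey::from_str("02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7")?;
    /// let transfer_id = sdk.transfer_leaf_ids(leaf_ids, &receiver).await?;
    /// println!("Transfer ID is {}", transfer_id);
    /// # Ok(())
    /// # }
    /// ```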
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn transfer_leaf_ids(
        &self,
        leaf_ids: Vec<String>,
        receiver_identity_pubkey: &PublicKey,
    ) -> Result<String, SparkSdkError> {
        with_handler_lock!(self, async {
            self.transfer_leaf_ids_internal(leaf_ids, receiver_identity_pubkey)
                .await
        })
        .await
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub(crate) async fn transfer_leaf_ids_internal(
        &self,
        leaf_ids: Vec<String>,
        receiver_identity_pubkey: &PublicKey,
    ) -> Result<String, SparkSdkError> {
        let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;

        let leaf_selection_response = self
            .leaf_manager
            .lock_leaf_ids(&leaf_ids, SparkNodeStatus::Transfer)?;

        let unlocking_id = leaf_selection_response.unlocking_id.unwrap();

        let selected_leaves = leaf_selection_response.leaves;

        let mut leaves_to_transfer = Vec::new();
        for leaf in selected_leaves {
            // Generate a new ephemeral signing public key for the transfer.
            let new_signing_public_key = self.signer.new_ephemeral_keypair()?;

            // The leaf's current (old) signing key is the deposit signing key; expose its secret key.
            let network = self.config.spark_config.network.to_bitcoin_network();
            let old_signing_pubkey = self.signer.get_deposit_signing_key(network)?;
            let old_signing_private_key = self
                .signer
                .sensitive_expose_secret_key_from_pubkey(&old_signing_pubkey, false)?;

            leaves_to_transfer.push(LeafKeyTweak {
                leaf: leaf.get_tree_node()?,
                old_signing_private_key,
                new_signing_public_key,
            });
        }

        // TODO: once real leaf selection is added, this may be an array of length > 1.
        let transfer = self
            .start_send_transfer(&leaves_to_transfer, receiver_identity_pubkey, expiry_time)
            .await?;

        // Unlock and remove the used leaves from the leaf manager.
        self.leaf_manager
            .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;

        Ok(transfer.id.to_string())
    }

    /// Claims a pending transfer that was sent to this wallet.
    ///
    /// This function processes a pending transfer and claims the funds into the wallet. It performs the following steps:
    /// 1. Verifies the transfer is in the correct state (SenderKeyTweaked)
    /// 2. Verifies and decrypts the leaf private keys using the wallet's identity key
    /// 3. Generates new signing keys for the claimed leaves
    /// 4. Finalizes the transfer by:
    ///    - Tweaking the leaf keys
    ///    - Signing refund transactions
    ///    - Submitting the signatures to the Spark network
    ///    - Storing the claimed leaves in the wallet's database
    ///
    /// # Arguments
    ///
    /// * `transfer` - The pending transfer to claim, must be in SenderKeyTweaked status
    ///
    /// # Returns
    ///
    /// * `Ok(())` - If the transfer was successfully claimed
    /// * `Err(SparkSdkError)` - If there was an error during the claim process
    ///
    /// # Errors
    ///
    /// Returns a validation error (`ValidationError::InvalidInput`) if:
    /// - The transfer is not in SenderKeyTweaked status
    ///
    /// May also return other `SparkSdkError` variants for network, signing, or storage errors.
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
    ///
    /// async fn example() {
    ///     let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    ///     let network = SparkNetwork::Regtest;
    ///     let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    ///     let sdk = SparkSdk::new(network, signer).await.unwrap();
    ///     let pending = sdk.query_pending_transfers().await.unwrap();
    ///     for transfer in pending {
    ///         sdk.claim_transfer(transfer).await.unwrap();
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_transfer(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
        with_handler_lock!(self, async { self.claim_transfer_internal(transfer).await }).await
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn claim_transfer_internal(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
        // Validate that every transfer leaf carries a leaf node.
        for transfer_leaf in &transfer.leaves {
            if transfer_leaf.leaf.is_none() {
                return Err(SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Transfer leaf is not found".to_string(),
                }));
            }
        }

        // Collect the leaf nodes from the transfer.
        let leaves = transfer
            .leaves
            .iter()
            .map(|leaf| {
                leaf.leaf
                    .clone()
                    .ok_or(SparkSdkError::from(ValidationError::InvalidInput {
                        field: "Transfer leaf not found.".to_string(),
                    }))
            })
            .collect::<Result<Vec<TreeNode>, SparkSdkError>>()?;

        if transfer.status != TransferStatus::SenderKeyTweaked {
            return Err(SparkSdkError::from(ValidationError::InvalidInput {
                field: "Transfer is not in the correct status".to_string(),
            }));
        }

        // Verify the transfer and decrypt the leaf private keys once for the whole transfer.
        let leaf_private_key_map = self.verify_pending_transfer(&transfer).await?;

        let mut leaves_to_claim = Vec::new();
        for leaf in leaves {
            let leaf_id = leaf.id.to_string();
            let new_pubkey = self.signer.derive_spark_key(
                leaf_id.clone(),
                0,
                SparkKeyType::BaseSigning,
                self.config.spark_config.network.to_bitcoin_network(),
            )?;

            self.signer
                .insert_secp256k1_keypair_from_secret_key(&leaf_private_key_map[&leaf_id])?;

            let claim_node = LeafKeyTweak {
                leaf: leaf.into(),
                old_signing_private_key: leaf_private_key_map[&leaf_id],
                new_signing_public_key: new_pubkey.public_key(),
            };

            leaves_to_claim.push(claim_node);
        }

        self.claim_finalize_incoming_transfer(&transfer, &leaves_to_claim)
            .await?;

        // Refresh timelock nodes.
        self.refresh_timelock_nodes(None).await?;

        Ok(())
    }

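    /// Claims all pending transfers addressed to this wallet.
    ///
    /// This is a convenience wrapper around [`SparkSdk::query_pending_transfers`] and
    /// [`SparkSdk::claim_transfer`]: it queries the pending transfers and claims them
    /// concurrently.
    ///
    /// # Example
    ///
    /// An illustrative sketch, assuming an already initialized SDK:
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: &SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// sdk.claim_transfers().await?;
    /// # Ok(())
    /// # }
    /// ```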
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_transfers(&self) -> Result<(), SparkSdkError> {
        with_handler_lock!(self, async { self.claim_transfers_internal().await }).await
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_transfers_internal(&self) -> Result<(), SparkSdkError> {
        let pending = self.query_pending_transfers_internal().await?;

        let pending_len = pending.len();
        let claim_futures = pending
            .into_iter()
            .map(|transfer| self.claim_transfer_internal(transfer));
        futures::future::try_join_all(claim_futures).await?;

        #[cfg(feature = "telemetry")]
        tracing::debug!("Claimed {:?} pending transfers", pending_len);

        Ok(())
    }

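    /// Queries transfers involving this wallet, one page at a time.
    ///
    /// `limit` defaults to 20 and `offset` defaults to 0 when `None` is passed. If the returned
    /// [`GetAllTransfersResponse::offset`] is `Some`, it can be passed back as the `offset` of the
    /// next call to fetch the following page.
    ///
    /// # Example
    ///
    /// An illustrative sketch, assuming an already initialized SDK:
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: &SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let page = sdk.get_all_transfers(None, None).await?;
    /// println!("Fetched {} transfers", page.transfers.len());
    /// if let Some(next_offset) = page.offset {
    ///     let _next_page = sdk.get_all_transfers(None, Some(next_offset)).await?;
    /// }
    /// # Ok(())
    /// # }
    /// ```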
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn get_all_transfers(
        &self,
        limit: Option<u32>,
        offset: Option<u32>,
    ) -> Result<GetAllTransfersResponse, SparkSdkError> {
        let limit = limit.unwrap_or(20);
        let offset = offset.unwrap_or(0);

        self.query_all_transfers(limit, offset).await?.try_into()
    }
}

#[cfg(test)]
mod tests {
    use chrono::Utc;
    use std::str::FromStr;
    use uuid::Uuid;

    use super::GetAllTransfersResponse;

    const TRANSFER_ID: &str = "40145208-5af3-4b86-8389-1f73c38643aa";
    const SENDER_PUBLIC_KEY: &str =
        "0234d1da9ec5c83e9b69d8fd3b60ee8294961075bd82ebdbeb6b09eedd09d539be";
    const RECEIVER_PUBLIC_KEY: &str =
        "03af6765f8668cb1d9bb34d698ad9211c9ea844a762680e25e1bb520bf32977c2c";

    #[test]
    fn test_get_all_transfers_response() {
        let raw_transfer = spark_protos::spark::Transfer {
            id: TRANSFER_ID.to_string(),
            sender_identity_public_key: hex::decode(SENDER_PUBLIC_KEY).unwrap(),
            receiver_identity_public_key: hex::decode(RECEIVER_PUBLIC_KEY).unwrap(),
            status: 0,
            total_value: 100_000,
            expiry_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp() + 5 * 60,
                nanos: 0,
            }),
            leaves: vec![],
            created_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            updated_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            r#type: 0,
        };

        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer],
            offset: 5,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_eq!(response.transfers.len(), 1);

        let transfer = &response.transfers[0];
        assert_eq!(transfer.id, Uuid::from_str(TRANSFER_ID).unwrap());
        assert_eq!(
            transfer.sender_identity_public_key.to_string(),
            SENDER_PUBLIC_KEY
        );
        assert_eq!(
            transfer.receiver_identity_public_key.to_string(),
            RECEIVER_PUBLIC_KEY
        );

        assert_eq!(response.offset, Some(5));
    }

    #[test]
    fn test_get_all_transfers_response_no_offset() {
        let raw_transfer = spark_protos::spark::Transfer {
            id: TRANSFER_ID.to_string(),
            sender_identity_public_key: hex::decode(SENDER_PUBLIC_KEY).unwrap(),
            receiver_identity_public_key: hex::decode(RECEIVER_PUBLIC_KEY).unwrap(),
            status: 0,
            total_value: 100_000,
            expiry_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp() + 5 * 60,
                nanos: 0,
            }),
            leaves: vec![],
            created_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            updated_time: Some(prost_types::Timestamp {
                seconds: Utc::now().timestamp(),
                nanos: 0,
            }),
            r#type: 0,
        };

        let raw_response = spark_protos::spark::QueryAllTransfersResponse {
            transfers: vec![raw_transfer],
            offset: -1,
        };

        let response = GetAllTransfersResponse::try_from(raw_response).unwrap();

        assert_eq!(response.transfers.len(), 1);

        let transfer = &response.transfers[0];
        assert_eq!(transfer.id, Uuid::from_str(TRANSFER_ID).unwrap());
        assert_eq!(
            transfer.sender_identity_public_key.to_string(),
            SENDER_PUBLIC_KEY
        );
        assert_eq!(
            transfer.receiver_identity_public_key.to_string(),
            RECEIVER_PUBLIC_KEY
        );

        assert_eq!(response.offset, None);
    }
}