spark_rust/wallet/handlers/transfer.rs

use crate::{
    constants::spark::DEFAULT_TRANSFER_EXPIRY,
    error::{NetworkError, SparkSdkError, ValidationError},
    signer::traits::{derivation_path::SparkKeyType, SparkSigner},
    wallet::{
        internal_handlers::traits::leaves::LeavesInternalHandlers,
        internal_handlers::traits::transfer::{LeafKeyTweak, TransferInternalHandlers},
        leaf_manager::SparkNodeStatus,
    },
    SparkSdk,
};
use bitcoin::secp256k1::PublicKey;
use spark_protos::spark::{
    query_pending_transfers_request::Participant, QueryAllTransfersResponse,
    QueryPendingTransfersRequest, Transfer, TransferStatus,
};

impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
    /// Queries all pending transfers where the current user is the receiver.
    ///
    /// This function retrieves all pending transfers that are waiting to be accepted by the current user.
    /// A pending transfer represents funds that have been sent to the user but have not yet been claimed.
    /// The transfers remain in a pending state until the receiver claims them, at which point the funds
    /// become available in their wallet.
    ///
    /// # Returns
    ///
    /// * `Ok(Vec<Transfer>)` - A vector of pending [`Transfer`] objects if successful
    /// * `Err(SparkSdkError)` - If there was an error querying the transfers
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let pending = sdk.query_pending_transfers().await?;
    /// for transfer in pending {
    ///     println!("Pending transfer: {:?} satoshis", transfer.total_value);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn query_pending_transfers(&self) -> Result<Vec<Transfer>, SparkSdkError> {
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let network = self.config.spark_config.network;

        let mut request = tonic::Request::new(QueryPendingTransfersRequest {
            transfer_ids: vec![],
            participant: Some(Participant::ReceiverIdentityPublicKey(
                self.get_spark_address()?.serialize().to_vec(),
            )),
            network: network.marshal_proto(),
        });
        self.add_authorization_header_to_request(&mut request, None);

        let response = spark_client
            .query_pending_transfers(request)
            .await
            .map_err(|status| SparkSdkError::from(NetworkError::Status(status)))?
            .into_inner();

        Ok(response.transfers)
    }

    /// Initiates a transfer of funds to another user.
    ///
    /// This function handles the process of transferring funds from the current user's wallet to another user,
    /// identified by their public key. The transfer process involves several steps:
    ///
    /// 1. Selecting appropriate leaves (UTXOs) that contain sufficient funds for the transfer
    /// 2. Locking the selected leaves to prevent concurrent usage
    /// 3. Generating new signing keys for the transfer
    /// 4. Creating and signing the transfer transaction
    /// 5. Removing the used leaves from the wallet
    ///
    /// The transfer remains in a pending state until the receiver claims it. The expiry time is set to
    /// 30 days by default (see `DEFAULT_TRANSFER_EXPIRY`).
    ///
    /// # Arguments
    ///
    /// * `amount` - The amount to transfer in satoshis. Must be greater than the dust limit, and the wallet
    ///   must have a leaf with exactly this amount.
    /// * `receiver_spark_address` - The Spark address identifying the receiver of the transfer. This should
    ///   be the receiver's Spark address, not a regular Bitcoin public key.
    ///
    /// # Returns
    ///
    /// * `Ok(String)` - The transfer ID if successful. This ID can be used to track the transfer status.
    /// * `Err(SparkSdkError)` - If the transfer fails. Common error cases include:
    ///   - No leaf with the exact amount available
    ///   - Failed to lock leaves
    ///   - Failed to generate new signing keys
    ///   - Network errors when communicating with Spark operators
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # use bitcoin::secp256k1::PublicKey;
    /// # use std::str::FromStr;
    /// # use uuid::Uuid;
    /// # async fn example(sdk: &mut SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let amount = 100_000;
    ///
    /// // Currently, a user's Spark address is their public key.
    /// let receiver_spark_address = PublicKey::from_str("02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7").unwrap();
    ///
    /// let transfer_id_string = sdk.transfer(amount, &receiver_spark_address).await?;
    /// let transfer_id = Uuid::parse_str(&transfer_id_string).unwrap();
    /// println!("Transfer ID is {}", transfer_id);
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// # Notes
    ///
    /// Currently, the leaf selection algorithm only supports selecting a single leaf with the exact
    /// transfer amount. Future versions will support combining multiple leaves and handling change outputs.
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn transfer(
        &self,
        amount: u64,
        receiver_spark_address: &PublicKey,
    ) -> Result<String, SparkSdkError> {
        // TODO: leaf selection currently only returns one leaf. It must be changed to return multiple leaves.
        let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;

        // Do leaf selection.
        // If sufficient leaves are not found, a swap with the SSP will be requested.
        let leaf_selection_response = self.prepare_leaves_for_amount(amount).await?;

        let unlocking_id = leaf_selection_response.unlocking_id.unwrap();

        // TODO: at this point, leaf_selection_response.total_value should equal `amount`, because a swap should have happened between the SSP and the wallet.
        let selected_leaves = leaf_selection_response.leaves;
        let leaf_ids = selected_leaves
            .iter()
            .map(|leaf| leaf.get_id().clone())
            .collect::<Vec<String>>();

        let mut leaves_to_transfer = Vec::new();
        for leaf in selected_leaves {
            // Generate a new ephemeral keypair to use as the leaf's new signing key.
            let new_signing_public_key = self.signer.new_ephemeral_keypair()?;

            // Expose the leaf's current signing secret key for the transfer.
            let old_signing_private_key = self.signer.expose_leaf_secret_key_for_transfer(
                leaf.get_id().clone(),
                SparkKeyType::BaseSigning,
                0,
                self.config.spark_config.network.to_bitcoin_network(),
            )?;

            leaves_to_transfer.push(LeafKeyTweak {
                leaf: leaf.get_tree_node()?,
                old_signing_private_key,
                new_signing_public_key,
            });
        }

        // TODO: once actual leaf selection is added, this may be an array of length > 1.
        let transfer = self
            .start_send_transfer(&leaves_to_transfer, receiver_spark_address, expiry_time)
            .await?;

        // Unlock and remove the used leaves from the leaf manager.
        self.leaf_manager
            .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;

        Ok(transfer.id)
    }

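    /// Transfers specific leaves, identified by their IDs, to another user.
    ///
    /// Unlike [`transfer`](Self::transfer), which selects leaves for a target amount, this function
    /// transfers exactly the leaves whose IDs are passed in. The selected leaves are locked in the
    /// leaf manager for the duration of the operation, new ephemeral signing keys are generated for
    /// them, and the transfer is started with the default expiry (see `DEFAULT_TRANSFER_EXPIRY`).
    /// As with [`transfer`](Self::transfer), the transfer remains pending until the receiver claims it.
    ///
    /// # Arguments
    ///
    /// * `leaf_ids` - The IDs of the leaves to transfer.
    /// * `receiver_identity_pubkey` - The receiver's Spark address (identity public key).
    ///
    /// # Returns
    ///
    /// * `Ok(String)` - The transfer ID if successful.
    /// * `Err(SparkSdkError)` - If locking the leaves, signing, or communicating with the Spark
    ///   operators fails.
    ///
    /// # Example
    ///
    /// A minimal sketch; the receiver key below is illustrative, and `leaf_id` is assumed to
    /// reference a leaf already held by the wallet.
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # use bitcoin::secp256k1::PublicKey;
    /// # use std::str::FromStr;
    /// # async fn example(sdk: &SparkSdk, leaf_id: String) -> Result<(), Box<dyn std::error::Error>> {
    /// let receiver_spark_address = PublicKey::from_str("02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7").unwrap();
    /// let transfer_id = sdk.transfer_leaf_ids(vec![leaf_id], &receiver_spark_address).await?;
    /// println!("Transfer ID is {}", transfer_id);
    /// # Ok(())
    /// # }
    /// ```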
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn transfer_leaf_ids(
        &self,
        leaf_ids: Vec<String>,
        receiver_identity_pubkey: &PublicKey,
    ) -> Result<String, SparkSdkError> {
        let expiry_time = chrono::Utc::now().timestamp() as u64 + DEFAULT_TRANSFER_EXPIRY;

        let leaf_selection_response = self
            .leaf_manager
            .lock_leaf_ids(&leaf_ids, SparkNodeStatus::Transfer)?;

        let unlocking_id = leaf_selection_response.unlocking_id.unwrap();

        let selected_leaves = leaf_selection_response.leaves;

        let mut leaves_to_transfer = Vec::new();
        for leaf in selected_leaves {
            // Generate a new ephemeral keypair to use as the leaf's new signing key.
            let new_signing_public_key = self.signer.new_ephemeral_keypair()?;

            // Expose the secret key corresponding to the wallet's deposit signing public key.
            let network = self.config.spark_config.network.to_bitcoin_network();
            let old_signing_pubkey = self.signer.get_deposit_signing_key(network)?;
            let old_signing_private_key = self
                .signer
                .sensitive_expose_secret_key_from_pubkey(&old_signing_pubkey, false)?;

            leaves_to_transfer.push(LeafKeyTweak {
                leaf: leaf.get_tree_node()?,
                old_signing_private_key,
                new_signing_public_key,
            });
        }

        // TODO: once actual leaf selection is added, this may be an array of length > 1.
        let transfer = self
            .start_send_transfer(&leaves_to_transfer, receiver_identity_pubkey, expiry_time)
            .await?;

        // Unlock and remove the used leaves from the leaf manager.
        self.leaf_manager
            .unlock_leaves(unlocking_id.clone(), &leaf_ids, true)?;

        Ok(transfer.id)
    }

    /// Claims a pending transfer that was sent to this wallet.
    ///
    /// This function processes a pending transfer and claims the funds into the wallet. It performs the following steps:
    /// 1. Verifies the transfer is in the correct state (SenderKeyTweaked)
    /// 2. Verifies and decrypts the leaf private keys using the wallet's identity key
    /// 3. Generates new signing keys for the claimed leaves
    /// 4. Finalizes the transfer by:
    ///    - Tweaking the leaf keys
    ///    - Signing refund transactions
    ///    - Submitting the signatures to the Spark network
    ///    - Storing the claimed leaves in the wallet's database
    ///
    /// # Arguments
    ///
    /// * `transfer` - The pending transfer to claim; must be in SenderKeyTweaked status
    ///
    /// # Returns
    ///
    /// * `Ok(())` - If the transfer was successfully claimed
    /// * `Err(SparkSdkError)` - If there was an error during the claim process
    ///
    /// # Errors
    ///
    /// Returns a [`ValidationError::InvalidInput`] error if:
    /// - The transfer is not in SenderKeyTweaked status
    /// - Any transfer leaf is missing its node data
    ///
    /// May also return other `SparkSdkError` variants for network, signing, or storage errors.
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
    ///
    /// async fn example() {
    ///     let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    ///     let network = SparkNetwork::Regtest;
    ///     let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    ///     let sdk = SparkSdk::new(network, signer).await.unwrap();
    ///     let pending = sdk.query_pending_transfers().await.unwrap();
    ///     for transfer in pending {
    ///         sdk.claim_transfer(transfer).await.unwrap();
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_transfer(&self, transfer: Transfer) -> Result<(), SparkSdkError> {
        // Validate the request
        for transfer_leaf in &transfer.leaves {
            if transfer_leaf.leaf.is_none() {
                return Err(SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Transfer leaf is not found".to_string(),
                }));
            }
        }

        if transfer.status != TransferStatus::SenderKeyTweaked as i32 {
            return Err(SparkSdkError::from(ValidationError::InvalidInput {
                field: "Transfer is not in the correct status".to_string(),
            }));
        }

        // Verify the pending transfer and decrypt the leaf secret keys, keyed by leaf ID.
        let leaf_private_key_map = self.verify_pending_transfer(&transfer).await?;

        let mut leaves_to_claim = Vec::new();
        for leaf in &transfer.leaves {
            let leaf_id = leaf.leaf.as_ref().unwrap().id.clone();

            // Generate a new signing keypair for the claimed leaf.
            let new_pubkey = self.signer.new_secp256k1_keypair(
                leaf_id.clone(),
                SparkKeyType::BaseSigning,
                0,
                self.config.spark_config.network.to_bitcoin_network(),
            )?;

            // Import the decrypted secret key into the signer.
            self.signer
                .insert_secp256k1_keypair_from_secret_key(&leaf_private_key_map[&leaf_id])?;

            let claim_node = LeafKeyTweak {
                leaf: leaf.leaf.as_ref().unwrap().clone(),
                old_signing_private_key: leaf_private_key_map[&leaf_id],
                new_signing_public_key: new_pubkey,
            };

            leaves_to_claim.push(claim_node);
        }

        self.claim_finalize_incoming_transfer(&transfer, &leaves_to_claim)
            .await?;

        Ok(())
    }

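    /// Claims all pending transfers sent to this wallet.
    ///
    /// This is a convenience wrapper that queries the pending transfers via
    /// [`query_pending_transfers`](Self::query_pending_transfers) and then claims each of them
    /// concurrently with [`claim_transfer`](Self::claim_transfer). It fails if any individual
    /// claim fails.
    ///
    /// # Returns
    ///
    /// * `Ok(())` - If every pending transfer was claimed successfully.
    /// * `Err(SparkSdkError)` - If querying the pending transfers or claiming any of them fails.
    ///
    /// # Example
    ///
    /// A minimal sketch, following the pattern of the examples above.
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// sdk.claim_transfers().await?;
    /// # Ok(())
    /// # }
    /// ```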
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_transfers(&self) -> Result<(), SparkSdkError> {
        let pending = self.query_pending_transfers().await?;

        let pending_len = pending.len();
        let claim_futures = pending
            .into_iter()
            .map(|transfer| self.claim_transfer(transfer));
        futures::future::try_join_all(claim_futures).await?;

        #[cfg(feature = "telemetry")]
        tracing::debug!("Claimed {} pending transfers", pending_len);
        Ok(())
    }

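    /// Queries all of this wallet's transfers, with pagination.
    ///
    /// # Arguments
    ///
    /// * `limit` - Maximum number of transfers to return; defaults to 20 when `None`.
    /// * `offset` - Pagination offset; defaults to 0 when `None`.
    ///
    /// # Returns
    ///
    /// * `Ok(QueryAllTransfersResponse)` - The queried page of transfers.
    /// * `Err(SparkSdkError)` - If the query fails.
    ///
    /// # Example
    ///
    /// A minimal sketch that fetches the first page using the default limit and offset.
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let _transfers = sdk.get_all_transfers(None, None).await?;
    /// # Ok(())
    /// # }
    /// ```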
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn get_all_transfers(
        &self,
        limit: Option<u32>,
        offset: Option<u32>,
    ) -> Result<QueryAllTransfersResponse, SparkSdkError> {
        let limit = limit.unwrap_or(20);
        let offset = offset.unwrap_or(0);

        let response = self.query_all_transfers(limit, offset).await?;
        Ok(response)
    }
}