spark_rust/wallet/handlers/deposit.rs
use std::collections::HashMap;

use crate::with_handler_lock;
use bitcoin::secp256k1::PublicKey;
use spark_protos::spark::{
    DepositAddressQueryResult, QueryUnusedDepositAddressesRequest, TreeNode,
};

use crate::{
    error::{NetworkError, SparkSdkError, ValidationError},
    signer::traits::SparkSigner,
    wallet::{
        internal_handlers::{
            implementations::deposit::Address,
            traits::{deposit::DepositInternalHandlers, mempool::MempoolInternalHandlers},
        },
        utils::bitcoin::serialize_bitcoin_transaction,
    },
    SparkSdk,
};

/// The response from the [`SparkSdk::generate_deposit_address`] method.
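///
/// # Example
///
/// A minimal sketch of reading the response fields, assuming the response was
/// obtained via [`SparkSdk::generate_deposit_address`]:
///
/// ```
/// # use spark_rust::SparkSdk;
/// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
/// let response = sdk.generate_deposit_address().await?;
/// println!("deposit address: {}", response.deposit_address);
/// println!("signing public key: {}", response.signing_public_key);
/// println!("verifying public key: {}", response.verifying_public_key);
/// # Ok(())
/// # }
/// ```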
pub struct GenerateDepositAddressSdkResponse {
    /// The deposit address, as [`bitcoin::Address`]. Deposit addresses are P2TR addresses.
    pub deposit_address: bitcoin::Address,

    /// The signing public key for this deposit address, as [`bitcoin::secp256k1::PublicKey`].
    pub signing_public_key: PublicKey,

    /// The verifying public key, as [`bitcoin::secp256k1::PublicKey`]. *Note:* this is an interpolated key used to verify that threshold signatures between the user and the Spark Operators are valid. It should *not* be used for signing.
    pub verifying_public_key: PublicKey,
}

impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
    /// Generates a new deposit address for receiving funds into the Spark wallet.
    ///
    /// This function handles the generation of a new deposit address by:
    /// 1. Creating a new signing keypair for the deposit address
    /// 2. Requesting a deposit address from the Spark network
    /// 3. Validating the returned address and proof of possession
    ///
    /// # Returns
    ///
    /// * `Ok(GenerateDepositAddressSdkResponse)` - Contains the validated deposit address and signing public key
    /// * `Err(SparkSdkError)` - If there was an error during address generation
    ///
    /// # Errors
    ///
    /// Returns [`SparkSdkError`] if:
    /// - Failed to generate a new signing keypair
    /// - Network errors occur when communicating with Spark operators
    /// - Address validation fails (e.g. invalid proof of possession)
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// let deposit_address = sdk.generate_deposit_address().await?;
    /// println!("New deposit address: {}", deposit_address.deposit_address);
    /// # Ok(())
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn generate_deposit_address(
        &self,
    ) -> Result<GenerateDepositAddressSdkResponse, SparkSdkError> {
        with_handler_lock!(self, async {
            let identity_public_key = self.get_spark_address()?;

            let signing_public_key = self
                .signer
                .get_deposit_signing_key(self.config.spark_config.network.to_bitcoin_network())?;

            // Build the deposit address request for the Spark operators.
            let request_data = spark_protos::spark::GenerateDepositAddressRequest {
                signing_public_key: signing_public_key.serialize().to_vec(),
                identity_public_key: identity_public_key.serialize().to_vec(),
                network: self.config.spark_config.network.marshal_proto(),
            };

            let response = self
                .config
                .spark_config
                .call_with_retry(
                    request_data,
                    |mut client, req| {
                        Box::pin(async move { client.generate_deposit_address(req).await })
                    },
                    None,
                )
                .await?;

            // Parse and validate the deposit address returned by the operators.
            let address = Address::try_from_address(
                response
                    .deposit_address
                    .ok_or(SparkSdkError::from(NetworkError::InvalidResponse))?,
                self.config.spark_config.network,
            )?;

            self.validate_deposit_address(&address, &signing_public_key)
                .await?;

            Ok(GenerateDepositAddressSdkResponse {
                deposit_address: address.address,
                signing_public_key,
                verifying_public_key: address.verifying_key,
            })
        })
        .await
    }

    /// Claims a pending deposit by its transaction id.
    ///
    /// This function claims the deposit by:
    /// 1. Fetching the deposit transaction from the mempool
    /// 2. Matching its outputs against the wallet's unused deposit addresses
    /// 3. Finalizing the deposit by creating tree nodes
    ///
    /// # Errors
    ///
    /// Returns [`SparkSdkError`] if:
    /// - Failed to connect to the Spark service
    /// - Failed to query the mempool
    /// - Failed to finalize the deposit
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
    ///
    /// async fn example() {
    ///     let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    ///     let network = SparkNetwork::Regtest;
    ///     let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    ///     let sdk = SparkSdk::new(network, signer).await.unwrap();
    ///
    ///     let hardcoded_txid = "edb5575e6ee96fcf175c9114e0b0d86d99d4642956edcd02e6ec7b6899e90b41";
    ///     sdk.claim_deposit(hardcoded_txid.to_string()).await.unwrap();
    /// }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn claim_deposit(&self, txid: String) -> Result<Vec<TreeNode>, SparkSdkError> {
        with_handler_lock!(self, async {
            // Fetch the deposit transaction from the mempool.
            let deposit_tx = self.query_mempool_transaction_by_txid(txid).await?;

            // Query unused deposit addresses from Spark.
            let time_start = std::time::Instant::now();
            let identity_public_key = self.get_spark_address()?;

            let request_data = QueryUnusedDepositAddressesRequest {
                identity_public_key: identity_public_key.serialize().to_vec(),
            };

            let response = self
                .config
                .spark_config
                .call_with_retry(
                    request_data,
                    |mut client, req| {
                        Box::pin(async move { client.query_unused_deposit_addresses(req).await })
                    },
                    None,
                )
                .await?;

            let duration = time_start.elapsed();
            #[cfg(feature = "telemetry")]
            tracing::debug!(duration = ?duration, "query_unused_deposit_addresses");

            // Index the returned unused deposit addresses by their address string for lookup.
            let deposit_addresses = response.deposit_addresses;
            let unused_deposit_addresses: HashMap<String, DepositAddressQueryResult> =
                deposit_addresses
                    .iter()
                    .map(|deposit_address| {
                        (
                            deposit_address.deposit_address.clone(),
                            deposit_address.clone(),
                        )
                    })
                    .collect();

            let mut vout: u32 = 0;
            let mut found_deposit_data: Option<DepositAddressQueryResult> = None;

            for (i, tx_output) in deposit_tx.output.iter().enumerate() {
                let parsed_script = tx_output.script_pubkey.clone();
                let script_address = bitcoin::Address::from_script(
                    &parsed_script,
                    self.config.spark_config.network.to_bitcoin_network(),
                )
                .ok()
                .map(|addr| addr.to_string())
                .ok_or_else(|| {
                    SparkSdkError::from(ValidationError::InvalidAddress {
                        address: "Invalid address".to_string(),
                    })
                })?;

                if unused_deposit_addresses.contains_key(&script_address) {
                    // Found a matching deposit address.
                    vout = i as u32;
                    found_deposit_data = Some(
                        unused_deposit_addresses
                            .get(&script_address)
                            .unwrap()
                            .clone(),
                    );

                    #[cfg(feature = "telemetry")]
                    tracing::debug!(address = script_address, "Found matching deposit address");

                    break;
                }
            }

            // If no matching deposit address is found, return an error.
            let deposit_data = found_deposit_data.ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidAddress {
                    address: "Deposit address not found".to_string(),
                })
            })?;

            let nodes = self
                .finalize_deposit_internal(
                    deposit_data.user_signing_public_key.clone(),
                    deposit_data.verifying_public_key.clone(),
                    deposit_tx.clone(),
                    vout,
                )
                .await?;

            self.claim_transfers_internal().await?;

            Ok(nodes)
        })
        .await
    }

    /// Finalizes a deposit by creating a tree node and transferring it to self.
    ///
    /// # Arguments
    ///
    /// * `signing_pubkey` - The public key used for signing
    /// * `verifying_pubkey` - The verifying public key for the deposit address
    /// * `deposit_tx` - The Bitcoin transaction containing the deposit
    /// * `vout` - The output index of the deposit in the transaction
    ///
    /// # Errors
    ///
    /// Returns [`SparkSdkError`] if:
    /// - Failed to create the tree node
    /// - Failed to transfer the deposit
    ///
    /// # Returns
    ///
    /// Returns the created `TreeNode`s on success.
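    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `deposit_tx` is a transaction that pays the address
    /// returned by [`SparkSdk::generate_deposit_address`] at output index 0:
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk, deposit_tx: bitcoin::Transaction) -> Result<(), Box<dyn std::error::Error>> {
    /// let generated = sdk.generate_deposit_address().await?;
    /// // ... broadcast `deposit_tx`, paying `generated.deposit_address`, then:
    /// let nodes = sdk
    ///     .finalize_deposit(
    ///         generated.signing_public_key.serialize().to_vec(),
    ///         generated.verifying_public_key.serialize().to_vec(),
    ///         deposit_tx,
    ///         0, // vout of the deposit output
    ///     )
    ///     .await?;
    /// println!("Created {} tree node(s)", nodes.len());
    /// # Ok(())
    /// # }
    /// ```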
    pub async fn finalize_deposit(
        &self,
        signing_pubkey: Vec<u8>,
        verifying_pubkey: Vec<u8>,
        deposit_tx: bitcoin::Transaction,
        vout: u32,
    ) -> Result<Vec<TreeNode>, SparkSdkError> {
        with_handler_lock!(self, async {
            self.finalize_deposit_internal(signing_pubkey, verifying_pubkey, deposit_tx, vout)
                .await
        })
        .await
    }

    /// Creates the tree root for a deposit and transfers the resulting leaves to self.
    async fn finalize_deposit_internal(
        &self,
        signing_pubkey: Vec<u8>,
        verifying_pubkey: Vec<u8>,
        deposit_tx: bitcoin::Transaction,
        vout: u32,
    ) -> Result<Vec<TreeNode>, SparkSdkError> {
        let time_start = std::time::Instant::now();
        let created_root = self
            .create_tree_root(
                signing_pubkey,
                verifying_pubkey,
                serialize_bitcoin_transaction(&deposit_tx)?,
                vout,
            )
            .await?;
        let duration = time_start.elapsed();

        #[cfg(feature = "telemetry")]
        tracing::debug!(duration = ?duration, "finalize_deposit");

        // Transfer the leaves to self.
        let time_start = std::time::Instant::now();
        self.transfer_deposits_to_self(
            created_root
                .nodes
                .iter()
                .map(|node| node.id.clone())
                .collect(),
        )
        .await?;

        let duration = time_start.elapsed();
        #[cfg(feature = "telemetry")]
        tracing::debug!(duration = ?duration, "transfer_deposits_to_self");

        Ok(created_root.nodes)
    }

    /// Retrieves all unused deposit addresses that have been previously generated for your wallet.
    ///
    /// This function queries the Spark network for all deposit addresses associated with your
    /// identity public key that haven't yet been used for deposits. This helps you track
    /// deposit addresses you've created but haven't received funds on.
    ///
    /// # Returns
    ///
    /// * `Ok(Vec<DepositAddressQueryResult>)` - A vector of unused deposit addresses if successful
    /// * `Err(SparkSdkError)` - If there was an error querying the addresses
    ///
    /// # Example
    ///
    /// ```
    /// # use spark_rust::SparkSdk;
    /// # async fn example(sdk: SparkSdk) -> Result<(), Box<dyn std::error::Error>> {
    /// // Query all unused deposit addresses associated with your wallet
    /// let unused_addresses = sdk.query_unused_deposit_addresses().await?;
    ///
    /// // Process each unused address
    /// for address_result in unused_addresses {
    ///     println!("Unused address: {}", address_result.deposit_address);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub async fn query_unused_deposit_addresses(
        &self,
    ) -> Result<Vec<DepositAddressQueryResult>, SparkSdkError> {
        with_handler_lock!(self, async {
            let identity_public_key = self.get_spark_address()?;

            let request = QueryUnusedDepositAddressesRequest {
                identity_public_key: identity_public_key.serialize().to_vec(),
            };
            let response = self
                .config
                .spark_config
                .call_with_retry(
                    request,
                    |mut client, req| {
                        Box::pin(async move { client.query_unused_deposit_addresses(req).await })
                    },
                    None,
                )
                .await?;

            Ok(response.deposit_addresses)
        })
        .await
    }

    /// Transfers the given leaves to the wallet's own Spark address to complete a deposit claim.
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    async fn transfer_deposits_to_self(&self, leaf_ids: Vec<String>) -> Result<(), SparkSdkError> {
        self.transfer_leaf_ids_internal(leaf_ids, &self.get_spark_address()?)
            .await?;

        Ok(())
    }
}