# Spark Wallet SDK

[![License: Apache-2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
![Rust Version: 1.75+](https://img.shields.io/badge/Rust-1.75+-orange.svg)

This workspace is the semi-official Rust development kit for Spark. The crate provides a complete wallet together with all necessary Spark utilities. The cryptographic primitives are provided by the [spark-cryptography](https://github.com/polarityorg/spark-rs/tree/main/crates/spark-cryptography) crate.

## Overview

Spark Wallet SDK has five components:
- `config`: Contains the configuration for the Spark wallet, as found in the [config directory](https://github.com/polarityorg/spark-rs/tree/main/crates/sparks/src/wallet/config).
- `handlers`: Contains the user-facing APIs for the Spark wallet, as found in the [handlers directory](https://github.com/polarityorg/spark-rs/tree/main/crates/sparks/src/wallet/handlers). Examples illustrating typical usage are provided below.
- `internal_handlers`: Contains the internal service handlers for coordinating signing processes and Spark RPC communications, as documented in the [internal_handlers directory](https://github.com/polarityorg/spark-rs/tree/main/crates/sparks/src/wallet/internal_handlers).
- `rpc`: Provides an RPC client for establishing secure connections to Spark nodes, handling TLS configuration, and creating service-specific clients.
- `signer`: Provides comprehensive key management, storage, and signing capabilities, fully conforming to the traits found in [src/signer/traits](https://github.com/polarityorg/spark-rs/tree/main/crates/sparks/src/signer/traits). In addition, a convenient built-in signer ([default_signer.rs](https://github.com/polarityorg/spark-rs/blob/main/crates/sparks/src/signer/default_signer.rs)) is included for quick and straightforward integration.
  
## Installation
Make sure you have the protobuf compiler (`protoc`) installed.
```bash
# macOS (Homebrew)
brew install protobuf

# Debian/Ubuntu
sudo apt-get install protobuf-compiler
```

Also, make sure you have Rust version `1.75.0` or newer. Ideally, you should use the latest stable version.
```bash
# For version 1.75.0
rustup update
rustup install 1.75.0

# For the latest stable version
rustup update stable
```
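
Finally, add the SDK to your project's dependencies. A minimal `Cargo.toml` sketch (the `tokio` dependency is an assumption here, included because the SDK's API is async and the examples below use the Tokio runtime):

```toml
[dependencies]
spark-rust = "0.1"
tokio = { version = "1", features = ["full"] }
```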


## Quick Start
```rust
use std::time::Duration;
use tokio::time::sleep;

use spark_rust::signer::default_signer::DefaultSigner;
use spark_rust::SparkNetwork;
use spark_rust::SparkSdk;
// The error type is assumed to be re-exported at the crate root.
use spark_rust::SparkSdkError;

#[tokio::main]
async fn main() -> Result<(), SparkSdkError> {
    // Initialize the default signer. Alternatively, you can create a custom
    // signer, as long as it implements all the necessary signing traits. In
    // that case, it is your responsibility to make sure that the signer is
    // safe to use and works as expected.
    let mnemonic = "abandon ability able about above absent absorb abstract absurd \
        abuse access accident";
    let default_signer = DefaultSigner::from_mnemonic(mnemonic, SparkNetwork::Regtest).await?;
    let sdk = SparkSdk::new(SparkNetwork::Regtest, default_signer).await?;

    // Generate a deposit address. Note: this deposit address is one-time-use only!
    let deposit_address = sdk.generate_deposit_address().await?;
    println!("Deposit address: {}", deposit_address.deposit_address);

    // Send a deposit to this address on L1 and Spark will detect it. You can
    // choose the amount of sats; this line sends 100,000 sats to the deposit
    // address. `l1_wallet` stands in for any on-chain wallet you control.
    let txid = l1_wallet.send_to_address(deposit_address.deposit_address, 100_000).await?;

    // For Regtest, sleep for 30 seconds while the deposit confirms.
    sleep(Duration::from_secs(30)).await;

    // Claim the deposit.
    let deposits = sdk.claim_deposit(txid).await?;

    let balance = sdk.get_bitcoin_balance();
    assert_eq!(balance, 100_000);

    // Also, query all the incoming transfers from other Spark users.
    let pending = sdk.query_pending_transfers().await?;
    println!("Querying pending transfers...");
    for transfer in &pending {
        println!("Transfer: {:?} satoshis", transfer.total_value);
    }

    // So far, you have NOT claimed these transfers.
    // You should claim them by calling `sdk.claim_transfers()`.
    let claimed = sdk.claim_transfers().await?;

    // And now, your Bitcoin balance should be updated.
    let balance = sdk.get_bitcoin_balance();

    Ok(())
}
```

## Conceptual Overview: SSP and Fees in Spark

The Spark network uses a fee structure that differs from those of traditional Bitcoin or Lightning wallets. All fees in Spark are service fees charged by the Spark Service Provider (SSP) for operations it facilitates on behalf of users.

### Fee Overview

- **All fees are taken by the SSP** - The Spark Service Provider charges service fees for operations they perform on your behalf.
- **No direct mining fees** - You don't directly pay Bitcoin mining fees when using Spark. These are handled by the SSP when they interact with the Bitcoin network.
- **Fee estimation** - Before performing fee-incurring operations, you can use the estimation methods to determine the cost.
- **Common fee operations** include:
  - Lightning payments (sending and receiving)
  - Leaves swaps (optimizing your wallet structure)
  - Cooperative exits (withdrawing to on-chain Bitcoin)

### Types of Fees

1. **Lightning Send Fees** - Charged when you pay a Lightning invoice through the SSP.
2. **Lightning Receive Fees** - Charged when you receive Lightning payments through the SSP.
3. **Cooperative Exit Fees** - Charged when you withdraw funds from Spark to an on-chain Bitcoin address.
4. **Leaves Swap Fees** - Charged when you optimize your wallet leaf structure through the SSP.

Each fee type has a corresponding estimation method that helps you determine the cost before performing the actual operation. The fee structure is designed to be transparent and predictable.
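
As a quick illustration, here is a minimal sketch that queries several of these estimation methods before committing to anything. It assumes an initialized `sdk` and a BOLT11 `invoice` string; the methods themselves are documented in detail later in this README.

```rust
async fn print_fee_overview(sdk: &SparkSdk, invoice: String) -> Result<(), SparkSdkError> {
    // Service fee for paying a Lightning invoice through the SSP.
    let send_fee = sdk.get_lightning_send_fee_estimate(invoice).await?;
    println!("Lightning send fee: {} sats", send_fee.fees);

    // Service fee for receiving 50,000 sats over Lightning.
    let receive_fee = sdk.get_lightning_receive_fee_estimate(50_000).await?;
    println!("Lightning receive fee: {} sats", receive_fee.fees);

    // Service fee for swapping 100,000 sats of leaves with the SSP.
    let swap_fee = sdk.get_leaves_swap_fee_estimate(100_000).await?;
    println!("Leaves swap fee: {} sats", swap_fee.fees);

    Ok(())
}
```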

---

## Concurrency and Thread Safety Considerations

Note: You may want to revisit this section after reading the API docs and seeing how the SDK is driven by sequential API calls.

The Spark SDK includes locking mechanisms to coordinate wallet operations, but has important limitations when used in multi-threaded or concurrent environments.

### Important Thread Safety Considerations

Despite internal locking, concurrent operations that modify wallet state can lead to race conditions and unexpected behavior, particularly with leaf selection and swap operations.

#### Concurrent Transfer Operations

The SDK uses internal locks to protect wallet state, but these locks have limitations in async environments. Complex operations involving network calls and leaf selection may not maintain exclusive access throughout their entire lifecycle.

#### Leaf Selection and Swap Race Conditions

When executing transfers, the SDK automatically performs leaf selection to find appropriate UTXOs for the transfer amount. If a leaf with the exact amount isn't available, the SDK requests a swap with the SSP to optimize denominations.

**Example scenario:**
```rust
// Assume the user has 2000 sats total in their wallet, divided into two leaves:
// - leaf1 = 1000 sats
// - leaf2 = 1000 sats
// Each leaf represents a UTXO that can be spent.

// UNSAFE: Concurrent transfers that will likely fail
let (first_transfer_result, second_transfer_result) = tokio::join!(
    sender_sdk.transfer(800, &receiver_address),
    sender_sdk.transfer(1200, &receiver_address)
);
```

Here's what happens in this concurrent scenario:
1. Both transfers start executing at nearly the same time
2. For the 800 sat transfer: 
   - No exact 800 sat leaf exists
   - The SDK locks all available funds (2000 sats) for a swap operation
   - These funds remain locked while the swap is negotiated with the SSP
3. For the 1200 sat transfer:
   - It also needs to perform a swap
   - But it finds that all funds are already locked by the first transfer
   - It fails with `LeafSelectionInsufficientFunds` error

The key issue is that **swap operations temporarily lock all available funds**, not just the amount being transferred. This is necessary for the secure swap protocol with the SSP.

**Solution: Sequential Operations**
```rust
// SAFE: Sequential transfers will both succeed
let result1 = sender_sdk.transfer(1200, &receiver_address).await?;
// After the first transfer completes, the wallet has 800 sats remaining
let result2 = sender_sdk.transfer(800, &receiver_address).await?;
```

With sequential operations:
1. The first transfer completes its swap and transfer (1200 sats)
2. The remaining 800 sats are then available for the second transfer
3. Both operations succeed because they don't compete for the same locked resources

Even though there are sufficient total funds for both operations (1200 + 800 = 2000), running them concurrently leads to failures because the swap protocol temporarily locks more funds than are being transferred.

#### Balance Reporting Inaccuracies During Concurrent Operations

During swap operations or other leaf-modifying processes, your wallet's available balance may appear lower than the actual total balance. This occurs because leaves are locked during operations but may not yet be fully processed.

**Balance reporting is always conservative** - your balance may appear less than it actually is, but never more.

### Recommendations for Safe Usage

To avoid race conditions and ensure predictable behavior:

1. **Serialize Leaf-Modifying Operations**:
   - Do not run multiple transfers or swaps concurrently
   - Wait for one transfer operation to complete before starting another
   - Consider implementing a queue if your application needs to handle multiple transfers (see the sketch after the example below)

2. **Safe Concurrent Operations**:
   These operations are generally safe to perform concurrently:
   - Creating and retrieving Lightning invoices
   - Reading wallet information (balance checks, history)
   - Querying transfer history or pending transfers

3. **Operations That Should Be Serialized**:
   These operations should never run concurrently with other leaf-modifying operations:
   - Transfer operations
   - Withdrawal operations
   - Lightning payments
   - Any operation that modifies leaf state

### Example: Safe Alternative to Concurrent Transfers

Instead of concurrent transfers, use sequential processing:

```rust
// UNSAFE: Concurrent transfers that may both require swaps
// let (result1, result2) = tokio::join!(
//     sender_sdk.transfer(1200, &receiver_address),
//     sender_sdk.transfer(800, &receiver_address)
// );

// SAFE: Sequential transfers
let result1 = sender_sdk.transfer(1200, &receiver_address).await?;
let result2 = sender_sdk.transfer(800, &receiver_address).await?;
```
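
For applications that must accept transfer requests from many tasks, one way to serialize leaf-modifying operations is a single worker fed by a channel. This is an illustrative sketch, not SDK API: `TransferRequest` and the channel wiring are hypothetical, while `SparkSdk::transfer` is the method documented below.

```rust
use tokio::sync::mpsc;

// Hypothetical request type; not part of the SDK.
struct TransferRequest {
    amount: u64,
    receiver: bitcoin::secp256k1::PublicKey,
}

// A single worker owns the SDK and processes requests one at a time, so no
// two leaf-modifying operations can ever run concurrently.
async fn run_transfer_worker(sdk: SparkSdk, mut rx: mpsc::Receiver<TransferRequest>) {
    while let Some(req) = rx.recv().await {
        match sdk.transfer(req.amount, &req.receiver).await {
            Ok(transfer_id) => println!("Transfer {} complete", transfer_id),
            Err(e) => eprintln!("Transfer failed: {:?}", e),
        }
    }
}

// Usage (inside an async context):
// let (tx, rx) = mpsc::channel(32);
// tokio::spawn(run_transfer_worker(sdk, rx));
// tx.send(TransferRequest { amount: 800, receiver }).await.unwrap();
```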

### Future Improvements

We plan to enhance thread safety in future versions of the SDK by implementing:
1. Transaction queueing to serialize leaf-modifying operations
2. Improved locking mechanisms that span entire operations
3. Better handling of concurrent leaf selection and swap requests

Until these improvements are in place, applications using the Spark SDK should take care to avoid concurrent leaf-modifying operations.

---

## API Documentation

Below you will find the primary wallet API documentation. For developers interested in implementing custom signers, refer to the signer documentation at the end.

---

### Initialize the SDK

Use the `new` method to create a new instance of the Spark SDK. This is the main entry point for interacting with the Spark protocol.

#### Parameters
- `network: SparkNetwork` - The Spark network to connect to (e.g., Regtest or Mainnet)
- `signer: S where S: SparkSigner` - Implementation of the SparkSigner trait for secure key management

#### Response
Returns a `Result<SparkSdk, SparkSdkError>`, which contains:
• The initialized SparkSdk instance if successful
• A SparkSdkError if initialization fails

Internally, this constructor:
1. Creates the wallet configuration for the specified network
2. Initializes the leaf manager to track UTXOs
3. Authenticates with the Spark network
4. Synchronizes Bitcoin leaves to establish the wallet state

#### Steps

1. Create a signer implementation (typically DefaultSigner)
2. Call `SparkSdk::new(network, signer)` with the desired network and signer
3. The SDK connects to the network, authenticates, and initializes its state

#### Example
```rust
// Create a signer using a mnemonic phrase
let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
let network = SparkNetwork::Regtest;
let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await?;

// Initialize the SDK with the signer and network
let sdk = SparkSdk::new(network, signer).await?;

// The SDK is now ready to use
println!("SDK initialized successfully");
```

---

### Get Spark Address

Use the `get_spark_address` method to retrieve the Spark address of the wallet, which is derived from the wallet's identity public key.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `Result<PublicKey, SparkSdkError>`, which contains:
• A PublicKey representing the wallet's identity public key
• This key serves as the wallet's unique identifier on the Spark network

The Spark address serves several purposes:
- Authenticates the wallet with Spark operators during API calls
- Used in deposit address generation to prove ownership
- Required for validating operator signatures
- Helps prevent unauthorized access to wallet funds

#### Steps

1. Call `sdk.get_spark_address()`.
2. SDK returns the PublicKey that represents the wallet's identity.

#### Example
```rust
// Get the wallet's Spark address
let spark_address = sdk.get_spark_address()?;

// This address is a compressed secp256k1 public key in SEC format (33 bytes)
println!("Your Spark address: {}", spark_address);

// You can share this address with others so they can send you funds
let serialized_address = spark_address.serialize();
assert_eq!(serialized_address.len(), 33); // 33-byte compressed format
```

---

### Get Network

Use the `get_network` method to retrieve the Bitcoin network that this wallet is connected to.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `SparkNetwork` enum indicating whether this is a mainnet or regtest wallet.

The network determines which Spark operators the wallet communicates with and which Bitcoin network (mainnet or regtest) is used for transactions. It's set when creating the wallet and cannot be changed after initialization.

#### Steps

1. Call `sdk.get_network()`.
2. SDK returns the SparkNetwork enum value.

#### Example
```rust
// Get the network this wallet is connected to
let network = sdk.get_network();

// You can use this to display appropriate information in your UI
match network {
    SparkNetwork::Mainnet => println!("Connected to Spark Mainnet"),
    SparkNetwork::Regtest => println!("Connected to Spark Regtest (testing network)"),
}

// Or use it for conditional logic
if network == SparkNetwork::Regtest {
    println!("This is a test wallet - don't use real funds!");
}
```

---

### Generate Deposit Address

Use the `generate_deposit_address` method to obtain a unique, one-time-use deposit address for Spark. This method returns a 
`GenerateDepositAddressSdkResponse`, which explicitly contains:
• A deposit address of type `bitcoin::Address`
• A signing public key of type `bitcoin::secp256k1::PublicKey`
• A verifying public key of type `bitcoin::secp256k1::PublicKey`

Internally, Spark combines the user's signing public key with a Spark Operator public key to derive a taproot address.

#### Steps

1. Call `sdk.generate_deposit_address()`.
2. Spark returns a `GenerateDepositAddressSdkResponse` containing all three fields.
3. Normally, you only need the deposit address. For advanced use cases, you can also use the signing public key and verifying public key.

#### Example
```rust
// 1. Calling Spark to generate all three fields in GenerateDepositAddressSdkResponse.
let generate_deposit_response = sdk.generate_deposit_address().await?;

// 2. This deposit address (bitcoin::Address) is a one-time address you can use to send funds on L1.
let deposit_address = generate_deposit_response.deposit_address;

// 3. The signing public key (bitcoin::secp256k1::PublicKey) for the deposit address, 
//    generally managed internally by the SDK.
let signing_public_key = generate_deposit_response.signing_public_key;

// 4. The verifying public key (bitcoin::secp256k1::PublicKey), 
//    used to verify threshold signatures (not typically needed directly).
let verifying_public_key = generate_deposit_response.verifying_public_key;
```

---

### Claim Deposit

Use the `claim_deposit` method to claim funds that have been deposited to a Spark deposit address.

#### Parameters
- `txid: String` - The transaction ID of the L1 transaction that sent funds to the Spark deposit address

#### Response
Returns a `Result<Vec<TreeNode>, SparkSdkError>`, which explicitly contains:
• A vector of `TreeNode` objects representing the claimed deposits
• Each `TreeNode` is returned by Spark Operators and contains details about the deposit, such as amount and status
• `TreeNode` is a `tonic` message type pre-compiled using Spark's official `protobuf` definitions

Internally, Spark processes the L1 transaction, verifies the deposit, and adds it to your wallet balance, making the funds available for use in the Spark network.

#### Steps

1. Call `sdk.claim_deposit(txid)` with the transaction ID of your deposit.
2. Spark processes the deposit and returns a vector of `TreeNode` objects.
3. The funds are now available in your Spark wallet.

#### Example
```rust
// 1. Generate a deposit address first (as shown in the previous example)
let deposit_address = sdk.generate_deposit_address().await?;

// 2. Send bitcoin to this address on L1 (using an L1 wallet)
let txid = l1_wallet.send_to_address(deposit_address.deposit_address, 100_000).await?;

// 3. Wait for the transaction to be confirmed
// For Regtest, this will take around 30 seconds
sleep(Duration::from_secs(30)).await;

// 4. Claim the deposit using the transaction ID
let deposits = sdk.claim_deposit(txid).await?;

// 5. Verify the balance has been updated
let balance = sdk.get_bitcoin_balance();
assert_eq!(balance, 100_000);
```

---

### Query Unused Deposit Addresses

Use the `query_unused_deposit_addresses` method to retrieve all unused deposit addresses that have been previously generated for your wallet. This helps you track deposit addresses that you've created but haven't received funds on yet.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `Result<Vec<DepositAddressQueryResult>, SparkSdkError>`, which explicitly contains:
• A vector of `DepositAddressQueryResult` objects representing unused deposit addresses
• Each result contains the deposit address and associated metadata
• `DepositAddressQueryResult` is a `tonic` message type pre-compiled using Spark's official `protobuf` definitions

Internally, Spark queries the network for all deposit addresses associated with your identity public key that haven't been used for deposits yet.

#### Steps

1. Call `sdk.query_unused_deposit_addresses()`.
2. Spark returns a vector of `DepositAddressQueryResult` objects representing all unused deposit addresses.
3. You can use these addresses to track expected deposits or for reconciliation purposes.

#### Example
```rust
// Query all unused deposit addresses associated with your wallet
let unused_addresses = sdk.query_unused_deposit_addresses().await?;

// Process each unused address
for address_result in unused_addresses {
    println!("Unused address: {}", address_result.deposit_address);
    
    // You might want to check if these addresses have received funds on L1
    // or display them to users who are expected to make deposits
}

// You can also count how many unused addresses you have
println!("You have {} unused deposit addresses", unused_addresses.len());
```

---

### Finalize Deposit

**This is an advanced method** for finalizing a deposit without going through `claim_deposit`. Use `finalize_deposit` to complete the claiming process for funds sent to a Spark deposit address. **Note:** Users typically do not need to call this method directly, as `claim_deposit` calls it internally. It is provided for advanced use cases where you need to override the default claiming logic.

#### Parameters
- `signing_pubkey: Vec<u8>` - Binary representation of the signing public key used for the deposit
- `verifying_pubkey: Vec<u8>` - Binary representation of the verifying public key used for the deposit
- `deposit_tx: bitcoin::Transaction` - The full Bitcoin transaction containing the deposit
- `vout: u32` - The output index in the transaction that contains the deposit

#### Response
Returns a `Result<TreeNode, SparkSdkError>`, which explicitly contains:
• A `TreeNode` object representing the finalized deposit
• Contains details about the deposit such as amount and status
• `TreeNode` is a `tonic` message type pre-compiled using Spark's official `protobuf` definitions

Internally, Spark finalizes the deposit process by submitting the provided parameters to Spark Operators, who verify and process the deposit, making the funds available in your wallet.

#### Steps

1. Call `sdk.finalize_deposit()` with the required parameters.
2. Spark processes the finalization request and returns a `TreeNode` object.
3. The funds are now available in your Spark wallet.

#### Example
```rust
// STANDARD APPROACH: In most cases, you would simply use claim_deposit:
// let deposits = sdk.claim_deposit(txid).await?;

// ADVANCED APPROACH: Only if you need to bypass claim_deposit for custom logic:
// 1. Get the Bitcoin transaction containing the deposit
let deposit_tx = bitcoin_client.get_transaction(txid).await?;

// 2. Identify which output contains the deposit (custom logic)
let vout = 0; // Example: using custom logic to determine output index

// 3. Get the signing and verifying public keys from your deposit tracking system
let signing_pubkey = your_custom_storage.get_signing_pubkey_for_deposit(txid).await?;
let verifying_pubkey = your_custom_storage.get_verifying_pubkey_for_deposit(txid).await?;

// 4. Call finalize_deposit directly (bypassing claim_deposit)
let deposit = sdk.finalize_deposit(
    signing_pubkey,
    verifying_pubkey,
    deposit_tx,
    vout
).await?;

// The funds are now available in your wallet
let balance = sdk.get_bitcoin_balance();
```

---

### Query Pending Transfers

Use the `query_pending_transfers` method to retrieve all pending transfers where the current user is the receiver. A pending transfer represents funds that have been sent to the user but have not yet been claimed. The transfers remain in a pending state until the receiver claims them, at which point the funds become available in their wallet.

**This function does not claim any pending transfers.** To claim a transfer, you should call `claim_transfers()`. This will execute key tweaking, which is the core of Spark's security mechanism. Before the receiver tweaks the keys, the transfer is not final.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `Result<Vec<Transfer>, SparkSdkError>`, which explicitly contains:
• A vector of `Transfer` objects representing pending transfers
• Each `Transfer` contains details about the pending transfer such as amount, sender, and status
• `Transfer` is a `tonic` message type pre-compiled using Spark's official `protobuf` definitions

Internally, Spark queries the network for all pending transfers associated with the user's identity public key.

#### Steps

1. Call `sdk.query_pending_transfers()`.
2. Spark returns a vector of `Transfer` objects representing all pending transfers.
3. You can then process these pending transfers as needed, such as displaying them to the user or accepting them.

#### Example
```rust
// Query all pending transfers where the current user is the receiver
let pending = sdk.query_pending_transfers().await?;

// Process each pending transfer
for transfer in pending {
    println!("Pending transfer: {:?} satoshis", transfer.total_value);
    
    // You might want to automatically accept transfers or display them to the user
    // For example:
    // if should_auto_accept(&transfer) {
    //     sdk.accept_transfer(transfer.id).await?;
    // }
}
```

---

### Transfer Funds

Use the `transfer` method to send funds from your wallet to another Spark user. This initiates a transfer process where the funds are removed from your wallet and become available for the recipient to claim.

#### Parameters
- `amount: u64` - The amount to transfer in satoshis. Must be greater than the dust limit. If the wallet does not hold a leaf with exactly this amount, the SDK automatically requests a leaves swap with the SSP (see the concurrency notes above).
- `receiver_spark_address: &bitcoin::secp256k1::PublicKey` - The Spark address identifying the receiver of the transfer. This should be the receiver's identity public key, not a regular Bitcoin public key.

#### Response
Returns a `Result<String, SparkSdkError>`, which explicitly contains:
• A String representing the transfer ID if successful
• This ID can be used to track the status of the transfer

Internally, Spark handles the process of transferring funds by selecting appropriate leaves (UTXOs), locking them, generating new signing keys, creating and signing the transfer transaction, and removing the used leaves from your wallet.

#### Steps

1. Call `sdk.transfer(amount, &receiver_spark_address)` with the amount and receiver's Spark address.
2. Spark selects appropriate leaves (UTXOs) containing sufficient funds for the transfer.
3. Spark generates new signing keys and creates the transfer transaction.
4. The transfer is submitted to the Spark network, and the leaves are removed from your wallet.
5. The transfer remains in a pending state until the receiver claims it (expiry is set to 30 days by default).

#### Example
```rust
// Define the amount to transfer (in satoshis)
let amount = 100_000;

// Get the recipient's Spark address (which is their public key)
// This can be shared between users in your application
let receiver_spark_address = PublicKey::from_str(
    "02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7"
).unwrap();

// Send the transfer
let transfer_id_string = sdk.transfer(amount, &receiver_spark_address).await?;

// The transfer ID is a UUID string that can be parsed and stored
let transfer_id = Uuid::parse_str(&transfer_id_string).unwrap();
println!("Transfer successfully initiated with ID: {}", transfer_id);

// The recipient will need to call query_pending_transfers() and claim_transfer()
// to receive these funds
```

---

### Transfer Specific Leaves (Advanced)

**This is an advanced method** intended for specialized use cases where you need precise control over which leaves (UTXOs) are used in a transfer. Most users should use the standard `transfer(amount, receiver)` method instead.

Use the `transfer_leaf_ids` method to transfer specific leaves from your wallet to another Spark user by directly providing the leaf IDs to be transferred.

#### Parameters
- `leaf_ids: Vec<String>` - Vector of leaf IDs to transfer. Each ID identifies a specific UTXO in your wallet.
- `receiver_identity_pubkey: &PublicKey` - The Spark address identifying the receiver of the transfer. This should be the receiver's identity public key.

#### Response
Returns a `Result<String, SparkSdkError>`, which explicitly contains:
• A String representing the transfer ID if successful
• This ID can be used to track the status of the transfer

Internally, this method follows a similar process to the standard transfer, but instead of selecting leaves based on an amount, it uses the exact leaves specified by their IDs.

#### Steps

1. Call `sdk.transfer_leaf_ids(leaf_ids, &receiver_spark_address)` with the leaf IDs and receiver's Spark address.
2. Spark locks the specified leaves and generates new signing keys.
3. The transfer is created, signed, and submitted to the Spark network.
4. The specified leaves are removed from your wallet.
5. The transfer remains in a pending state until the receiver claims it (expiry is set to 30 days by default).

#### Example
```rust
// Get specific leaf IDs from your wallet that you want to transfer
// This requires knowledge of your wallet's internal leaf structure
let leaf_ids = vec!["leaf_id_1".to_string(), "leaf_id_2".to_string()];

// Get the recipient's Spark address
let receiver_spark_address = PublicKey::from_str(
    "02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7"
).unwrap();

// Transfer the specified leaves
let transfer_id_string = sdk.transfer_leaf_ids(leaf_ids, &receiver_spark_address).await?;

// The transfer ID can be parsed and stored
let transfer_id = Uuid::parse_str(&transfer_id_string).unwrap();
println!("Leaf transfer initiated with ID: {}", transfer_id);
```

---

### Claim Transfer

Use the `claim_transfer` method to claim a specific pending transfer that was sent to your wallet. This method processes a pending transfer and adds the funds to your wallet balance.

#### Parameters
- `transfer: Transfer` - The pending transfer to claim, must be in `SenderKeyTweaked` status

#### Response
Returns a `Result<(), SparkSdkError>`, which indicates:
• Success (Ok) if the transfer was successfully claimed
• Error (Err) if there was an issue during the claim process

Internally, Spark performs several security-critical steps:
1. Verifies the transfer is in the correct state (SenderKeyTweaked)
2. Verifies and decrypts the leaf private keys using your identity key
3. Generates new signing keys for the claimed leaves
4. Finalizes the transfer by tweaking the leaf keys, signing refund transactions, and submitting signatures

#### Steps

1. Obtain a pending transfer (typically from `query_pending_transfers()`)
2. Call `sdk.claim_transfer(transfer)` with the transfer object
3. Spark processes the transfer and adds the funds to your wallet
4. The funds are now available for use in your wallet

#### Example
```rust
// First get pending transfers
let pending = sdk.query_pending_transfers().await?;

// Then claim each transfer individually
for transfer in pending {
    // Capture the ID before the transfer is moved into `claim_transfer`.
    let transfer_id = transfer.id.clone();
    sdk.claim_transfer(transfer).await?;
    println!("Successfully claimed transfer: {}", transfer_id);
}

// Verify your updated balance
let balance = sdk.get_bitcoin_balance();
println!("Updated balance: {} satoshis", balance);
```

---

### Claim All Transfers

Use the `claim_transfers` method to claim all pending transfers sent to your wallet in a single operation. This convenience method automatically retrieves all pending transfers and claims them for you.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `Result<(), SparkSdkError>`, which indicates:
• Success (Ok) if all transfers were successfully claimed
• Error (Err) if there was an issue during the claim process

Internally, this method:
1. Calls `query_pending_transfers()` to get all pending transfers
2. Processes each transfer in parallel using `claim_transfer`
3. Returns success only if all transfers are claimed successfully

#### Steps

1. Call `sdk.claim_transfers()`
2. Spark automatically retrieves and processes all pending transfers
3. The funds from all claimed transfers are added to your wallet

#### Example
```rust
// Claim all pending transfers in a single call
sdk.claim_transfers().await?;
println!("Successfully claimed all pending transfers");

// Verify your updated balance
let balance = sdk.get_bitcoin_balance();
println!("Updated balance: {} satoshis", balance);

// You can also check if there are any remaining pending transfers
// (there shouldn't be any if claim_transfers was successful)
let pending = sdk.query_pending_transfers().await?;
assert!(pending.is_empty(), "All transfers should have been claimed");
```

---

### Get All Transfers (Get transfer history)

Use the `get_all_transfers` method to retrieve the history of all transfers (both sent and received) associated with your wallet. This method supports pagination to manage large transfer histories.

#### Parameters
- `limit: Option<u32>` - Optional maximum number of transfers to return (defaults to 20 if not specified)
- `offset: Option<u32>` - Optional number of transfers to skip (defaults to 0 if not specified)

#### Response
Returns a `Result<QueryAllTransfersResponse, SparkSdkError>`, which explicitly contains:
• A `QueryAllTransfersResponse` object containing the list of transfers
• This response includes both sent and received transfers
• Each transfer contains details such as amount, sender, receiver, status, and timestamp
• `QueryAllTransfersResponse` is a `tonic` message type pre-compiled using Spark's official `protobuf` definitions

Internally, Spark queries the network for all transfers associated with your identity public key and applies the pagination parameters.

#### Steps

1. Call `sdk.get_all_transfers(limit, offset)` with optional pagination parameters
2. Spark returns a `QueryAllTransfersResponse` containing the requested transfers
3. You can process these transfers as needed, such as displaying them in a transaction history UI

#### Example
```rust
// Get the first 20 transfers (default pagination)
let first_page = sdk.get_all_transfers(None, None).await?;
println!("First page of transfers: {}", first_page.transfers.len());

// Display transfer details
for transfer in &first_page.transfers {
    println!("Transfer ID: {}, Amount: {} sats, Status: {}", 
             transfer.id, 
             transfer.total_value, 
             transfer.status);
}

// Get the next 20 transfers (pagination)
let second_page = sdk.get_all_transfers(Some(20), Some(20)).await?;
println!("Second page of transfers: {}", second_page.transfers.len());

// You can implement pagination controls in your UI
let page_size = 10;
let page_number = 3; // 0-indexed
let transfers = sdk.get_all_transfers(
    Some(page_size), 
    Some(page_size * page_number)
).await?;
```

---

### Get Bitcoin Balance

Use the `get_bitcoin_balance` method to retrieve the current total balance of your wallet in satoshis.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `u64` value representing the total available balance in satoshis.

Internally, Spark calculates this by summing the value of all available leaves (UTXOs) in your wallet.

#### Steps

1. Call `sdk.get_bitcoin_balance()`.
2. Spark returns the total balance as a u64 value.

#### Example
```rust
// Get the current wallet balance
let balance = sdk.get_bitcoin_balance();
println!("Your current balance is {} satoshis", balance);

// You can also use this to check if you have enough funds for a transfer
let amount_to_send = 50_000;
if balance >= amount_to_send {
    sdk.transfer(amount_to_send, &receiver_spark_address).await?;
} else {
    println!("Insufficient funds: you need {} but only have {}", 
             amount_to_send, balance);
}
```

---

### Sync Wallet

Use the `sync_wallet` method to perform a comprehensive synchronization of your wallet with the Spark network. This is a convenience method that executes multiple synchronization operations in a single call.

#### Parameters
None - This method doesn't require any parameters.

#### Response
Returns a `Result<(), SparkSdkError>`, which indicates:
• Success (Ok) if all synchronization operations completed successfully
• Error (Err) if there was an issue during any synchronization step

Internally, this method performs the following operations in sequence:
1. Claims all pending Bitcoin transfers
2. Synchronizes all leaves (UTXOs) with the Spark network
3. Optimizes leaf distribution for efficient wallet operation

#### Steps

1. Call `sdk.sync_wallet()`.
2. Spark automatically performs all synchronization operations.
3. Your wallet state is updated with the latest information from the network.

#### Example
```rust
// Perform a full wallet synchronization
sdk.sync_wallet().await?;
println!("Wallet successfully synchronized with the network");

// After syncing, you'll have the most up-to-date balance
let updated_balance = sdk.get_bitcoin_balance();
println!("Updated balance: {} satoshis", updated_balance);

// Your wallet will also have claimed all pending transfers
let pending = sdk.query_pending_transfers().await?;
assert!(pending.is_empty(), "All transfers should have been claimed during sync");
```

---

### Request Leaves Swap (Advanced)

**This is an advanced method** that allows you to optimize your wallet's leaf structure by swapping your current leaves with the Spark Service Provider (SSP). This function is primarily used internally by the SDK when you need to transfer an amount that doesn't match any of your existing leaves.

For example, if you have a single leaf of 100,000 satoshis but need to send 80,000 satoshis, this function will swap with the SSP to get leaves totaling 100,000 satoshis but with denominations that include the 80,000 you need. The SSP typically provides leaves in power-of-2 denominations for optimal efficiency.
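
To make "power-of-2 denominations" concrete, the helper below (illustrative only, not part of the SDK) splits an amount into its binary components, which is the shape of leaf set the SSP aims to hand back:

```rust
// Splits an amount into power-of-2 denominations (its binary components).
fn power_of_two_denominations(mut amount: u64) -> Vec<u64> {
    let mut denominations = Vec::new();
    let mut bit = 1u64;
    while amount > 0 {
        if amount & 1 == 1 {
            denominations.push(bit);
        }
        amount >>= 1;
        bit <<= 1;
    }
    denominations
}

// 100,000 sats decompose into six power-of-2 leaves:
assert_eq!(
    power_of_two_denominations(100_000),
    vec![32, 128, 512, 1_024, 32_768, 65_536]
);
// After a swap targeting 80,000 sats, the SSP can return leaves covering
// 80,000 (= 128 + 2,048 + 4,096 + 8,192 + 65,536) plus 20,000 in change.
```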

#### Parameters
- `target_amount: u64` - The amount (in satoshis) you want to have in a specific leaf after the swap

#### Response
Returns a `Result<String, SparkSdkError>`, which explicitly contains:
• A String representing the ID of the newly created leaf with the target amount
• This leaf ID can be used for future transfers

Internally, this method:
1. Locks all available Bitcoin leaves in your wallet
2. Prepares leaf key tweaks for each leaf
3. Creates a transfer to the SSP with all your available leaves
4. Uses cryptographic adaptor signatures for security
5. Requests new leaves from the SSP with your desired target amount
6. Verifies the cryptographic integrity of the returned leaves
7. Completes the swap process and claims the new leaves
8. Deletes your old leaves

#### Steps

1. Call `sdk.request_leaves_swap(target_amount)` with your desired amount
2. Spark handles the entire swap process with the SSP
3. Your wallet now has optimized leaves including one with your target amount

#### Example
```rust
// Let's say you have a single leaf of 100,000 satoshis but need to send 80,000
let target_amount = 80_000;

// Request a swap with the SSP to get optimized leaves
let new_leaf_id = sdk.request_leaves_swap(target_amount).await?;
println!("Created new leaf with ID: {}", new_leaf_id);

// Now you can transfer exactly 80,000 satoshis
let receiver_spark_address = PublicKey::from_str(
    "02782d7ba8764306bd324e23082f785f7c880b7202cb10c85a2cb96496aedcaba7"
).unwrap();
sdk.transfer(target_amount, &receiver_spark_address).await?;

// Your wallet balance should still total 100,000 satoshis, but in optimized denominations
let balance = sdk.get_bitcoin_balance();
assert_eq!(balance, 100_000);
```

---

### Pay Lightning Invoice

Use the `pay_lightning_invoice` method to pay a Lightning Network invoice using the Spark Service Provider (SSP) as an intermediary. Unlike traditional Lightning wallets, Spark doesn't directly connect to the Lightning Network. Instead, it uses a cooperative approach where:

1. You provide your leaves (UTXOs) to the SSP
2. The SSP makes the Lightning payment on your behalf
3. The transaction is secured using cryptographic techniques

#### Parameters
- `invoice: &String` - A BOLT11 Lightning invoice string that you want to pay

#### Response
Returns a `Result<String, SparkSdkError>`, which explicitly contains:
• A String representing the payment ID if successful
• This ID can be used to track the payment status

Internally, this method:
1. Parses and validates the Lightning invoice
2. Selects appropriate leaves to cover the invoice amount
3. Prepares cryptographic leaf tweaks for security
4. Executes a swap with the SSP (your leaves in exchange for the invoice payment)
5. The SSP processes the Lightning payment using their Lightning node
6. The leaf transfer is completed and your old leaves are removed

#### Steps

1. Call `sdk.pay_lightning_invoice(invoice)` with the Lightning invoice string
2. Spark handles the entire payment process with the SSP
3. If successful, you'll receive a payment ID

#### Example
```rust
// Get a Lightning invoice from somewhere (e.g., a merchant)
let invoice = "lnbc1500n1p3zty3app5wkf0hagkc4egr8rl88msr4c5lp0ygt6gvzna5hdg4tpna65pzqdq0vehk7cnpwga5xzmnwvycqzpgxqyz5vqsp5v9ym7xsyf0qxqwzlmwjl3g0g9q2tg977h70hcheske9xlgfsggls9qyyssqtghx3qqpwm9zl4m398nm40wj8ryaz8v7v4rrdvczypdpy7qtc6rdrkklm9uxlkmtp3jf29yhqjw2vwmlp82y5ctft94k23cwgqd9llgy".to_string();

// Pay the invoice
let payment_id = sdk.pay_lightning_invoice(&invoice).await?;
println!("Lightning payment initiated with ID: {}", payment_id);

// Your leaves have been transferred to the SSP, and the SSP has made the Lightning payment
```

---

### Create Lightning Invoice

Use the `create_lightning_invoice` method to generate a Lightning Network invoice that others can pay to you. When someone pays this invoice via Lightning, the funds will be received by the SSP and then transferred to your Spark wallet.

#### Parameters
- `amount_sats: u64` - The amount in satoshis that you want to receive
- `memo: Option<String>` - Optional description/memo for the invoice
- `expiry_seconds: Option<i32>` - Optional expiry time in seconds (defaults to 30 days if not specified)

#### Response
Returns a `Result<Bolt11Invoice, SparkSdkError>`, which explicitly contains:
• A `Bolt11Invoice` object representing the generated Lightning invoice
• This invoice can be shared with anyone who wants to pay you via Lightning

Internally, this method:
1. Generates a secure payment preimage and hash
2. Creates the invoice through the SSP
3. Distributes preimage shares to Spark operators using a threshold secret sharing scheme
4. Returns the formatted BOLT11 invoice

#### Steps

1. Call `sdk.create_lightning_invoice(amount, memo, expiry)` with your desired parameters
2. Spark generates the invoice and distributes the cryptographic material
3. Share the returned invoice string with the person who wants to pay you

#### Example
```rust
// Create an invoice for 50,000 satoshis
let amount_sats = 50_000;
let memo = Some("Payment for services".to_string());
let expiry = Some(3600 * 24); // 24 hours

// Generate the Lightning invoice
let invoice = sdk.create_lightning_invoice(amount_sats, memo, expiry).await?;

// Get the invoice string to share with the payer
let invoice_string = invoice.to_string();
println!("Lightning Invoice: {}", invoice_string);

// When someone pays this invoice via Lightning, the funds will automatically
// appear in your Spark wallet (after being processed by the SSP)
```

---

### Withdraw Funds (Cooperative Exit)

Use the `withdraw` method to transfer funds from your Spark wallet back to the Bitcoin blockchain through a cooperative process with the Spark Service Provider (SSP). This process, also known as a "cooperative exit," allows you to convert your Spark funds into regular on-chain Bitcoin.

#### Parameters
- `onchain_address: &Address` - The Bitcoin address where the funds should be sent
- `target_amount_sats: Option<u64>` - Optional amount in satoshis to withdraw. If not specified, attempts to withdraw all available funds in your wallet

#### Response
Returns a `Result<CoopExitResponse, SparkSdkError>`, which explicitly contains:
• A `CoopExitResponse` object with:
  - `request_id`: A `CoopExitRequestId` identifying this withdrawal request
  - `exit_txid`: The transaction ID of the exit transaction on the Bitcoin blockchain

Internally, this method:
1. Validates the withdrawal amount (ensuring it meets minimum requirements)
2. Selects appropriate leaves to cover the withdrawal amount
3. Generates leaf key tweaks for security
4. Initiates the cooperative exit with the SSP
5. Creates and signs refund transactions
6. Completes the cooperative exit process with the SSP
7. Returns the exit transaction details

#### Steps

1. Call `sdk.withdraw(&onchain_address, target_amount_sats)` with the Bitcoin address and the optional amount. Any standard Bitcoin address type can be used as the destination.
2. Spark handles the entire withdrawal process with the SSP.
3. If successful, you'll receive a response with the request ID and exit transaction ID.
4. The funds will be sent to your specified Bitcoin address in an on-chain transaction.

#### Example
```rust
// Create a Bitcoin address to receive the withdrawn funds
let bitcoin_address = Address::from_str("bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4")?;

// Option 1: Withdraw all available funds
let withdrawal_response = sdk.withdraw(&bitcoin_address, None).await?;
println!("Withdrawal initiated with request ID: {:?}", withdrawal_response.request_id);
println!("Exit transaction ID: {}", withdrawal_response.exit_txid);

// Option 2: Withdraw a specific amount (e.g., 50,000 satoshis)
let specific_amount = 50_000;
let withdrawal_response = sdk.withdraw(&bitcoin_address, Some(specific_amount)).await?;

// You can check the status of the Bitcoin transaction using the exit_txid
// with any Bitcoin block explorer or your Bitcoin wallet
```

#### Fee Estimation
Withdrawals incur a service fee charged by the SSP for facilitating the on-chain transaction. You can estimate this fee before initiating a withdrawal:

```rust
// Get the leaf IDs you want to withdraw
let leaf_ids = sdk.leaf_manager
    .get_available_bitcoin_leaves(None, SparkNodeStatus::Available)
    .iter()
    .map(|leaf| leaf.get_id().clone())
    .collect();

// Get the Bitcoin address as a string
let bitcoin_address_string = bitcoin_address.to_string();

// Estimate the withdrawal fee
let fee_estimate = sdk.get_cooperative_exit_fee_estimate(leaf_ids, bitcoin_address_string).await?;
println!("Estimated withdrawal fee: {} satoshis", fee_estimate.fees);

// Decide whether to proceed based on the fee
if fee_estimate.fees < 5000 { // Example threshold
    sdk.withdraw(&bitcoin_address, None).await?;
} else {
    println!("Fee too high ({}), withdrawal aborted", fee_estimate.fees);
}
```

#### Important Notes
- The minimum withdrawal amount is defined by `DEFAULT_WITHDRAWAL_AMOUNT` (typically 10,000 satoshis)
- If you attempt to withdraw less than the minimum amount, the function will return an error
- The cooperative exit process requires coordination with the SSP and may take some time to complete
- Once initiated, the withdrawal process cannot be easily reversed
- The withdrawal will appear as a regular Bitcoin transaction on the blockchain
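
A defensive pre-check before withdrawing can avoid a round-trip that would fail on the minimum-amount rule. A minimal sketch, assuming the 10,000-sat figure above (prefer the SDK's `DEFAULT_WITHDRAWAL_AMOUNT` constant if it is exported):

```rust
// Assumed to mirror DEFAULT_WITHDRAWAL_AMOUNT; verify against the SDK.
const ASSUMED_MIN_WITHDRAWAL_SATS: u64 = 10_000;

let balance = sdk.get_bitcoin_balance();
if balance < ASSUMED_MIN_WITHDRAWAL_SATS {
    println!(
        "Balance of {} sats is below the {}-sat withdrawal minimum",
        balance, ASSUMED_MIN_WITHDRAWAL_SATS
    );
} else {
    let response = sdk.withdraw(&bitcoin_address, None).await?;
    println!("Exit transaction ID: {}", response.exit_txid);
}
```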

---

### Get Lightning Send Fee Estimate

Use the `get_lightning_send_fee_estimate` method to estimate the fees associated with sending a Lightning payment through the Spark Service Provider (SSP).

#### Parameters
- `invoice: String` - The Lightning invoice you want to pay

#### Response
Returns a `Result<SparkFeeEstimate, SparkSdkError>`, which explicitly contains:
• A `SparkFeeEstimate` object with the estimated fees in satoshis

This helps you understand the cost of making a Lightning payment before you commit to it. The fee is a service fee charged by the SSP for facilitating the Lightning payment.

#### Steps

1. Call `sdk.get_lightning_send_fee_estimate(invoice)` with the invoice
2. Spark returns the estimated fees from the SSP

#### Example
```rust
// Get a Lightning invoice from somewhere
let invoice = "lnbc1500n1p3zty3app...".to_string();

// Get fee estimate before paying
let fee_estimate = sdk.get_lightning_send_fee_estimate(invoice.clone()).await?;
println!("Estimated fee: {} satoshis", fee_estimate.fees);

// Decide whether to proceed with the payment
if fee_estimate.fees < 100 {
    // Fee is acceptable, proceed with payment
    sdk.pay_lightning_invoice(&invoice).await?;
} else {
    println!("Fee too high, payment aborted");
}
```

---

### Get Lightning Receive Fee Estimate

Use the `get_lightning_receive_fee_estimate` method to estimate the fees associated with receiving a Lightning payment through the Spark Service Provider (SSP).

#### Parameters
- `amount: u64` - The amount in satoshis you want to receive

#### Response
Returns a `Result<SparkFeeEstimate, SparkSdkError>`, which explicitly contains:
• A `SparkFeeEstimate` object with the estimated fees in satoshis

This helps you understand how much will be deducted from the payment amount as fees. The fee is a service fee charged by the SSP for facilitating the Lightning payment reception.

#### Steps

1. Call `sdk.get_lightning_receive_fee_estimate(amount)` with the desired amount
2. Spark returns the estimated fees from the SSP

#### Example
```rust
// Amount you want to receive
let amount_sats = 50_000;

// Get fee estimate for receiving this amount
let fee_estimate = sdk.get_lightning_receive_fee_estimate(amount_sats).await?;
println!("Estimated receive fee: {} satoshis", fee_estimate.fees);

// Calculate the net amount you'll receive after fees
let net_amount = amount_sats - fee_estimate.fees;
println!("You'll receive {} satoshis after fees", net_amount);

// Create invoice if fees are acceptable
if fee_estimate.fees < amount_sats / 100 { // Less than 1% fee
    sdk.create_lightning_invoice(amount_sats, None, None).await?;
}
```

---

### Get Cooperative Exit Fee Estimate

Use the `get_cooperative_exit_fee_estimate` method to estimate the fees associated with withdrawing funds from Spark to an on-chain Bitcoin address through the Spark Service Provider (SSP).

#### Parameters
- `leaf_ids: Vec<String>` - The specific leaf IDs you want to withdraw
- `on_chain_address: String` - The Bitcoin address where you want to receive the funds

#### Response
Returns a `Result<SparkFeeEstimate, SparkSdkError>`, which explicitly contains:
• A `SparkFeeEstimate` object with the estimated fees in satoshis

This helps you understand the cost of withdrawing your funds back to the Bitcoin blockchain before initiating the withdrawal. The fee is a service fee charged by the SSP for facilitating the on-chain exit.

#### Steps

1. Call `sdk.get_cooperative_exit_fee_estimate(leaf_ids, on_chain_address)` with the leaf IDs and address
2. Spark returns the estimated fees from the SSP

#### Example
```rust
// Identify the leaves you want to withdraw
let leaf_ids = vec!["leaf_id_1".to_string(), "leaf_id_2".to_string()];

// Specify the Bitcoin address to receive funds
let onchain_address = "bc1q...".to_string();

// Get fee estimate before withdrawing
let fee_estimate = sdk.get_cooperative_exit_fee_estimate(leaf_ids.clone(), onchain_address.clone()).await?;
println!("Estimated withdrawal fee: {} satoshis", fee_estimate.fees);

// Decide whether to proceed with the withdrawal
if fee_estimate.fees < 1000 { // Example threshold
    // Fee is acceptable, proceed with withdrawal
    // Parse the address; assume_checked() skips network validation for this example
    let bitcoin_address = Address::from_str(&onchain_address)
        .expect("valid Bitcoin address")
        .assume_checked();
    sdk.withdraw(&bitcoin_address, None).await?;
} else {
    println!("Fee too high, withdrawal aborted");
}
```

---

### Get Leaves Swap Fee Estimate

Use the `get_leaves_swap_fee_estimate` method to estimate the fees associated with optimizing your wallet's leaf structure by swapping your leaves with the Spark Service Provider (SSP).

#### Parameters
- `total_amount_sats: u64` - The total amount in satoshis that will be involved in the swap

#### Response
Returns a `Result<SparkFeeEstimate, SparkSdkError>`. On success, the `SparkFeeEstimate` holds the estimated fees in satoshis.

This helps you understand the cost of optimizing your leaf structure before initiating the swap. The fee is a service fee charged by the SSP for facilitating the leaves swap operation.

#### Steps

1. Call `sdk.get_leaves_swap_fee_estimate(total_amount_sats)` with the total amount
2. Spark returns the estimated fees from the SSP

#### Example
```rust
// Total amount to be swapped
let total_amount_sats = 100_000;

// Get fee estimate before swapping leaves
let fee_estimate = sdk.get_leaves_swap_fee_estimate(total_amount_sats).await?;
println!("Estimated swap fee: {} satoshis", fee_estimate.fees);

// Decide whether to proceed with the swap
if fee_estimate.fees < total_amount_sats / 200 { // Less than 0.5% fee
    // Fee is acceptable, proceed with swap
    let target_amount = 80_000; // The specific denomination you need
    sdk.request_leaves_swap(target_amount).await?;
} else {
    println!("Fee too high, swap aborted");
}
```

---

## Signer Documentation

The signing system is a critical component of the Spark wallet, handling all cryptographic operations including key derivation, transaction signing, and threshold signatures via the FROST protocol. This documentation is intended for developers who need to implement custom signers or understand the internal signing architecture.

### Signer Architecture Overview

The signer in Spark follows a trait-based architecture, where various cryptographic capabilities are separated into distinct traits that together form a complete signing system:

```
SparkSigner
├── SparkSignerDerivationPath - Key derivation path handling
├── SparkSignerEcdsa - ECDSA signature operations
├── SparkSignerEcies - Encryption/decryption of secret keys
├── SparkSignerFrost - FROST nonce and commitment management
├── SparkSignerFrostSigning - FROST threshold signature operations
├── SparkSignerSecp256k1 - Secp256k1 keypair operations
└── SparkSignerShamir - Verifiable secret sharing operations
```

The SDK includes a `DefaultSigner` implementation that manages keys in memory. While this implementation works well for most use cases, you may implement your own signer for specialized needs such as remote signing or integration with custom key management systems.

### Security Model

The Spark security model requires that both the user and Spark Operators participate in signing Bitcoin transactions:

1. The user always maintains control of their signing keys
2. Spark Operators use threshold signing (FROST) for their portion of signatures
3. The signature that appears on the blockchain is a single signature, composed of the user's signature and the aggregated operator signatures
4. The user always initiates the signing process and receives signature shares from operators first

This ensures that neither the user nor the operators alone can spend funds, providing a secure multi-party computation model for Bitcoin transactions.

### Implementing a Custom Signer

To create a custom signer, you must implement the `SparkSigner` trait and all its associated sub-traits. The implementation details will depend on your specific requirements, but there are some important considerations:

#### Key Derivation Path

The derivation path scheme is **critical** for compatibility with other Spark wallets. The scheme follows:

```
m/8797555'/account'/key_type'/[leaf_index']
```

Where:
- `8797555'` is the purpose value (derived from "spark")
- `account'` is the account index (hardened, starting from 0)
- `key_type'` is the key type:
  - `0'` for identity key
  - `1'` for base signing key
  - `2'` for temporary signing key
- `leaf_index'` is a hash-derived index for leaf-specific keys (optional)

All indices use hardened derivation for enhanced security.
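
To make the scheme concrete, here is a minimal sketch that builds such a path with the `bitcoin` crate's BIP32 types. The `spark_path` helper is hypothetical; the SDK's own logic lives behind `SparkSignerDerivationPath`.

```rust
use std::str::FromStr;

use bitcoin::bip32::DerivationPath;

// Hypothetical helper: builds a Spark path for a given account and key
// type, omitting the optional leaf index. Every component is hardened.
fn spark_path(account: u32, key_type: u32) -> DerivationPath {
    DerivationPath::from_str(&format!("m/8797555'/{account}'/{key_type}'"))
        .expect("statically well-formed path")
}

// The identity key for account 0 lives at m/8797555'/0'/0'.
let identity_path = spark_path(0, 0);
```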

#### FROST Threshold Signing

The FROST implementation in Spark is customized to support Taproot tweaking. The process generally follows these steps:

1. Generate nonce pairs and commitments (`SparkSignerFrost`)
2. Create signing jobs with the appropriate messages and participant information
3. Perform signing operations to generate signature shares (`SparkSignerFrostSigning`)
4. Aggregate signature shares from all participants into a complete signature

Your custom signer must implement these steps correctly while preserving the security properties of the FROST protocol; the sketch below shows how they fit together.
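
The following is a minimal sketch that strings the four steps together using only the trait methods documented later in this section. How the `FrostSigningJob` and `AggregateFrostRequest` values are constructed is SDK-specific and elided here.

```rust
// Generic over any signer that implements the FROST traits.
fn frost_sign_and_aggregate<S>(
    signer: &S,
    signing_jobs: Vec<FrostSigningJob>,
    aggregate_request: AggregateFrostRequest,
) -> Result<AggregateFrostResponse, SparkSdkError>
where
    S: SparkSignerFrost + SparkSignerFrostSigning,
{
    // Step 1: generate a nonce pair; the returned commitments are shared
    // with the other participants, while the nonces stay in the signer.
    let _commitments = signer.new_frost_signing_noncepair()?;

    // Steps 2-3: produce this participant's signature shares for the
    // prepared signing jobs.
    let _shares = signer.sign_frost(signing_jobs)?;

    // Step 4: combine all participants' shares into a single signature.
    signer.aggregate_frost(aggregate_request)
}
```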

#### Key Management Considerations

When implementing a custom signer, carefully consider:

- **Private Key Storage**: Determine how to securely store private keys (default implementation keeps them in memory)
- **Deterministic Keys**: Signing keys are deterministic given the seed and leaf ID
- **Ephemeral Keys**: One-time keys used for commitments or during transfers 
- **Key Recovery**: Consider how keys can be recovered or backed up

### Trait Details

#### SparkSignerDerivationPath

Handles the derivation of keys according to Spark's custom path scheme.

```rust
fn get_deposit_signing_key(&self, network: Network) -> Result<PublicKey, SparkSdkError>;
fn derive_spark_key(leaf_id: Option<String>, account: u32, seed_bytes: &[u8], 
                   key_type: SparkKeyType, network: Network) -> Result<SecretKey, SparkSdkError>;
fn get_identity_derivation_path(account_index: u32) -> Result<SparkDerivationPath, SparkSdkError>;
```

#### SparkSignerEcdsa

Provides ECDSA signature capabilities for identity verification and other non-threshold operations.

```rust
fn sign_message_ecdsa_with_identity_key<T: AsRef<[u8]>>(&self, message: T, 
                                                      apply_hashing: bool,
                                                      network: Network) -> Result<Signature, SparkSdkError>;
fn sign_message_ecdsa_with_key<T: AsRef<[u8]>>(&self, message: T, 
                                             public_key_for_signing_key: &PublicKey,
                                             apply_hashing: bool) -> Result<Signature, SparkSdkError>;
```
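
For instance, signing an authentication challenge with the identity key might look like the following hedged sketch, where `signer`, `network`, and the challenge bytes are placeholders:

```rust
// Sign arbitrary bytes with the identity key; apply_hashing = true tells
// the signer to hash the message before signing it.
let challenge = b"operator auth challenge"; // placeholder message
let signature = signer.sign_message_ecdsa_with_identity_key(challenge, true, network)?;
```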

#### SparkSignerEcies

Handles encryption and decryption of secret keys for secure exchange between parties.

```rust
fn encrypt_secret_key_with_ecies(&self, receiver_public_key: &PublicKey,
                                pubkey_for_sk_to_encrypt: &PublicKey) -> Result<Vec<u8>, SparkSdkError>;
fn decrypt_secret_key_with_ecies<T>(&self, ciphertext: T,
                                  network: Network) -> Result<SecretKey, SparkSdkError>
where T: AsRef<[u8]>;
```
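
A hypothetical round trip, assuming `sender` and `receiver` are two `SparkSignerEcies` implementations, `receiver_public_key` identifies the receiver, and `pubkey_of_secret_to_send` identifies which of the sender's secret keys to encrypt:

```rust
// Encrypt one of the sender's secret keys (identified by its public key)
// to the receiver, then decrypt it on the receiving side.
let ciphertext =
    sender.encrypt_secret_key_with_ecies(&receiver_public_key, &pubkey_of_secret_to_send)?;
let recovered_secret = receiver.decrypt_secret_key_with_ecies(ciphertext, network)?;
```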

#### SparkSignerFrost

Manages FROST nonce pairs and commitments for threshold signing.

```rust
fn new_frost_signing_noncepair(&self) -> Result<SigningCommitments, SparkSdkError>;
fn sensitive_expose_nonces_from_commitments<T>(&self, signing_commitments: &T) 
                                             -> Result<SigningNonces, SparkSdkError>
where T: AsRef<[u8]>;
fn sensitive_create_if_not_found_expose_nonces_from_commitments(&self, signing_commitments: Option<&[u8]>) 
                                                              -> Result<SigningNonces, SparkSdkError>;
```

#### SparkSignerFrostSigning

Performs the actual FROST threshold signing operations, including signing and aggregation.

```rust
fn sign_frost(&self, signing_jobs: Vec<FrostSigningJob>) -> Result<SignFrostResponse, SparkSdkError>;
fn aggregate_frost(&self, request: AggregateFrostRequest) -> Result<AggregateFrostResponse, SparkSdkError>;
// Additional specialized signing methods...
```

#### SparkSignerSecp256k1

Manages secp256k1 keypairs for various wallet operations.

```rust
fn get_identity_public_key(&self, account_index: u32, network: Network) -> Result<PublicKey, SparkSdkError>;
fn new_secp256k1_keypair(&self, leaf_id: String, key_type: SparkKeyType,
                        account_index: u32, network: Network) -> Result<PublicKey, SparkSdkError>;
fn insert_secp256k1_keypair_from_secret_key(&self, secret_key: &SecretKey) -> Result<PublicKey, SparkSdkError>;
// Additional keypair management methods...
```
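
A brief usage sketch; the `SparkKeyType` variant name below is assumed for illustration:

```rust
// Fetch the identity public key for account 0, then derive a fresh
// signing keypair for a specific leaf. Private keys stay in the signer.
let identity_pk = signer.get_identity_public_key(0, network)?;
let leaf_pk = signer.new_secp256k1_keypair(
    "leaf_id_1".to_string(),
    SparkKeyType::BaseSigning, // assumed variant name
    0, // account_index
    network,
)?;
```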

#### SparkSignerShamir

Provides verifiable secret sharing operations for secure key distribution.

```rust
fn split_with_verifiable_secret_sharing(&self, message: Vec<u8>, threshold: usize,
                                      num_shares: usize) -> Result<Vec<VerifiableSecretShare>, SparkSdkError>;
fn split_from_public_key_with_verifiable_secret_sharing(&self, public_key: &PublicKey,
                                                      threshold: usize, 
                                                      num_shares: usize) -> Result<Vec<VerifiableSecretShare>, SparkSdkError>;
```
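
For example, splitting a secret into a 3-of-5 scheme (a sketch, where `signer` is any `SparkSignerShamir` implementation):

```rust
// Split a 32-byte secret into 5 shares with threshold 3: any 3 shares
// reconstruct the secret, while fewer reveal nothing about it.
let secret = vec![0u8; 32]; // placeholder secret bytes
let shares = signer.split_with_verifiable_secret_sharing(secret, 3, 5)?;
assert_eq!(shares.len(), 5);
```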

### Example: Using the Default Signer

For most applications, the provided `DefaultSigner` implementation will be sufficient:

```rust
// Create a DefaultSigner from a BIP39 mnemonic (illustrative only; use a
// securely generated mnemonic in practice)
let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
let network = SparkNetwork::Regtest;
let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await?;

// Initialize the SDK with the signer
let sdk = SparkSdk::new(network, signer).await?;
```

### Note on Current Implementation

This is an early version of the Spark signing system. The architecture may undergo optimizations and refinements in future releases while maintaining backward compatibility where possible. The current implementation prioritizes security and correctness over performance optimization.

For most users, the provided `DefaultSigner` will be sufficient. Custom signer implementations should be undertaken only when specific requirements necessitate it, such as integration with remote signing services or hardware security modules.