spark_rust/wallet/handlers/init.rs
use std::sync::Arc;

use bitcoin::secp256k1::PublicKey;

use crate::{
    error::SparkSdkError,
    signer::traits::SparkSigner,
    wallet::{
        config::WalletConfig,
        internal_handlers::traits::{
            authenticate::AuthenticateInternalHandlers, leaves::LeavesInternalHandlers,
        },
        leaf_manager::LeafManager,
    },
    SparkNetwork, SparkSdk,
};

impl<S: SparkSigner + Send + Sync + Clone + 'static> SparkSdk<S> {
    /// Creates a new instance of the Spark SDK.
    ///
    /// This is the main entry point for interacting with the Spark protocol. It initializes the
    /// SDK with the provided network configuration and signer implementation.
    ///
    /// # Arguments
    ///
    /// * `network` - The Spark network to connect to (e.g. Regtest or Mainnet)
    /// * `signer` - Implementation of the `SparkSigner` trait, wrapped in an `Arc` for
    ///   thread-safe access
    ///
    /// # Returns
    ///
    /// Returns a `Result` containing either:
    /// * The initialized `SparkSdk` instance
    /// * A `SparkSdkError` if initialization fails
    ///
    /// # Examples
    ///
    /// ```
    /// use spark_rust::{
    ///     error::SparkSdkError,
    ///     signer::default_signer::DefaultSigner,
    ///     signer::traits::SparkSigner,
    ///     SparkNetwork, SparkSdk,
    /// };
    ///
    /// async fn init_sdk() {
    ///     let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    ///     let network = SparkNetwork::Regtest;
    ///     let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    ///     let sdk = SparkSdk::new(network, signer).await.unwrap();
    /// }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all, fields(network = ?network)))]
    pub async fn new(network: SparkNetwork, signer: Arc<S>) -> Result<Self, SparkSdkError> {
        let config = WalletConfig::new(network).await?;
        let leaf_manager = Arc::new(LeafManager::new());
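        // Session storage starts empty; it is filled in right after authentication.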
        let session = Arc::new(parking_lot::RwLock::new(Vec::new()));

        let sdk = Self {
            config,
            signer: signer.clone(),
            leaf_manager,
            session,
        };

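        // Authenticate with the Spark operators; the returned sessions authorize
        // subsequent requests.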
        let sessions = sdk.authenticate().await?;

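        // Scope the write so the lock guard is dropped before the `.await`s below.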
        {
            let mut write = sdk.session.write();
            *write = sessions;
        }

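        // Clones of `SparkSdk` share `signer`, `leaf_manager`, and `session` via
        // their `Arc`s, so work done through the clone is visible on `sdk` as well.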
        let sdk_clone = sdk.clone();

        // Refresh the wallet's BTC leaves so the SDK starts from an up-to-date view.
        sdk_clone.sync_leaves().await?;

        // TODO: refresh tokens. This is only needed if the authn server will not refresh tokens per request.

        // TODO: set optional param to claim bg tasks in the background

        Ok(sdk)
    }

    /// Returns the Spark address of the wallet, which is its identity public key.
    ///
    /// The Spark address is derived from the identity public key of the wallet. This key is
    /// generated when the wallet is first created and remains constant throughout the wallet's
    /// lifetime.
    ///
    /// The Spark address serves several purposes:
    /// - Authenticates the wallet with Spark operators during API calls
    /// - Used in deposit address generation to prove ownership
    /// - Required for validating operator signatures
    /// - Helps prevent unauthorized access to wallet funds
    ///
    /// # Returns
    /// The identity [`PublicKey`]. In compressed SEC form it serializes to 33 bytes: a prefix
    /// byte (0x02 or 0x03, encoding the parity of the Y coordinate) followed by the 32-byte
    /// X coordinate.
    ///
    /// # Examples
    /// ```
    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
    ///
    /// # async fn example() {
    /// let network = SparkNetwork::Regtest;
    /// let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    /// let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    /// let sdk = SparkSdk::new(network, signer).await.unwrap();
    ///
    /// // Spark address is the identity public key of the user. This is derived using the derivation path (TODO: add docs)
    /// let spark_address = sdk.get_spark_address().unwrap();
    ///
    /// // Currently, a user's Spark address is their public key.
    /// assert_eq!(spark_address.serialize().len(), 33);
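    ///
    /// // `PublicKey` implements `Display` as hex, so the compressed key renders as
    /// // 66 hex characters (2 per byte).
    /// assert_eq!(spark_address.to_string().len(), 66);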
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub fn get_spark_address(&self) -> Result<PublicKey, SparkSdkError> {
        // The identity key is derived for account index 0 on the configured network.
        let account_index = 0;
        let network = self.config.spark_config.network.to_bitcoin_network();
        let pubkey = self
            .signer
            .get_identity_public_key(account_index, network)?;

        Ok(pubkey)
    }

    /// Returns the Bitcoin network that this wallet is connected to.
    ///
    /// The network determines which Spark operators the wallet communicates with and which
    /// Bitcoin network (mainnet or regtest) is used for transactions.
    ///
    /// # Network Types
    /// - [`SparkNetwork::Mainnet`] - Production Bitcoin mainnet environment
    /// - [`SparkNetwork::Regtest`] - Testing environment using Lightspark's regtest network
    ///
    /// The network is set when creating the wallet and cannot be changed after initialization.
    /// All transactions and addresses will be created for the configured network.
    ///
    /// # Returns
    /// Returns a [`SparkNetwork`] enum indicating whether this is a mainnet or regtest wallet.
    ///
    /// # Examples
    /// ```
    /// # use spark_rust::{SparkSdk, SparkNetwork, signer::default_signer::DefaultSigner, signer::traits::SparkSigner};
    /// # async fn example() {
    /// let mnemonic = "abandon ability able about above absent absorb abstract absurd abuse access accident";
    /// let network = SparkNetwork::Regtest;
    ///
    /// // Create a DefaultSigner that implements SparkSigner
    /// let signer = DefaultSigner::from_mnemonic(mnemonic, network.clone()).await.unwrap();
    /// let sdk = SparkSdk::new(network, signer).await.unwrap();
    ///
    /// assert_eq!(sdk.get_network(), SparkNetwork::Regtest);
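    ///
    /// // `SparkNetwork` implements `PartialEq`, so the returned network can drive
    /// // environment-specific logic (illustrative branch):
    /// if sdk.get_network() == SparkNetwork::Mainnet {
    ///     // mainnet: real funds are at stake
    /// } else {
    ///     // regtest: safe for testing
    /// }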
    ///
    /// # }
    /// ```
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    pub fn get_network(&self) -> SparkNetwork {
        self.config.spark_config.network
    }
}