spark_rust/wallet/config/
spark.rs

1use crate::constants::spark::connection::DEFAULT_COORDINATOR_INDEX;
2use crate::constants::spark::LIGHTSPARK_SSP_ENDPOINT;
3use crate::constants::spark::LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY;
4use crate::error::CryptoError;
5use crate::error::{NetworkError, SparkSdkError, ValidationError};
6use crate::rpc::connections::connection::SparkConnection;
7use crate::rpc::SparkRpcClient;
8use crate::SparkNetwork;
9use bitcoin::secp256k1::PublicKey;
10use frost_secp256k1_tr_unofficial::Identifier;
11use hashbrown::HashMap;
12use parking_lot::RwLock;
13use spark_cryptography::signing::identifier_to_hex_string;
14use spark_protos::spark::spark_service_client::SparkServiceClient;
15use std::str::FromStr;
16use std::sync::Arc;
17use tonic::transport::Channel;
18use tonic::transport::Uri;
19
20use crate::constants::spark::connection::SPARK_REGTEST_OPERATORS;
21use crate::constants::spark::SPARK_REGTEST_SIGNING_THRESHOLD;
22
23use crate::rpc::traits::SparkRpcConnection;
24
/// Configuration for the Spark wallet.
///
/// Cloning is cheap for the client pool (it is behind an `Arc`), but clones the
/// operator vector and SSP fields.
#[derive(Clone)]
pub(crate) struct SparkConfig {
    /// Network to use for the wallet (mainnet, testnet, etc.)
    pub(crate) network: SparkNetwork,

    /// Signing operators, indexed by position; each entry's `id` matches its index.
    pub(crate) spark_operators: Vec<SparkOperator>,

    /// SSP (Spark Service Provider) endpoint URL.
    pub(crate) ssp_endpoint: String,

    /// SSP identity public key (secp256k1).
    pub(crate) ssp_identity_public_key: PublicKey,

    /// Index into `spark_operators` of the coordinator signing operator.
    pub(crate) coordinator_index: u32,

    /// The threshold constant used for signing.
    pub(crate) threshold: u32,

    /// Lazily-populated cache of gRPC clients for Spark operator RPC calls,
    /// keyed by operator id. Shared behind `Arc<RwLock<..>>` so connections
    /// established by one clone are visible to all.
    spark_clients: Arc<RwLock<HashMap<u32, SparkRpcClient>>>,
}
49
/// Configuration for a signing operator.
#[derive(Debug, Clone)]
pub(crate) struct SparkOperator {
    /// The index of the signing operator.
    pub(crate) id: u32,

    /// Identifier is the FROST identifier of the signing operator, which will be index + 1 in 32 bytes big endian hex string.
    /// Used as shamir secret share identifier in DKG key shares.
    pub(crate) frost_identifier: Identifier,

    /// Address of the signing operator (gRPC endpoint URI).
    pub(crate) address: Uri,

    /// Public key of the signing operator (secp256k1 identity key).
    pub(crate) identity_public_key: PublicKey,
}
66
67impl SparkOperator {
68    // Parse an operator specified as a string (in the format <pubkey>@<address>) into a
69    // [`SparkOperator`].
70    pub(crate) fn parse(id: u32, op: &str) -> Result<Self, SparkSdkError> {
71        let parts: Vec<&str> = op.split('@').collect();
72        if parts.len() != 2 {
73            return Err(SparkSdkError::from(ValidationError::InvalidArgument {
74                argument: format!(
75                    "Invalid operator string format {} (should be <pubkey>@<address>)",
76                    op
77                ),
78            }));
79        }
80
81        let identity_public_key = PublicKey::from_str(parts[0]).map_err(|err| {
82            SparkSdkError::from(ValidationError::InvalidArgument {
83                argument: format!("Invalid public key for operator {}: {}", parts[0], err),
84            })
85        })?;
86
87        let address = Uri::from_str(parts[1])
88            .map_err(|err| SparkSdkError::from(NetworkError::InvalidUri(err)))?;
89
90        let frost_identifier = Identifier::try_from(id as u16 + 1).map_err(|err| {
91            SparkSdkError::from(ValidationError::InvalidArgument {
92                argument: format!(
93                    "Invalid frost identifier for operator {}: {}",
94                    parts[0], err
95                ),
96            })
97        })?;
98
99        Ok(SparkOperator {
100            id,
101            frost_identifier,
102            address,
103            identity_public_key,
104        })
105    }
106
107    pub(crate) fn frost_identifier_str(&self) -> String {
108        identifier_to_hex_string(&self.frost_identifier)
109    }
110}
111
112impl SparkConfig {
113    /// Try and get operators from environment variables. Each operator should be in a separate
114    /// environment variable in the format `SPARK_OPERATOR_<index>`.
115    ///
116    /// Returns `None` if operators are not specified as environmental variables. Otherwise,
117    /// returns Ok with the parsed operators, or an error if parsing operators fails.
118    fn operators_from_env() -> Option<Result<Vec<SparkOperator>, SparkSdkError>> {
119        // If the first operator is not set, assume the rest aren't set either.
120        if std::env::var("SPARK_OPERATOR_0").is_err() {
121            return None;
122        }
123
124        let mut operators = vec![];
125        for i in 0..10 {
126            let variable = format!("SPARK_OPERATOR_{}", i);
127            let operator = std::env::var(&variable);
128            let operator = match operator {
129                Ok(operator) => operator,
130                Err(_) => break,
131            };
132
133            match SparkOperator::parse(i as u32, &operator) {
134                Ok(operator) => operators.push(operator),
135                Err(err) => {
136                    return Some(Err(SparkSdkError::from(ValidationError::InvalidArgument {
137                        argument: format!("Unable to parse operator {}: {}", variable, err),
138                    })))
139                }
140            }
141        }
142
143        Some(Ok(operators))
144    }
145
146    /// Try to get the operators from the environment first. Otherwise, parse the defaults from
147    /// [`SPARK_REGTEST_OPERATORS`].
148    fn operators() -> Result<Vec<SparkOperator>, SparkSdkError> {
149        if let Some(operators) = Self::operators_from_env() {
150            return operators;
151        }
152
153        let mut spark_operators = vec![];
154        for (i, operator) in SPARK_REGTEST_OPERATORS.iter().enumerate() {
155            // TODO: Add environment
156            let address = Uri::from_str(operator.0)
157                .map_err(|err| SparkSdkError::from(NetworkError::InvalidUri(err)))?;
158
159            let identity_public_key = PublicKey::from_str(operator.1).map_err(|err| {
160                SparkSdkError::from(ValidationError::InvalidArgument {
161                    argument: format!("Invalid public key for operator {}: {}", operator.1, err),
162                })
163            })?;
164
165            let frost_identifier = Identifier::try_from(i as u16 + 1).map_err(|err| {
166                SparkSdkError::from(ValidationError::InvalidArgument {
167                    argument: format!(
168                        "Invalid frost identifier for operator {}: {}",
169                        operator.1, err
170                    ),
171                })
172            })?;
173
174            spark_operators.push(SparkOperator {
175                id: i as u32,
176                frost_identifier,
177                address,
178                identity_public_key,
179            });
180        }
181
182        Ok(spark_operators)
183    }
184
185    /// Creates a new wallet configuration with the specified network.
186    ///
187    /// This function:
188    /// 1. Creates a vector of 5 Spark operators with their configurations
189    /// 2. Sets up gRPC connections to each operator
190    /// 3. Initializes the wallet configuration with default values
191    ///
192    /// # Arguments
193    ///
194    /// * `network` - The Spark network to use (mainnet, testnet, regtest)
195    ///
196    /// # Returns
197    ///
198    /// Returns a Result containing either:
199    /// * The initialized SparkConfig struct on success
200    /// * A [`SparkSdkError`] on failure (e.g. connection errors, invalid URLs)
201    ///
202    /// # Errors
203    ///
204    /// This function will return an error if:
205    /// * Failed to establish gRPC connections to operators
206    /// * Failed to parse operator URLs
207    /// * Failed to decode operator public keys
208    pub async fn new(network: SparkNetwork) -> Result<Self, SparkSdkError> {
209        // set the threshold
210        let threshold = SPARK_REGTEST_SIGNING_THRESHOLD;
211
212        // set the default coordinator index
213        let coordinator_index = DEFAULT_COORDINATOR_INDEX;
214
215        // set the operators
216        let spark_operators = Self::operators()?;
217
218        // set the connection pool for operators
219        let mut spark_clients = HashMap::new();
220        for operator in &spark_operators {
221            // establish the secure connection
222            // since this uses rustls, self-signed certificates will fail
223            let spark_rpc_client =
224                SparkConnection::establish_connection(operator.address.clone()).await?;
225            spark_clients.insert(operator.id, spark_rpc_client);
226        }
227
228        // Create the wallet configuration
229        let ssp_identity_public_key = PublicKey::from_str(LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY)
230            .map_err(|err| SparkSdkError::from(CryptoError::Secp256k1(err)))?;
231
232        #[cfg(feature = "telemetry")]
233        tracing::info!(
234            ssp_identity_public_key = ssp_identity_public_key.to_string(),
235            "ssp public key"
236        );
237        let wallet_config = Self {
238            network,
239            spark_operators,
240            coordinator_index,
241            threshold,
242            spark_clients: Arc::new(RwLock::new(spark_clients)),
243            ssp_endpoint: LIGHTSPARK_SSP_ENDPOINT.to_string(),
244            ssp_identity_public_key,
245        };
246
247        Ok(wallet_config)
248    }
249
250    /// Gets a connection to a Spark operator service.
251    ///
252    /// This function manages connections to Spark operators, creating new connections if needed
253    /// and reusing existing ones. It handles both connecting to a specific operator or defaulting
254    /// to the coordinator.
255    ///
256    /// # Arguments
257    ///
258    /// * `operator_id` - Optional ID of the specific operator to connect to. If None, connects to
259    ///                   the default coordinator.
260    ///
261    /// # Returns
262    ///
263    /// * [`Result<SparkServiceClient<Channel>, SparkSdkError>`] - A client for the Spark service on success,
264    ///    or an error if the connection fails
265    ///
266    /// # Errors
267    ///
268    /// Returns [`SparkSdkError::InvalidArgument`] if:
269    /// * The operator_id is out of bounds for the available operators
270    ///
271    /// May also return errors from:
272    /// * Channel creation/connection
273    /// * URI parsing
274    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
275    pub(crate) async fn get_spark_connection(
276        &self,
277        operator_id: Option<u32>,
278    ) -> Result<SparkServiceClient<Channel>, SparkSdkError> {
279        let operator_id = operator_id.unwrap_or(self.coordinator_index);
280
281        // if doesn't exist, create the connection for the operator
282        if !self.spark_clients.read().contains_key(&operator_id) {
283            let spark_operators = self.spark_operators.clone();
284            if operator_id >= spark_operators.len() as u32 {
285                drop(spark_operators);
286                return Err(SparkSdkError::from(ValidationError::InvalidArgument {
287                    argument: format!("Operator index {} is out of bounds", operator_id),
288                }));
289            }
290
291            let uri = spark_operators[operator_id as usize].address.clone();
292            let spark_rpc_instance = SparkConnection::establish_connection(uri).await?;
293            self.spark_clients
294                .write()
295                .insert(operator_id, spark_rpc_instance);
296        }
297
298        // get the connection
299        let client = self.spark_clients.read().get(&operator_id).unwrap().clone();
300        let spark_client = client.get_new_spark_service_connection()?;
301
302        Ok(spark_client)
303    }
304}
305
#[cfg(test)]
mod test {
    use super::SparkOperator;

    /// Parsing a well-formed `<pubkey>@<address>` string yields an operator whose
    /// id, FROST identifier, address, and identity key all round-trip correctly.
    #[test]
    fn test_parse_spark_operator() {
        const PUBKEY: &str =
            "0322ca18fc489ae25418a0e768273c2c61cabb823edfb14feb891e9bec62016510";
        const ADDRESS: &str = "http://localhost:8535";

        let descriptor = format!("{}@{}", PUBKEY, ADDRESS);
        let operator = SparkOperator::parse(0, &descriptor).unwrap();

        // Operator 0 keeps its id; the FROST identifier is 1-based (index + 1).
        assert_eq!(operator.id, 0);
        assert_eq!(
            operator.frost_identifier_str(),
            "0000000000000000000000000000000000000000000000000000000000000001"
        );
        // `Uri` normalizes the address by appending a trailing slash.
        assert_eq!(operator.address.to_string(), "http://localhost:8535/");
        assert_eq!(operator.identity_public_key.to_string(), PUBKEY);
    }
}