// spark_rust/wallet/config/spark.rs
spark.rs1use crate::constants::spark::connection::DEFAULT_COORDINATOR_INDEX;
2use crate::constants::spark::LIGHTSPARK_SSP_ENDPOINT;
3use crate::constants::spark::LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY;
4use crate::error::CryptoError;
5use crate::error::{NetworkError, SparkSdkError, ValidationError};
6use crate::rpc::connections::connection::SparkConnection;
7use crate::rpc::SparkRpcClient;
8use crate::SparkNetwork;
9use bitcoin::secp256k1::PublicKey;
10use frost_secp256k1_tr_unofficial::Identifier;
11use hashbrown::HashMap;
12use parking_lot::RwLock;
13use spark_cryptography::signing::identifier_to_hex_string;
14use spark_protos::spark::spark_service_client::SparkServiceClient;
15use std::str::FromStr;
16use std::sync::Arc;
17use tonic::transport::Channel;
18use tonic::transport::Uri;
19
20use crate::constants::spark::connection::SPARK_REGTEST_OPERATORS;
21use crate::constants::spark::SPARK_REGTEST_SIGNING_THRESHOLD;
22
23use crate::rpc::traits::SparkRpcConnection;
24
/// Wallet-side configuration for the Spark operator set and the SSP
/// (Spark Service Provider), plus a shared cache of operator RPC clients.
#[derive(Clone)]
pub(crate) struct SparkConfig {
    /// Target Spark network for this wallet.
    pub(crate) network: SparkNetwork,

    /// All known Spark operators; an operator's `id` is its index here.
    pub(crate) spark_operators: Vec<SparkOperator>,

    /// Endpoint URL of the SSP.
    pub(crate) ssp_endpoint: String,

    /// Identity public key of the SSP.
    pub(crate) ssp_identity_public_key: PublicKey,

    /// Index (into `spark_operators`) of the coordinator operator; used as
    /// the default operator for RPC calls.
    pub(crate) coordinator_index: u32,

    /// Signing threshold for the operator set.
    pub(crate) threshold: u32,

    /// RPC clients keyed by operator id. Behind `Arc<RwLock<..>>` so all
    /// clones of the config share (and lazily extend) the same cache.
    spark_clients: Arc<RwLock<HashMap<u32, SparkRpcClient>>>,
}
49
/// A single Spark operator: its position in the operator set, its FROST
/// identifier, and how to reach and authenticate it.
#[derive(Debug, Clone)]
pub(crate) struct SparkOperator {
    /// Zero-based operator index within the operator set.
    pub(crate) id: u32,

    /// FROST identifier, derived from `id + 1` (see `SparkOperator::parse`).
    pub(crate) frost_identifier: Identifier,

    /// gRPC address of the operator.
    pub(crate) address: Uri,

    /// Operator's identity public key.
    pub(crate) identity_public_key: PublicKey,
}
66
67impl SparkOperator {
68 pub(crate) fn parse(id: u32, op: &str) -> Result<Self, SparkSdkError> {
71 let parts: Vec<&str> = op.split('@').collect();
72 if parts.len() != 2 {
73 return Err(SparkSdkError::from(ValidationError::InvalidArgument {
74 argument: format!(
75 "Invalid operator string format {} (should be <pubkey>@<address>)",
76 op
77 ),
78 }));
79 }
80
81 let identity_public_key = PublicKey::from_str(parts[0]).map_err(|err| {
82 SparkSdkError::from(ValidationError::InvalidArgument {
83 argument: format!("Invalid public key for operator {}: {}", parts[0], err),
84 })
85 })?;
86
87 let address = Uri::from_str(parts[1])
88 .map_err(|err| SparkSdkError::from(NetworkError::InvalidUri(err)))?;
89
90 let frost_identifier = Identifier::try_from(id as u16 + 1).map_err(|err| {
91 SparkSdkError::from(ValidationError::InvalidArgument {
92 argument: format!(
93 "Invalid frost identifier for operator {}: {}",
94 parts[0], err
95 ),
96 })
97 })?;
98
99 Ok(SparkOperator {
100 id,
101 frost_identifier,
102 address,
103 identity_public_key,
104 })
105 }
106
107 pub(crate) fn frost_identifier_str(&self) -> String {
108 identifier_to_hex_string(&self.frost_identifier)
109 }
110}
111
112impl SparkConfig {
113 fn operators_from_env() -> Option<Result<Vec<SparkOperator>, SparkSdkError>> {
119 if std::env::var("SPARK_OPERATOR_0").is_err() {
121 return None;
122 }
123
124 let mut operators = vec![];
125 for i in 0..10 {
126 let variable = format!("SPARK_OPERATOR_{}", i);
127 let operator = std::env::var(&variable);
128 let operator = match operator {
129 Ok(operator) => operator,
130 Err(_) => break,
131 };
132
133 match SparkOperator::parse(i as u32, &operator) {
134 Ok(operator) => operators.push(operator),
135 Err(err) => {
136 return Some(Err(SparkSdkError::from(ValidationError::InvalidArgument {
137 argument: format!("Unable to parse operator {}: {}", variable, err),
138 })))
139 }
140 }
141 }
142
143 Some(Ok(operators))
144 }
145
146 fn operators() -> Result<Vec<SparkOperator>, SparkSdkError> {
149 if let Some(operators) = Self::operators_from_env() {
150 return operators;
151 }
152
153 let mut spark_operators = vec![];
154 for (i, operator) in SPARK_REGTEST_OPERATORS.iter().enumerate() {
155 let address = Uri::from_str(operator.0)
157 .map_err(|err| SparkSdkError::from(NetworkError::InvalidUri(err)))?;
158
159 let identity_public_key = PublicKey::from_str(operator.1).map_err(|err| {
160 SparkSdkError::from(ValidationError::InvalidArgument {
161 argument: format!("Invalid public key for operator {}: {}", operator.1, err),
162 })
163 })?;
164
165 let frost_identifier = Identifier::try_from(i as u16 + 1).map_err(|err| {
166 SparkSdkError::from(ValidationError::InvalidArgument {
167 argument: format!(
168 "Invalid frost identifier for operator {}: {}",
169 operator.1, err
170 ),
171 })
172 })?;
173
174 spark_operators.push(SparkOperator {
175 id: i as u32,
176 frost_identifier,
177 address,
178 identity_public_key,
179 });
180 }
181
182 Ok(spark_operators)
183 }
184
185 pub async fn new(network: SparkNetwork) -> Result<Self, SparkSdkError> {
209 let threshold = SPARK_REGTEST_SIGNING_THRESHOLD;
211
212 let coordinator_index = DEFAULT_COORDINATOR_INDEX;
214
215 let spark_operators = Self::operators()?;
217
218 let mut spark_clients = HashMap::new();
220 for operator in &spark_operators {
221 let spark_rpc_client =
224 SparkConnection::establish_connection(operator.address.clone()).await?;
225 spark_clients.insert(operator.id, spark_rpc_client);
226 }
227
228 let ssp_identity_public_key = PublicKey::from_str(LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY)
230 .map_err(|err| SparkSdkError::from(CryptoError::Secp256k1(err)))?;
231
232 #[cfg(feature = "telemetry")]
233 tracing::info!(
234 ssp_identity_public_key = ssp_identity_public_key.to_string(),
235 "ssp public key"
236 );
237 let wallet_config = Self {
238 network,
239 spark_operators,
240 coordinator_index,
241 threshold,
242 spark_clients: Arc::new(RwLock::new(spark_clients)),
243 ssp_endpoint: LIGHTSPARK_SSP_ENDPOINT.to_string(),
244 ssp_identity_public_key,
245 };
246
247 Ok(wallet_config)
248 }
249
250 #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
275 pub(crate) async fn get_spark_connection(
276 &self,
277 operator_id: Option<u32>,
278 ) -> Result<SparkServiceClient<Channel>, SparkSdkError> {
279 let operator_id = operator_id.unwrap_or(self.coordinator_index);
280
281 if !self.spark_clients.read().contains_key(&operator_id) {
283 let spark_operators = self.spark_operators.clone();
284 if operator_id >= spark_operators.len() as u32 {
285 drop(spark_operators);
286 return Err(SparkSdkError::from(ValidationError::InvalidArgument {
287 argument: format!("Operator index {} is out of bounds", operator_id),
288 }));
289 }
290
291 let uri = spark_operators[operator_id as usize].address.clone();
292 let spark_rpc_instance = SparkConnection::establish_connection(uri).await?;
293 self.spark_clients
294 .write()
295 .insert(operator_id, spark_rpc_instance);
296 }
297
298 let client = self.spark_clients.read().get(&operator_id).unwrap().clone();
300 let spark_client = client.get_new_spark_service_connection()?;
301
302 Ok(spark_client)
303 }
304}
305
#[cfg(test)]
mod test {
    use super::SparkOperator;

    /// Happy path: a well-formed `<pubkey>@<address>` string parses into an
    /// operator with the derived FROST identifier (`id + 1`, big-endian hex).
    #[test]
    fn test_parse_spark_operator() {
        let operator = SparkOperator::parse(
            0,
            "0322ca18fc489ae25418a0e768273c2c61cabb823edfb14feb891e9bec62016510@http://localhost:8535",
        )
        .unwrap();

        assert_eq!(operator.id, 0);
        assert_eq!(
            operator.frost_identifier_str(),
            "0000000000000000000000000000000000000000000000000000000000000001"
        );
        assert_eq!(operator.address.to_string(), "http://localhost:8535/");
        assert_eq!(
            operator.identity_public_key.to_string(),
            "0322ca18fc489ae25418a0e768273c2c61cabb823edfb14feb891e9bec62016510"
        );
    }

    /// A string without the `@` separator must be rejected.
    #[test]
    fn test_parse_rejects_missing_separator() {
        assert!(SparkOperator::parse(0, "not-an-operator").is_err());
    }

    /// More than one `@` is ambiguous and must be rejected.
    #[test]
    fn test_parse_rejects_extra_separator() {
        assert!(SparkOperator::parse(0, "aa@bb@http://localhost:8535").is_err());
    }

    /// A malformed public key must be rejected.
    #[test]
    fn test_parse_rejects_invalid_public_key() {
        assert!(SparkOperator::parse(0, "deadbeef@http://localhost:8535").is_err());
    }
}