use super::traits::derivation_path::SparkDerivationPath;
use super::traits::derivation_path::SparkKeyType;
use super::traits::derivation_path::SparkSignerDerivationPath;
use super::traits::ecdsa::SparkSignerEcdsa;
use super::traits::ecies::SparkSignerEcies;
use super::traits::frost::SparkSignerFrost;
use super::traits::frost_signing::SparkSignerFrostSigning;
use super::traits::secp256k1::SparkSignerSecp256k1;
use super::traits::shamir::SparkSignerShamir;
use super::traits::SparkSigner;
use crate::common_types::types::frost::FrostNonce;
use crate::common_types::types::frost::FrostNonceCommitment;
use crate::common_types::types::frost::FrostSigningCommitments;
use crate::common_types::types::frost::FrostSigningNonces;
use crate::common_types::types::hex_decode;
use crate::common_types::types::hex_encode;
use crate::common_types::types::HashbrownMap;
use crate::common_types::types::RwLock;
use crate::common_types::types::Secp256k1;
use crate::common_types::types::Secp256k1Message;
use crate::common_types::types::SparkRange;
use crate::common_types::types::Transaction;
use crate::common_types::types::Uuid;
use crate::common_types::types::U256;
use crate::common_types::types::{PublicKey, SecretKey};
use crate::constants::spark::frost::FROST_USER_IDENTIFIER;
use crate::constants::spark::frost::FROST_USER_KEY_PACKAGE_MIN_SIGNERS;
use crate::constants::spark::frost::FROST_USER_SIGNING_ROLE;
use crate::constants::spark::SPARK_DERIVATION_PATH_PURPOSE;
use crate::error::{CryptoError, SparkSdkError, ValidationError};
use crate::wallet::internal_handlers::traits::create_tree::DepositAddressTree;
use crate::wallet::internal_handlers::traits::transfer::LeafKeyTweak;
use crate::wallet::internal_handlers::traits::transfer::LeafRefundSigningData;
use crate::wallet::utils::bitcoin::bitcoin_tx_from_bytes;
use crate::wallet::utils::bitcoin::serialize_bitcoin_transaction;
use crate::wallet::utils::bitcoin::sighash_from_tx;
use crate::wallet::utils::bitcoin::sighash_from_tx_new;
use crate::wallet::utils::sequence::next_sequence;
use crate::wallet::utils::transaction::ephemeral_anchor_output;
use crate::SparkNetwork;
use bitcoin::bip32::{ChildNumber, Xpriv};
use bitcoin::secp256k1::ecdsa::Signature;
use bitcoin::Network;
use k256::elliptic_curve::bigint::Encoding;
use sha2::{Digest, Sha256};
use spark_cryptography::key_arithmetic::subtract_secret_keys;
use spark_cryptography::secp256k1::CURVE_ORDER;
use spark_cryptography::secret_sharing::secret_sharing::split_secret_with_proofs;
use spark_cryptography::secret_sharing::secret_sharing::VerifiableSecretShare;
use spark_cryptography::signing::aggregate_frost;
use spark_cryptography::signing::sign_frost;
use spark_protos::common::SigningCommitment as SparkOperatorCommitment;
use spark_protos::common::SigningCommitment;
use spark_protos::frost::AggregateFrostRequest;
use spark_protos::frost::AggregateFrostResponse;
use spark_protos::frost::FrostSigningJob;
use spark_protos::frost::SignFrostRequest;
use spark_protos::frost::SignFrostResponse;
use spark_protos::spark::LeafRefundTxSigningResult;
use spark_protos::spark::NodeSignatures;
use spark_protos::spark::RequestedSigningCommitments;
use spark_protos::spark::SigningKeyshare;
use spark_protos::spark::SigningResult;
use std::collections::HashMap;
use std::sync::Arc;
use tonic::async_trait;

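/// Default in-memory [`SparkSigner`] implementation.
///
/// With the `self-signing` feature enabled, all keys are derived locally from
/// `master_seed`; otherwise the signer is intended to connect to a remote wallet
/// service via `wallet_connection_url` and `wallet_connection_api_key`.
/// `nonce_commitments` maps hex-encoded FROST signing commitments to their
/// hex-encoded nonces, and `public_keys_to_secret_keys` caches derived keypairs.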
#[derive(Clone)]
pub struct DefaultSigner {
    #[cfg(feature = "self-signing")]
    master_seed: Vec<u8>,

    #[cfg(not(feature = "self-signing"))]
    pub wallet_connection_url: String,

    #[cfg(not(feature = "self-signing"))]
    pub wallet_connection_api_key: String,

    pub nonce_commitments: Arc<RwLock<HashbrownMap<String, String>>>,

    pub public_keys_to_secret_keys: Arc<RwLock<HashbrownMap<PublicKey, SecretKey>>>,

    pub network: SparkNetwork,
}

impl SparkSignerDerivationPath for DefaultSigner {
    fn get_deposit_signing_key(&self, network: Network) -> Result<PublicKey, SparkSdkError> {
        let seed_bytes = self.load_master_seed()?;
        let account_number = 0;

        let secret_key = Self::derive_spark_key(
            None,
            account_number,
            &seed_bytes,
            SparkKeyType::TemporarySigning,
            network,
        )?;

        let secp = Secp256k1::new();
        let public_key = secret_key.public_key(&secp);

        if !self
            .public_keys_to_secret_keys
            .read()
            .contains_key(&public_key)
        {
            self.insert_to_keypair_map(&public_key, &secret_key)?;
        }

        Ok(public_key)
    }

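    /// Derives a Spark key from the master seed using a hardened BIP-32 path of the
    /// form `purpose' / account' / key_type' [/ leaf']`, where the purpose is
    /// `SPARK_DERIVATION_PATH_PURPOSE`, the key-type index comes from
    /// `get_key_type_index`, and the optional leaf index is hashed from the leaf id
    /// via `get_leaf_index`.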
    fn derive_spark_key(
        leaf_id: Option<String>,
        account: u32,
        seed_bytes: &[u8],
        key_type: SparkKeyType,
        network: Network,
    ) -> Result<SecretKey, SparkSdkError> {
        let purpose_index = get_child_number(SPARK_DERIVATION_PATH_PURPOSE, true)?;

        let account_index = get_child_number(account, true)?;

        let key_type_index = get_key_type_index(key_type)?;

        let leaf_index = if let Some(leaf_id) = leaf_id {
            Some(get_leaf_index(leaf_id.as_str())?)
        } else {
            None
        };

        let path = prepare_path(purpose_index, account_index, key_type_index, leaf_index);

        let seed = match Xpriv::new_master(network, seed_bytes) {
            Ok(seed) => seed,
            Err(_) => return Err(SparkSdkError::from(CryptoError::InvalidSeed)),
        };

        let secp = bitcoin::secp256k1::Secp256k1::new();
        let extended_key = seed.derive_priv(&secp, &path).map_err(|_| {
            SparkSdkError::from(CryptoError::ChildKeyDerivationError {
                derivation_path: format!("{:?}", path),
            })
        })?;

        Ok(extended_key.private_key)
    }

    fn get_identity_derivation_path(
        account_index: u32,
    ) -> Result<SparkDerivationPath, SparkSdkError> {
        let purpose_index = get_child_number(SPARK_DERIVATION_PATH_PURPOSE, true)?;

        let account_index = get_child_number(account_index, true)?;

        let key_type = SparkKeyType::Identity;
        let key_type_index = get_key_type_index(key_type)?;

        let path = prepare_path(purpose_index, account_index, key_type_index, None);

        Ok(SparkDerivationPath(path))
    }
}

impl SparkSignerSecp256k1 for DefaultSigner {
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn get_identity_public_key(
        &self,
        account_index: u32,
        network: Network,
    ) -> Result<PublicKey, SparkSdkError> {
        let seed_bytes = self.load_master_seed()?;
        let seed = match Xpriv::new_master(network, &seed_bytes) {
            Ok(seed) => seed,
            Err(_) => return Err(SparkSdkError::from(CryptoError::InvalidSeed)),
        };

        let identity_derivation_path = Self::get_identity_derivation_path(account_index)?;

        let secp = bitcoin::secp256k1::Secp256k1::new();
        let identity_key = seed
            .derive_priv(&secp, &*identity_derivation_path)
            .map_err(|_| {
                SparkSdkError::from(CryptoError::ChildKeyDerivationError {
                    derivation_path: format!("{:?}", identity_derivation_path),
                })
            })?;

        let secp = Secp256k1::new();
        let identity_key = identity_key.private_key.public_key(&secp);

        Ok(identity_key)
    }

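    /// Derives and caches a fresh keypair for a leaf. A minimal usage sketch,
    /// mirroring the tests below (the leaf id is an arbitrary UUID string):
    ///
    /// ```ignore
    /// let leaf_id = Uuid::new_v4().to_string();
    /// let pubkey =
    ///     signer.new_secp256k1_keypair(leaf_id, SparkKeyType::BaseSigning, 0, Network::Regtest)?;
    /// ```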
    #[allow(private_interfaces)]
    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn new_secp256k1_keypair(
        &self,
        leaf_id: String,
        key_type: SparkKeyType,
        account_index: u32,
        network: Network,
    ) -> Result<PublicKey, SparkSdkError> {
        let seed = self.load_master_seed()?;
        let secret_key =
            Self::derive_spark_key(Some(leaf_id), account_index, &seed, key_type, network)?;

        let secp = Secp256k1::new();
        let public_key = secret_key.public_key(&secp);

        self.insert_to_keypair_map(&public_key, &secret_key)?;

        Ok(public_key)
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn insert_secp256k1_keypair_from_secret_key(
        &self,
        secret_key: &SecretKey,
    ) -> Result<PublicKey, SparkSdkError> {
        let secp = Secp256k1::new();
        let public_key = PublicKey::from_secret_key(&secp, secret_key);

        self.insert_to_keypair_map(&public_key, secret_key)?;

        Ok(public_key)
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn new_ephemeral_keypair(&self) -> Result<PublicKey, SparkSdkError> {
        let secp = Secp256k1::new();
        let secret_key = SecretKey::new(&mut SparkRange);
        let public_key = secret_key.public_key(&secp);

        self.insert_to_keypair_map(&public_key, &secret_key)?;

        Ok(public_key)
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn subtract_secret_keys_given_pubkeys(
        &self,
        target_pubkey: &PublicKey,
        source_pubkey: &PublicKey,
        save_new_key: bool,
    ) -> Result<PublicKey, SparkSdkError> {
        if target_pubkey == source_pubkey {
            return Err(SparkSdkError::from(ValidationError::InvalidInput {
                field: "Target and source public keys are the same".to_string(),
            }));
        }

        let target_secret_key = &self.get_secret_key_from_pubkey(target_pubkey)?;
        let source_secret_key = &self.get_secret_key_from_pubkey(source_pubkey)?;

        let result_secret_key = subtract_secret_keys(target_secret_key, source_secret_key)
            .map_err(|e| SparkSdkError::from(CryptoError::Secp256k1(e)))?;

        let secp = Secp256k1::new();
        let result_public_key = PublicKey::from_secret_key(&secp, &result_secret_key);

        if save_new_key {
            self.insert_to_keypair_map(&result_public_key, &result_secret_key)?;
        }

        Ok(result_public_key)
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn sensitive_expose_secret_key_from_pubkey(
        &self,
        public_key: &PublicKey,
        delete_after_exposing: bool,
    ) -> Result<SecretKey, SparkSdkError> {
        let secret_key = self.get_secret_key_from_pubkey(public_key)?;

        if delete_after_exposing {
            self.evict_from_keypair_map(public_key)?;
        }

        Ok(secret_key)
    }

    #[cfg_attr(feature = "telemetry", tracing::instrument(skip_all))]
    fn expose_leaf_secret_key_for_transfer(
        &self,
        leaf_id: String,
        key_type: SparkKeyType,
        account_index: u32,
        network: Network,
    ) -> Result<SecretKey, SparkSdkError> {
        let seed = self.load_master_seed()?;
        let secret_key =
            Self::derive_spark_key(Some(leaf_id), account_index, &seed, key_type, network)?;

        let secp = Secp256k1::new();
        let public_key = secret_key.public_key(&secp);

        if !self
            .public_keys_to_secret_keys
            .read()
            .contains_key(&public_key)
        {
            self.insert_to_keypair_map(&public_key, &secret_key)?;
        }

        Ok(secret_key)
    }
}

#[cfg(test)]
mod default_signer_secp256k1_tests {
    use super::*;
    use crate::error::SparkSdkError;

    use bip32::Language;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_default_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = SparkRange;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;

        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_get_identity_public_key() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;
        signer
            .get_identity_public_key(0, Network::Regtest)
            .expect("failed to get identity pk");

        Ok(())
    }

    #[tokio::test]
    async fn test_new_secp256k1_keypair() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;

        let leaf_id = Uuid::new_v4().to_string();
        let account_index = 0;
        let key_type = SparkKeyType::BaseSigning;
        let network = Network::Regtest;

        let pubkey = signer.new_secp256k1_keypair(leaf_id, key_type, account_index, network)?;
        let pubkey_bytes = pubkey.serialize().to_vec();
        assert_eq!(pubkey_bytes.len(), 33);

        let identity_pubkey = signer.get_identity_public_key(0, Network::Regtest)?;
        assert_ne!(identity_pubkey, pubkey);

        Ok(())
    }

    #[tokio::test]
    async fn test_insert_secp256k1_keypair_from_secret_key() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;
        let secret_key = SecretKey::new(&mut SparkRange);
        let pubkey = signer.insert_secp256k1_keypair_from_secret_key(&secret_key)?;

        let retrieved_secret_key =
            signer.sensitive_expose_secret_key_from_pubkey(&pubkey, false)?;
        assert_eq!(retrieved_secret_key, secret_key);

        Ok(())
    }

    #[tokio::test]
    async fn test_subtract_secret_keys_given_pubkeys() -> Result<(), SparkSdkError> {
        let account_index = 0;
        let key_type = SparkKeyType::BaseSigning;
        let network = Network::Regtest;

        let leaf_id_1 = Uuid::new_v4().to_string();
        let leaf_id_2 = Uuid::new_v4().to_string();

        let signer = create_default_signer().await?;

        let pubkey_1 = signer.new_secp256k1_keypair(leaf_id_1, key_type, account_index, network)?;
        let pubkey_2 = signer.new_secp256k1_keypair(leaf_id_2, key_type, account_index, network)?;

        let new_pubkey = signer.subtract_secret_keys_given_pubkeys(&pubkey_1, &pubkey_2, true)?;

        let _ = signer.sensitive_expose_secret_key_from_pubkey(&new_pubkey, false)?;

        Ok(())
    }
}

const SPLIT_SECRET_ERROR: &str = "Failed to split secret: ";
#[async_trait]
impl SparkSignerShamir for DefaultSigner {
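    /// Splits `message` into `num_shares` verifiable secret shares with reconstruction
    /// threshold `threshold`. The modulus handed to `split_secret_with_proofs` is the
    /// secp256k1 curve order minus one, encoded as big-endian bytes.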
    fn split_with_verifiable_secret_sharing(
        &self,
        message: Vec<u8>,
        threshold: usize,
        num_shares: usize,
    ) -> Result<Vec<VerifiableSecretShare>, SparkSdkError> {
        let raw = hex_decode(CURVE_ORDER).unwrap();
        let u = U256::from_be_slice(&raw);
        let minus_one = u.saturating_sub(&U256::ONE);
        let field_modulus = minus_one.to_be_bytes().to_vec();

        let shares = split_secret_with_proofs(&message, &field_modulus, threshold, num_shares)
            .map_err(|e| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("{} {}", SPLIT_SECRET_ERROR, e),
                })
            })?;

        Ok(shares)
    }

    fn split_from_public_key_with_verifiable_secret_sharing(
        &self,
        public_key: &PublicKey,
        threshold: usize,
        num_shares: usize,
    ) -> Result<Vec<VerifiableSecretShare>, SparkSdkError> {
        let secret_key = self.get_secret_key_from_pubkey(public_key)?;
        let shares = self.split_with_verifiable_secret_sharing(
            secret_key.secret_bytes().to_vec(),
            threshold,
            num_shares,
        )?;
        Ok(shares)
    }
}

#[cfg(test)]
mod default_signer_shamir_tests {
    use super::*;
    use crate::error::SparkSdkError;
    use bip32::Language;
    use rand::rngs::OsRng;
    use std::sync::Arc;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_shamir_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = OsRng;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    #[ignore]
    async fn test_split_with_verifiable_secret_sharing() -> Result<(), SparkSdkError> {
        let signer = create_shamir_test_signer().await?;
        let message = b"hello world".to_vec();

        let threshold = 2;
        let num_shares = 3;

        let shares = signer.split_with_verifiable_secret_sharing(message, threshold, num_shares)?;

        assert_eq!(shares.len(), num_shares);

        Ok(())
    }
}

impl SparkSignerEcdsa for DefaultSigner {
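    /// Signs `message` with the account-0 identity key. When `apply_hashing` is true
    /// the message is SHA-256 hashed first; otherwise it must already be a 32-byte
    /// digest, since it is fed to `Secp256k1Message::from_digest_slice` as-is.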
    fn sign_message_ecdsa_with_identity_key<T: AsRef<[u8]>>(
        &self,
        message: T,
        apply_hashing: bool,
        network: Network,
    ) -> Result<Signature, SparkSdkError> {
        let payload_hash = if apply_hashing {
            sha256::digest(message.as_ref())
        } else {
            hex::encode(message.as_ref())
        };
        let message = Secp256k1Message::from_digest_slice(&hex::decode(&payload_hash).unwrap())?;

        let identity_secret_key = self.get_identity_secret_key(0, network)?;

        let secp = Secp256k1::new();
        Ok(secp.sign_ecdsa(&message, &identity_secret_key))
    }

    fn sign_message_ecdsa_with_key<T: AsRef<[u8]>>(
        &self,
        message: T,
        public_key_for_signing_key: &PublicKey,
        apply_hashing: bool,
    ) -> Result<Signature, SparkSdkError> {
        let payload_hash = if apply_hashing {
            sha256::digest(message.as_ref())
        } else {
            hex::encode(message.as_ref())
        };

        let secret_key = self.get_secret_key_from_pubkey(public_key_for_signing_key)?;
        let secp = Secp256k1::new();

        let message = Secp256k1Message::from_digest_slice(&hex::decode(&payload_hash).unwrap())?;

        Ok(secp.sign_ecdsa(&message, &secret_key))
    }
}

#[cfg(test)]
mod default_signer_ecdsa_tests {
    use super::*;
    use crate::common_types::types::Digest;
    use crate::common_types::types::Secp256k1Message;
    use crate::common_types::types::Sha256;
    use bip32::Language;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_ecdsa_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = SparkRange;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_sign_message_ecdsa_with_identity_key() -> Result<(), SparkSdkError> {
        let signer = create_ecdsa_test_signer().await?;

        let message = b"this is a test message";
        let signature =
            signer.sign_message_ecdsa_with_identity_key(message, true, Network::Regtest)?;

        let identity_pk = signer.get_identity_public_key(0, Network::Regtest)?;
        let msg_hash = Sha256::digest(message);
        let message_for_verify = Secp256k1Message::from_digest_slice(&msg_hash).map_err(|e| {
            SparkSdkError::from(CryptoError::InvalidInput {
                field: format!("Failed to parse message: {e}"),
            })
        })?;

        let ctx = Secp256k1::verification_only();
        ctx.verify_ecdsa(&message_for_verify, &signature, &identity_pk)
            .map_err(|_e| {
                SparkSdkError::from(CryptoError::InvalidInput {
                    field: "Signature verification failed".to_string(),
                })
            })?;

        Ok(())
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_sign_message_ecdsa_with_key() -> Result<(), SparkSdkError> {
        use rand::thread_rng;

        let secp = Secp256k1::new();
        let mut rng = thread_rng();
        let keypair = bitcoin::secp256k1::Keypair::new(&secp, &mut rng);

        let signer = create_ecdsa_test_signer().await?;

        signer.insert_to_keypair_map(&keypair.public_key(), &keypair.secret_key())?;

        let message = b"this is a test message";
        let _ = signer.sign_message_ecdsa_with_key(message, &keypair.public_key(), true)?;

        Ok(())
    }
}

impl SparkSignerEcies for DefaultSigner {
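    /// ECIES-encrypts the stored secret key identified by `pubkey_for_sk_to_encrypt`
    /// to `receiver_public_key`; the counterpart `decrypt_secret_key_with_ecies`
    /// decrypts with the account-0 identity key.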
    fn encrypt_secret_key_with_ecies(
        &self,
        receiver_public_key: &PublicKey,
        pubkey_for_sk_to_encrypt: &PublicKey,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let secret_key = self.get_secret_key_from_pubkey(pubkey_for_sk_to_encrypt)?;
        let ciphertext =
            ecies::encrypt(&receiver_public_key.serialize(), &secret_key.secret_bytes()).map_err(
                |e| {
                    SparkSdkError::from(CryptoError::InvalidInput {
                        field: format!("Failed to encrypt: {}", e),
                    })
                },
            )?;

        Ok(ciphertext)
    }

    fn decrypt_secret_key_with_ecies<T>(
        &self,
        ciphertext: T,
        network: Network,
    ) -> Result<SecretKey, SparkSdkError>
    where
        T: AsRef<[u8]>,
    {
        let identity_secret_key = self.get_identity_secret_key(0, network)?;

        ecies::decrypt(&identity_secret_key.secret_bytes(), ciphertext.as_ref())
            .map_err(|e| {
                SparkSdkError::from(CryptoError::InvalidInput {
                    field: format!("Failed to decrypt: {}", e),
                })
            })
            .and_then(|bytes| {
                SecretKey::from_slice(&bytes).map_err(|err| {
                    SparkSdkError::from(CryptoError::InvalidInput {
                        field: format!("Failed to convert to SecretKey: {}", err),
                    })
                })
            })
    }
}

#[cfg(test)]
mod default_signer_ecies_tests {
    use super::*;
    use crate::error::SparkSdkError;
    use bip32::Language;
    use rand::rngs::OsRng;
    use std::sync::Arc;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_ecies_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let mnemonic = bip32::Mnemonic::random(OsRng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_ecies_encrypt_decrypt_round_trip() -> Result<(), SparkSdkError> {
        let signer = create_ecies_test_signer().await?;

        let leaf_id = Uuid::new_v4().to_string();
        let account_index = 0;
        let key_type = SparkKeyType::BaseSigning;
        let network = Network::Regtest;

        let ephemeral_pubkey =
            signer.new_secp256k1_keypair(leaf_id, key_type, account_index, network)?;

        let ephemeral_privkey =
            signer.sensitive_expose_secret_key_from_pubkey(&ephemeral_pubkey, false)?;

        let receiver_pubkey = signer.get_identity_public_key(0, Network::Regtest)?;

        let ciphertext =
            signer.encrypt_secret_key_with_ecies(&receiver_pubkey, &ephemeral_pubkey)?;

        let decrypted_key = signer.decrypt_secret_key_with_ecies(&ciphertext, Network::Regtest)?;

        assert_eq!(
            decrypted_key, ephemeral_privkey,
            "Decrypted key did not match the original"
        );

        Ok(())
    }
}

#[async_trait]
impl SparkSignerFrost for DefaultSigner {
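    /// Generates a fresh FROST nonce pair from two random scalars, stores the nonces
    /// in `nonce_commitments` keyed by the hex-encoded serialized commitments, and
    /// returns the commitments.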
    fn new_frost_signing_noncepair(&self) -> Result<FrostSigningCommitments, SparkSdkError> {
        let mut rng = SparkRange;
        let binding_sk = SecretKey::new(&mut rng);
        let hiding_sk = SecretKey::new(&mut rng);

        let binding = FrostNonce::deserialize(&binding_sk.secret_bytes()).unwrap();
        let hiding = FrostNonce::deserialize(&hiding_sk.secret_bytes()).unwrap();

        let nonces =
            frost_secp256k1_tr_unofficial::round1::SigningNonces::from_nonces(hiding, binding);
        let commitments = nonces.commitments();

        let nonces_bytes = nonces.serialize().unwrap();
        let commitment_bytes = commitments.serialize().unwrap();

        self.insert_to_noncepair_map(commitment_bytes, nonces_bytes)?;

        Ok(*commitments)
    }

    fn sensitive_expose_nonces_from_commitments<T>(
        &self,
        signing_commitments: &T,
    ) -> Result<FrostSigningNonces, SparkSdkError>
    where
        T: AsRef<[u8]>,
    {
        let signing_commitment_hex = hex_encode(signing_commitments.as_ref());
        let signing_nonces = self
            .nonce_commitments
            .read()
            .get(&signing_commitment_hex)
            .cloned()
            .ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Nonce commitments not found".to_string(),
                })
            })?;

        let signing_nonces_bytes = hex_decode(signing_nonces).unwrap();
        let signing_nonces = FrostSigningNonces::deserialize(&signing_nonces_bytes).unwrap();

        Ok(signing_nonces)
    }

    fn sensitive_create_if_not_found_expose_nonces_from_commitments(
        &self,
        signing_commitments: Option<&[u8]>,
    ) -> Result<FrostSigningNonces, SparkSdkError> {
        let commitments = if let Some(commitments) = signing_commitments {
            commitments.to_vec()
        } else {
            let commitments = self.new_frost_signing_noncepair()?;
            commitments.serialize().unwrap().clone()
        };
        let signing_nonces = self.sensitive_expose_nonces_from_commitments(&commitments)?;
        Ok(signing_nonces)
    }
}

impl SparkSignerFrostSigning for DefaultSigner {
    fn sign_frost(
        &self,
        signing_jobs: Vec<FrostSigningJob>,
    ) -> Result<SignFrostResponse, SparkSdkError> {
        let marshalized_request = SignFrostRequest {
            signing_jobs,
            role: FROST_USER_SIGNING_ROLE,
        };
        sign_frost(&marshalized_request).map_err(|e| {
            SparkSdkError::from(CryptoError::FrostSigning {
                job_id: e.to_string(),
            })
        })
    }

    fn aggregate_frost(
        &self,
        request: AggregateFrostRequest,
    ) -> Result<AggregateFrostResponse, SparkSdkError> {
        aggregate_frost(&request).map_err(|_| SparkSdkError::from(CryptoError::FrostAggregation))
    }

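    /// Walks the created tree breadth-first. For every node it FROST-signs the node
    /// transaction (and, when present, the refund transaction) with the user share,
    /// aggregates that share with the operators' shares, and collects the resulting
    /// `NodeSignatures` along with the signing public key of each visited node.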
    fn sign_created_tree_in_bfs_order(
        &self,
        tx: Transaction,
        vout: u32,
        internal_tree_root: Arc<RwLock<DepositAddressTree>>,
        request_tree_root: spark_protos::spark::CreationNode,
        creation_result_tree_root: spark_protos::spark::CreationResponseNode,
    ) -> Result<(Vec<NodeSignatures>, Vec<Vec<u8>>), SparkSdkError> {
        #[derive(Clone)]
        struct QueueItem {
            parent_tx: Transaction,
            vout: u32,
            internal_node: Arc<RwLock<DepositAddressTree>>,
            creation_node: spark_protos::spark::CreationNode,
            creation_response_node: spark_protos::spark::CreationResponseNode,
        }

        let mut queue = std::collections::VecDeque::new();
        let mut node_signatures = vec![];

        queue.push_back(QueueItem {
            parent_tx: tx,
            vout,
            internal_node: internal_tree_root,
            creation_node: request_tree_root,
            creation_response_node: creation_result_tree_root,
        });

        let mut signing_public_keys = vec![];

        while let Some(current) = queue.pop_front() {
            let node_prevout_index = current.vout as usize;
            let node_signing_input_index = 0;

            let internal_node = current.internal_node.read().clone();
            let creation_node = current.creation_node.clone();
            let node_signing_job = creation_node.node_tx_signing_job.clone().unwrap();
            let serialized_node_transaction = node_signing_job.raw_tx;
            let user_node_commitments = node_signing_job.signing_nonce_commitment.clone().unwrap();
            let (spark_node_signature_shares, spark_node_public_shares, _, spark_node_commitments) =
                get_signature_data_from_signing_result(
                    &current.creation_response_node.node_tx_signing_result,
                )?;

            let signing_job = self.prepare_frost_signing_job(
                internal_node.signing_public_key.clone(),
                Some(serialize_marshalized_frost_commitments(
                    &user_node_commitments,
                )?),
                spark_node_commitments.clone(),
                serialized_node_transaction.clone(),
                node_prevout_index,
                node_signing_input_index,
                &current.parent_tx.clone(),
                vec![],
                internal_node.verification_key.clone().unwrap(),
            )?;

            let node_signature = self.sign_frost(vec![signing_job.clone()]).unwrap();
            let user_node_signature_share = node_signature.results[&signing_job.job_id]
                .signature_share
                .clone();

            let aggregate_response = self
                .aggregate_frost(spark_protos::frost::AggregateFrostRequest {
                    message: signing_job.message,
                    signature_shares: spark_node_signature_shares,
                    public_shares: spark_node_public_shares,
                    verifying_key: signing_job.verifying_key.clone(),
                    commitments: spark_node_commitments,
                    user_commitments: signing_job.user_commitments,
                    user_public_key: signing_job.verifying_key,
                    user_signature_share: user_node_signature_share,
                    adaptor_public_key: vec![],
                })
                .unwrap();

            let mut node_signature = spark_protos::spark::NodeSignatures {
                node_id: current.creation_response_node.node_id.clone(),
                node_tx_signature: aggregate_response.signature,
                ..Default::default()
            };

            if let Some(refund_signing_job) = current.creation_node.refund_tx_signing_job {
                let refund_prevout_index = 0;
                let refund_signing_input_index = 0_usize;

                let serialized_refund_transaction = refund_signing_job.raw_tx;
                let user_refund_commitments = refund_signing_job.signing_nonce_commitment.unwrap();
                let (
                    spark_refund_signature_shares,
                    spark_refund_public_shares,
                    _,
                    spark_refund_commitments,
                ) = get_signature_data_from_signing_result(
                    &current.creation_response_node.refund_tx_signing_result,
                )?;

                let refund_signing_job = self.prepare_frost_signing_job(
                    internal_node.signing_public_key,
                    Some(serialize_marshalized_frost_commitments(
                        &user_refund_commitments,
                    )?),
                    spark_refund_commitments.clone(),
                    serialized_refund_transaction,
                    refund_prevout_index,
                    refund_signing_input_index,
                    &bitcoin_tx_from_bytes(&serialized_node_transaction)?,
                    vec![],
                    internal_node.verification_key.clone().unwrap(),
                )?;

                let refund_signature = self.sign_frost(vec![refund_signing_job.clone()])?;
                let user_refund_signature_share = refund_signature.results
                    [&refund_signing_job.job_id]
                    .signature_share
                    .clone();

                let aggregate_response = self
                    .aggregate_frost(spark_protos::frost::AggregateFrostRequest {
                        message: refund_signing_job.message,
                        signature_shares: spark_refund_signature_shares,
                        public_shares: spark_refund_public_shares,
                        verifying_key: refund_signing_job.verifying_key.clone(),
                        commitments: spark_refund_commitments,
                        user_commitments: refund_signing_job.user_commitments,
                        user_public_key: refund_signing_job.verifying_key,
                        user_signature_share: user_refund_signature_share,
                        adaptor_public_key: vec![],
                    })
                    .unwrap();

                node_signature.refund_tx_signature = aggregate_response.signature;
            }
            node_signatures.push(node_signature);

            for (i, child) in current.creation_node.children.into_iter().enumerate() {
                queue.push_back(QueueItem {
                    parent_tx: bitcoin_tx_from_bytes(&serialized_node_transaction)?,
                    vout: i as u32,
                    internal_node: current.internal_node.read().children[i].clone(),
                    creation_node: child,
                    creation_response_node: current.creation_response_node.children[i].clone(),
                });
            }

            signing_public_keys.push(current.internal_node.read().signing_public_key.clone());
        }

        Ok((node_signatures, signing_public_keys))
    }

    fn sign_transfer_refunds(
        &self,
        leaf_data_map: &HashMap<String, LeafRefundSigningData>,
        operator_signing_results: &Vec<LeafRefundTxSigningResult>,
        adaptor_public_key: Vec<u8>,
    ) -> Result<Vec<spark_protos::spark::NodeSignatures>, SparkSdkError> {
        let mut user_signing_jobs = Vec::new();
        let mut job_to_aggregate_request_map = HashMap::new();
        let mut job_to_leaf_map = HashMap::new();

        for operator_result in operator_signing_results {
            let signing_input_index = 0;
            let prevout_to_use = 0;

            let leaf_data = leaf_data_map.get(&operator_result.leaf_id).ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: "Leaf data not found".to_string(),
                })
            })?;

            let refund_tx_ = leaf_data.refund_tx.as_ref().unwrap();
            let serialized_refund_tx = serialize_bitcoin_transaction(refund_tx_)?;

            let (
                spark_refund_signature_shares,
                spark_refund_public_shares,
                _,
                spark_refund_commitments,
            ) = get_signature_data_from_signing_result(&operator_result.refund_tx_signing_result)?;

            let commitment_hiding = leaf_data.commitment.hiding.clone();
            let commitment_binding = leaf_data.commitment.binding.clone();
            let signing_commitments =
                frost_secp256k1_tr_unofficial::round1::SigningCommitments::new(
                    FrostNonceCommitment::deserialize(&commitment_hiding).unwrap(),
                    FrostNonceCommitment::deserialize(&commitment_binding).unwrap(),
                );

            let signing_job = self.prepare_frost_signing_job(
                leaf_data.signing_public_key.serialize(),
                Some(signing_commitments.serialize().unwrap()),
                spark_refund_commitments.clone(),
                serialized_refund_tx,
                prevout_to_use,
                signing_input_index,
                &leaf_data.tx,
                adaptor_public_key.clone(),
                operator_result.verifying_key.clone(),
            )?;
            let signing_job_id = signing_job.job_id.clone();
            user_signing_jobs.push(signing_job.clone());

            job_to_leaf_map.insert(signing_job_id.clone(), operator_result.leaf_id.clone());

            job_to_aggregate_request_map.insert(
                signing_job_id.clone(),
                AggregateFrostRequest {
                    message: signing_job.message,
                    signature_shares: spark_refund_signature_shares,
                    public_shares: spark_refund_public_shares,
                    verifying_key: operator_result.verifying_key.clone(),
                    commitments: spark_refund_commitments,
                    user_commitments: signing_job.user_commitments,
                    user_public_key: leaf_data.signing_public_key.serialize().to_vec(),
                    user_signature_share: vec![],
                    adaptor_public_key: vec![],
                },
            );
        }

        let user_signatures = self.sign_frost(user_signing_jobs)?;

        let mut node_signatures = Vec::new();
        for (job_id, user_signature) in user_signatures.results {
            let mut request = job_to_aggregate_request_map
                .remove(&job_id)
                .ok_or_else(|| {
                    SparkSdkError::from(ValidationError::InvalidInput {
                        field: "Job ID not found".to_string(),
                    })
                })?;

            request.user_signature_share = user_signature.signature_share;

            let response = match self.aggregate_frost(request) {
                Ok(response) => response,
                Err(e) => {
                    return Err(SparkSdkError::from(CryptoError::InvalidInput {
                        field: format!("Failed to aggregate refund: {}", e).to_string(),
                    }));
                }
            };

            node_signatures.push(spark_protos::spark::NodeSignatures {
                node_id: job_to_leaf_map[&job_id].clone(),
                refund_tx_signature: response.signature,
                node_tx_signature: Vec::new(),
            });
        }

        Ok(node_signatures)
    }

    fn sign_for_lightning_swap(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        signing_commitments: &Vec<RequestedSigningCommitments>,
        receiver_identity_pubkey: PublicKey,
    ) -> Result<
        (
            SignFrostResponse,
            Vec<Vec<u8>>,
            Vec<ProtoSigningCommitments>,
        ),
        SparkSdkError,
    > {
        let mut signing_jobs = Vec::new();
        let mut refund_txs = vec![];

        let mut user_commitments = Vec::with_capacity(leaves.len());

        for (i, leaf) in leaves.iter().enumerate() {
            let node_tx = bitcoin_tx_from_bytes(&leaf.leaf.node_tx)?;

            let node_outpoint = bitcoin::OutPoint {
                txid: node_tx.compute_txid(),
                vout: 0,
            };

            let current_refund_tx = bitcoin_tx_from_bytes(&leaf.leaf.refund_tx)?;

            let next_sequence = next_sequence(current_refund_tx.input[0].sequence.0);

            let amount_sats = node_tx.output[0].value;

            let refund_tx = create_refund_tx(
                next_sequence,
                node_outpoint,
                amount_sats,
                &receiver_identity_pubkey,
                self.network.to_bitcoin_network(),
            )?;

            let refund_tx_buf = serialize_bitcoin_transaction(&refund_tx)?;
            refund_txs.push(refund_tx_buf);

            let sighash = sighash_from_tx(&refund_tx, 0, &node_tx.output[0])?;

            let user_commitment = self.new_frost_signing_noncepair()?;
            let user_nonce = self
                .sensitive_expose_nonces_from_commitments(&user_commitment.serialize().unwrap())?;

            let marshalized_frost_nonces = marshal_frost_nonces(&user_nonce)?;
            let marshalized_frost_commitments = marshal_frost_commitments(&user_commitment)?;

            user_commitments.push(marshalized_frost_commitments.clone());

            let signing_secret_key =
                self.sensitive_expose_secret_key_from_pubkey(&leaf.new_signing_public_key, false)?;
            let key_package = create_user_key_package(&signing_secret_key.secret_bytes());

            signing_jobs.push(FrostSigningJob {
                job_id: leaf.leaf.id.clone(),
                message: sighash.to_vec(),
                key_package: Some(key_package),
                verifying_key: leaf.leaf.verifying_public_key.clone(),
                nonce: Some(marshalized_frost_nonces),
                user_commitments: Some(marshalized_frost_commitments),
                commitments: signing_commitments[i].signing_nonce_commitments.clone(),
                adaptor_public_key: vec![],
            });
        }

        let signing_results = self.sign_frost(signing_jobs)?;

        Ok((signing_results, refund_txs, user_commitments))
    }

    fn sign_root_creation(
        &self,
        signing_pubkey_bytes: Vec<u8>,
        verifying_pubkey_bytes: Vec<u8>,
        _root_tx_bytes: Vec<u8>,
        _refund_tx_bytes: Vec<u8>,
        root_tx_sighash: Vec<u8>,
        refund_tx_sighash: Vec<u8>,
        root_nonce_commitment: FrostSigningCommitments,
        refund_nonce_commitment: FrostSigningCommitments,
        tree_creation_response: spark_protos::spark::StartTreeCreationResponse,
    ) -> Result<Vec<Vec<u8>>, SparkSdkError> {
        let signature_data = tree_creation_response
            .root_node_signature_shares
            .unwrap()
            .clone();
        let root_signature_data = signature_data.node_tx_signing_result.unwrap();
        let root_signature_shares = root_signature_data.signature_shares.clone();
        let root_nonce_commitments = root_signature_data.signing_nonce_commitments.clone();
        let root_pubkeys = root_signature_data.public_keys.clone();
        let refund_signature_data = signature_data.refund_tx_signing_result.unwrap();
        let refund_signature_shares = refund_signature_data.signature_shares.clone();
        let refund_nonce_commitments = refund_signature_data.signing_nonce_commitments.clone();
        let refund_pubkeys = refund_signature_data.public_keys.clone();

        let signing_key = self.sensitive_expose_secret_key_from_pubkey(
            &PublicKey::from_slice(&signing_pubkey_bytes)?,
            false,
        )?;
        let key_package = create_user_key_package(&signing_key.secret_bytes());

        let root_nonce_commitments_bytes = root_nonce_commitment.serialize().unwrap();
        let refund_nonce_commitments_bytes = refund_nonce_commitment.serialize().unwrap();

        let root_nonce =
            self.sensitive_expose_nonces_from_commitments(&root_nonce_commitments_bytes)?;
        let refund_nonce =
            self.sensitive_expose_nonces_from_commitments(&refund_nonce_commitments_bytes)?;

        let node_job_id = Uuid::now_v7().to_string();
        let node_signing_job = FrostSigningJob {
            job_id: node_job_id.clone(),
            message: root_tx_sighash.clone(),
            commitments: root_nonce_commitments.clone(),
            key_package: Some(key_package.clone()),
            verifying_key: verifying_pubkey_bytes.clone(),
            nonce: Some(marshal_frost_nonces(&root_nonce)?),
            user_commitments: Some(marshal_frost_commitments(&root_nonce_commitment)?),
            adaptor_public_key: vec![],
        };

        let refund_job_id = Uuid::now_v7().to_string();
        let refund_signing_job = FrostSigningJob {
            job_id: refund_job_id.clone(),
            message: refund_tx_sighash.clone(),
            commitments: refund_nonce_commitments.clone(),
            key_package: Some(key_package),
            verifying_key: verifying_pubkey_bytes.clone(),
            nonce: Some(marshal_frost_nonces(&refund_nonce)?),
            user_commitments: Some(marshal_frost_commitments(&refund_nonce_commitment)?),
            adaptor_public_key: vec![],
        };

        let signing_results = self.sign_frost(vec![node_signing_job, refund_signing_job])?;

        let signature_results = signing_results.results;
        let user_root_signature_share = signature_results[&node_job_id].signature_share.clone();
        let user_refund_signature_share = signature_results[&refund_job_id].signature_share.clone();

        let root_aggregation_request = AggregateFrostRequest {
            message: root_tx_sighash,
            signature_shares: root_signature_shares,
            public_shares: root_pubkeys,
            verifying_key: verifying_pubkey_bytes.clone(),
            commitments: root_nonce_commitments,
            user_commitments: Some(marshal_frost_commitments(&root_nonce_commitment)?),
            user_public_key: signing_pubkey_bytes.clone(),
            user_signature_share: user_root_signature_share,
            adaptor_public_key: vec![],
        };
        let refund_aggregation_request = AggregateFrostRequest {
            message: refund_tx_sighash,
            signature_shares: refund_signature_shares,
            public_shares: refund_pubkeys,
            verifying_key: verifying_pubkey_bytes.clone(),
            commitments: refund_nonce_commitments,
            user_commitments: Some(marshal_frost_commitments(&refund_nonce_commitment)?),
            user_public_key: signing_pubkey_bytes,
            user_signature_share: user_refund_signature_share,
            adaptor_public_key: vec![],
        };

        let complete_root_signature = self.aggregate_frost(root_aggregation_request)?;
        let complete_refund_signature = self.aggregate_frost(refund_aggregation_request)?;

        let root_sig = complete_root_signature.signature;
        let refund_sig = complete_refund_signature.signature;

        Ok(vec![root_sig, refund_sig])
    }

    fn sign_frost_new(
        &self,
        message: Vec<u8>,
        private_as_pubkey: Vec<u8>,
        verifying_key: Vec<u8>,
        self_commitment: FrostSigningCommitments,
        spark_commitments: HashMap<String, SigningCommitment>,
        adaptor_public_key: Option<Vec<u8>>,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let signing_private_key = self.sensitive_expose_secret_key_from_pubkey(
            &PublicKey::from_slice(&private_as_pubkey)?,
            false,
        )?;
        let self_commitment_bytes = self_commitment.serialize().map_err(|e| {
            SparkSdkError::from(CryptoError::FrostSigning {
                job_id: e.to_string(),
            })
        })?;
        let nonce = self.sensitive_expose_nonces_from_commitments(&self_commitment_bytes)?;

        let key_package = create_user_key_package(&signing_private_key.secret_bytes());

        let adaptor_public_key = adaptor_public_key.unwrap_or_default();

        let job_id = Uuid::now_v7().to_string();
        let signing_job = FrostSigningJob {
            job_id: job_id.clone(),
            message,
            key_package: Some(key_package),
            verifying_key,
            nonce: Some(marshal_frost_nonces(&nonce)?),
            commitments: spark_commitments,
            user_commitments: Some(marshal_frost_commitments(&self_commitment)?),
            adaptor_public_key,
        };

        let sign_frost_request = SignFrostRequest {
            signing_jobs: vec![signing_job],
            role: FROST_USER_SIGNING_ROLE,
        };

        let signature = match sign_frost(&sign_frost_request) {
            Ok(signature) => signature,
            Err(e) => {
                return Err(SparkSdkError::from(CryptoError::FrostSigning {
                    job_id: e.to_string(),
                }));
            }
        };

        let signature = signature.results.get(&job_id);
        if signature.is_none() {
            return Err(SparkSdkError::from(CryptoError::FrostSignatureNotFound {
                job_id,
            }));
        }

        Ok(signature.unwrap().signature_share.clone())
    }
}

#[async_trait]
impl SparkSigner for DefaultSigner {
    type WrappedSigner = Arc<Self>;

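    /// Builds a signer from a BIP-39 mnemonic by deriving the master seed with an
    /// empty passphrase. A minimal usage sketch (mirroring the test helpers in this
    /// module; `mnemonic` is any valid BIP-39 phrase):
    ///
    /// ```ignore
    /// let signer = DefaultSigner::from_mnemonic(mnemonic, SparkNetwork::Regtest).await?;
    /// ```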
    #[cfg(feature = "self-signing")]
    async fn from_mnemonic(
        mnemonic: &str,
        network: SparkNetwork,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        let seed_bytes = bip39::Mnemonic::parse(mnemonic)
            .map_err(|e| {
                SparkSdkError::from(CryptoError::InvalidInput {
                    field: e.to_string(),
                })
            })?
            .to_seed("");
        Self::from_master_seed(&seed_bytes, network).await
    }

    #[cfg(feature = "self-signing")]
    async fn from_master_seed(
        master_seed: &[u8],
        network: SparkNetwork,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        let nonce_commitments = HashbrownMap::new();
        let public_keys_to_secret_keys = HashbrownMap::new();

        let commitments_map = Arc::new(RwLock::new(nonce_commitments));
        let public_keys_map = Arc::new(RwLock::new(public_keys_to_secret_keys));

        Ok(Arc::new(Self {
            master_seed: master_seed.to_vec(),
            nonce_commitments: commitments_map,
            public_keys_to_secret_keys: public_keys_map,
            network,
        }))
    }

    #[cfg(not(feature = "self-signing"))]
    async fn new_remote(
        signer_url: &str,
        wallet_id: &str,
        user_public_key_hex: &str,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        todo!()
    }
}

const INVALID_SECRET_KEY_ERROR: &str =
    "Could not find secret key in the signer space. Public key used as the index: ";
impl DefaultSigner {
    pub(crate) fn load_master_seed(&self) -> Result<Vec<u8>, SparkSdkError> {
        Ok(self.master_seed.clone())
    }

    pub(crate) fn get_identity_secret_key(
        &self,
        account_index: u32,
        network: Network,
    ) -> Result<SecretKey, SparkSdkError> {
        let master_seed_bytes = self.load_master_seed()?;
        let master_seed = Xpriv::new_master(network, &master_seed_bytes)
            .map_err(|_| SparkSdkError::from(CryptoError::InvalidSeed))?;

        let identity_derivation_path = Self::get_identity_derivation_path(account_index)?;

        let secp = Secp256k1::new();
        let identity_key = master_seed
            .derive_priv(&secp, &*identity_derivation_path)
            .map_err(|_| {
                SparkSdkError::from(CryptoError::ChildKeyDerivationError {
                    derivation_path: format!("{:?}", identity_derivation_path),
                })
            })?;

        Ok(identity_key.private_key)
    }

    pub(crate) fn get_secret_key_from_pubkey(
        &self,
        public_key: &PublicKey,
    ) -> Result<SecretKey, SparkSdkError> {
        self.public_keys_to_secret_keys
            .read()
            .get(public_key)
            .cloned()
            .ok_or_else(|| {
                SparkSdkError::from(ValidationError::InvalidInput {
                    field: format!("{} {}", INVALID_SECRET_KEY_ERROR, public_key).to_string(),
                })
            })
    }

    pub(crate) fn insert_to_keypair_map(
        &self,
        public_key: &PublicKey,
        secret_key: &SecretKey,
    ) -> Result<(), SparkSdkError> {
        self.public_keys_to_secret_keys
            .write()
            .insert(*public_key, *secret_key);

        Ok(())
    }

    pub(crate) fn evict_from_keypair_map(
        &self,
        public_key: &PublicKey,
    ) -> Result<(), SparkSdkError> {
        self.public_keys_to_secret_keys.write().remove(public_key);

        Ok(())
    }

    pub(crate) fn insert_to_noncepair_map<T: AsRef<[u8]>, U: AsRef<[u8]>>(
        &self,
        nonce_commitment: T,
        nonce: U,
    ) -> Result<(), SparkSdkError> {
        let nonce_commitment_hex = hex_encode(nonce_commitment);
        let nonce_hex = hex_encode(nonce);

        self.nonce_commitments
            .write()
            .insert(nonce_commitment_hex.clone(), nonce_hex.clone());

        Ok(())
    }

    #[allow(clippy::too_many_arguments)]
    fn prepare_frost_signing_job<T: AsRef<[u8]>>(
        &self,
        signing_public_key: T,
        user_frost_commitments: Option<Vec<u8>>,
        spark_frost_commitments: HashMap<String, SparkOperatorCommitment>,
        serialized_bitcoin_tx: Vec<u8>,
        prevout_to_use: usize,
        signing_input_index: usize,
        parent_tx: &Transaction,
        adaptor_public_key: Vec<u8>,
        verifying_key: Vec<u8>,
    ) -> Result<FrostSigningJob, SparkSdkError> {
        let job_id = generate_signing_job_id();

        let signing_secret_key = self.sensitive_expose_secret_key_from_pubkey(
            &PublicKey::from_slice(signing_public_key.as_ref())?,
            false,
        )?;
        let frost_nonces = self.sensitive_create_if_not_found_expose_nonces_from_commitments(
            user_frost_commitments.as_deref(),
        )?;
        let marshalized_frost_nonces = marshal_frost_nonces(&frost_nonces)?;

        let marshalized_frost_commitments = marshal_frost_commitments(frost_nonces.commitments())?;

        let key_package = create_user_key_package(&signing_secret_key.secret_bytes());
        let transaction = bitcoin_tx_from_bytes(&serialized_bitcoin_tx)?;

        let sighash = sighash_from_tx_new(
            &transaction,
            signing_input_index,
            &parent_tx.output[prevout_to_use],
        )?;
        let message = sighash.to_vec();

        Ok(FrostSigningJob {
            job_id,
            message,
            key_package: Some(key_package),
            verifying_key,
            nonce: Some(marshalized_frost_nonces),
            commitments: spark_frost_commitments,
            user_commitments: Some(marshalized_frost_commitments),
            adaptor_public_key,
        })
    }
}

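/// Builds the single-participant FROST key package for the user: the secret share is
/// the signing secret key itself, the only public share is the corresponding public
/// key under `FROST_USER_IDENTIFIER`, and `min_signers` is
/// `FROST_USER_KEY_PACKAGE_MIN_SIGNERS`.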
pub(crate) fn create_user_key_package(
    signing_secret_key: &[u8],
) -> spark_protos::frost::KeyPackage {
    let user_identifier = FROST_USER_IDENTIFIER;
    let secp = Secp256k1::new();
    let secret_key = SecretKey::from_slice(signing_secret_key).unwrap();
    let public_key = PublicKey::from_secret_key(&secp, &secret_key);

    let mut public_shares = HashMap::new();
    public_shares.insert(user_identifier.to_string(), public_key.serialize().to_vec());

    spark_protos::frost::KeyPackage {
        identifier: user_identifier.to_string(),
        secret_share: signing_secret_key.to_vec(),
        public_shares,
        public_key: public_key.serialize().to_vec(),
        min_signers: FROST_USER_KEY_PACKAGE_MIN_SIGNERS,
    }
}

use spark_protos::common::SigningCommitment as ProtoSigningCommitments;
pub(crate) fn marshal_frost_commitments(
    commitments: &FrostSigningCommitments,
) -> Result<ProtoSigningCommitments, SparkSdkError> {
    let hiding = commitments.hiding().serialize().unwrap();
    let binding = commitments.binding().serialize().unwrap();

    Ok(ProtoSigningCommitments { hiding, binding })
}

use spark_protos::frost::SigningNonce as ProtoSigningNonce;
pub(crate) fn marshal_frost_nonces(
    nonce: &FrostSigningNonces,
) -> Result<ProtoSigningNonce, SparkSdkError> {
    let hiding = nonce.hiding().serialize();
    let binding = nonce.binding().serialize();

    Ok(ProtoSigningNonce { hiding, binding })
}

pub(crate) fn _unmarshal_frost_nonces(
    nonce: &ProtoSigningNonce,
) -> Result<FrostSigningNonces, SparkSdkError> {
    let hiding_nonce = FrostNonce::deserialize(&nonce.hiding).unwrap();
    let binding_nonce = FrostNonce::deserialize(&nonce.binding).unwrap();

    Ok(FrostSigningNonces::from_nonces(hiding_nonce, binding_nonce))
}

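/// Reconstructs the serialized-commitments byte string used as the key in
/// `nonce_commitments`: a fixed prefix (`00230f8ab3`, presumably the serialization
/// header for this FROST ciphersuite) followed by the hiding and binding commitment
/// bytes taken from the proto message.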
fn serialize_marshalized_frost_commitments(
    commitments: &ProtoSigningCommitments,
) -> Result<Vec<u8>, SparkSdkError> {
    let hiding = commitments.hiding.clone();
    let binding = commitments.binding.clone();

    let prefix_hex = "00230f8ab3";
    let hiding_hex = hex_encode(&hiding);
    let binding_hex = hex_encode(&binding);

    let commitments_hex = format!("{}{}{}", prefix_hex, hiding_hex, binding_hex);
    Ok(hex_decode(&commitments_hex).unwrap())
}

fn generate_signing_job_id() -> String {
    Uuid::now_v7().to_string()
}

type SignatureSharesType = HashMap<String, Vec<u8>>;
type PublicSharesType = HashMap<String, Vec<u8>>;
type SigningKeyshareType = Option<SigningKeyshare>;
type SigningCommitmentsType = HashMap<String, SparkOperatorCommitment>;

fn get_signature_data_from_signing_result(
    signing_result: &Option<SigningResult>,
) -> Result<
    (
        SignatureSharesType,
        PublicSharesType,
        SigningKeyshareType,
        SigningCommitmentsType,
    ),
    SparkSdkError,
> {
    let signing_result = signing_result.clone().unwrap();
    let signature_shares = signing_result.signature_shares;
    let public_shares = signing_result.public_keys;
    let signing_keyshare = signing_result.signing_keyshare;
    let commitments = signing_result.signing_nonce_commitments;

    Ok((
        signature_shares,
        public_shares,
        signing_keyshare,
        commitments,
    ))
}

fn create_refund_tx(
    sequence: u32,
    node_outpoint: bitcoin::OutPoint,
    amount_sats: bitcoin::Amount,
    receiving_pubkey: &bitcoin::secp256k1::PublicKey,
    network: bitcoin::Network,
) -> Result<bitcoin::Transaction, SparkSdkError> {
    let mut new_refund_tx = bitcoin::Transaction {
        version: bitcoin::transaction::Version::TWO,
        lock_time: bitcoin::absolute::LockTime::ZERO,
        input: vec![],
        output: vec![],
    };

    new_refund_tx.input.push(bitcoin::TxIn {
        previous_output: node_outpoint,
        script_sig: bitcoin::ScriptBuf::default(),
        sequence: bitcoin::Sequence(sequence),
        witness: bitcoin::Witness::default(),
    });

    let secp: bitcoin::key::Secp256k1<bitcoin::secp256k1::All> =
        bitcoin::secp256k1::Secp256k1::new();
    let addr =
        bitcoin::Address::p2tr(&secp, receiving_pubkey.x_only_public_key().0, None, network);
    let refund_pk_script = addr.script_pubkey();

    new_refund_tx.output.push(bitcoin::TxOut {
        value: amount_sats,
        script_pubkey: refund_pk_script,
    });

    new_refund_tx.output.push(ephemeral_anchor_output());

    Ok(new_refund_tx)
}

#[cfg(test)]
mod frost_to_proto_conversions_test {
    use super::*;

    fn test_unmarshal_frost_commitments(
        commitment: &ProtoSigningCommitments,
    ) -> Result<FrostSigningCommitments, SparkSdkError> {
        let hiding = commitment.hiding.clone();
        let binding = commitment.binding.clone();

        let hiding_nonce = FrostNonceCommitment::deserialize(&hiding).unwrap();
        let binding_nonce = FrostNonceCommitment::deserialize(&binding).unwrap();

        Ok(FrostSigningCommitments::new(hiding_nonce, binding_nonce))
    }

    #[test]
    fn test_frost_to_proto_conversions() {
        let hiding_sk = SecretKey::new(&mut rand::thread_rng());
        let binding_sk = SecretKey::new(&mut rand::thread_rng());
        let hiding_sk_bytes = hiding_sk.secret_bytes().to_vec();
        let binding_sk_bytes = binding_sk.secret_bytes().to_vec();

        let hiding_nonce = FrostNonce::deserialize(&hiding_sk_bytes).unwrap();
        let binding_nonce = FrostNonce::deserialize(&binding_sk_bytes).unwrap();

        let frost_nonces = FrostSigningNonces::from_nonces(hiding_nonce, binding_nonce);
        let frost_commitments = frost_nonces.commitments();

        let marshalized_nonces = marshal_frost_nonces(&frost_nonces).unwrap();
        let unmarshalized_nonces = _unmarshal_frost_nonces(&marshalized_nonces).unwrap();

        let marshalized_commitments = marshal_frost_commitments(frost_commitments).unwrap();
        let unmarshalized_commitments =
            test_unmarshal_frost_commitments(&marshalized_commitments).unwrap();

        assert_eq!(frost_nonces, unmarshalized_nonces);
        assert_eq!(frost_commitments, &unmarshalized_commitments);
    }
}

fn get_key_type_index(key_type: SparkKeyType) -> Result<ChildNumber, SparkSdkError> {
    let index = match key_type {
        SparkKeyType::Identity => 0,
        SparkKeyType::BaseSigning => 1,
        SparkKeyType::TemporarySigning => 2,
    };

    let idx = get_child_number(index, true)?;

    Ok(idx)
}

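/// Maps a leaf id to a hardened child number by hashing the id with SHA-256, taking
/// the first four bytes as a big-endian u32, and reducing modulo 2^31 so the index
/// stays in the hardened range.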
fn get_leaf_index(leaf_id: &str) -> Result<ChildNumber, SparkSdkError> {
    let mut hasher = Sha256::new();
    hasher.update(leaf_id.as_bytes());
    let hash = hasher.finalize();

    let chunk = &hash[0..4];
    let amount = u32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]) % 0x80000000;

    get_child_number(amount, true)
}

#[cfg(test)]
mod leaf_index_test {
    use super::*;

    #[test]
    fn test_get_leaf_index() {
        let leaf_id_1 = "019534f0-f4e2-7845-87fe-c6ea2fa69f80";
        let leaf_id_2 = "019534f0-f4e2-7868-b3fa-d06dc10b79e7";
        let leaf_id_3 = "dbb5c090-dca4-47ec-9f20-41edd4594dcf";

        let child_number_1 = get_leaf_index(leaf_id_1).unwrap();
        let child_number_2 = get_leaf_index(leaf_id_2).unwrap();
        let child_number_3 = get_leaf_index(leaf_id_3).unwrap();

        assert_eq!(
            child_number_1,
            ChildNumber::from_hardened_idx(1137822116).unwrap()
        );
        assert_eq!(
            child_number_2,
            ChildNumber::from_hardened_idx(1199130649).unwrap()
        );
        assert_eq!(
            child_number_3,
            ChildNumber::from_hardened_idx(1743780874).unwrap()
        );
    }
}

fn prepare_path(
    purpose_index: ChildNumber,
    account_index: ChildNumber,
    key_type_index: ChildNumber,
    leaf_index: Option<ChildNumber>,
) -> Vec<ChildNumber> {
    let mut path = vec![purpose_index, account_index, key_type_index];

    if let Some(leaf_index) = leaf_index {
        path.push(leaf_index);
    }

    path
}

fn get_child_number(index: u32, hardened: bool) -> Result<ChildNumber, SparkSdkError> {
    if index > 0x7FFFFFFF {
        return Err(SparkSdkError::from(CryptoError::InvalidKeyType {
            key_type: format!("Index exceeds range: {}", index).to_string(),
        }));
    }

    if hardened {
        ChildNumber::from_hardened_idx(index).map_err(|_| {
            SparkSdkError::from(CryptoError::InvalidKeyType {
                key_type: format!("Index exceeds hardened range: {}", index).to_string(),
            })
        })
    } else {
        ChildNumber::from_normal_idx(index).map_err(|_| {
            SparkSdkError::from(CryptoError::InvalidKeyType {
                key_type: format!("Index exceeds normal range: {}", index).to_string(),
            })
        })
    }
}