1use serde_json::Value;
29use std::env;
30use std::path::Path;
31use std::time::Duration;
32use thiserror::Error;
33
34pub mod environment_pools;
35
36pub use environment_pools::EnvironmentPoolsClient;
37
38pub use synth_ai_core as core;
40pub use synth_ai_core_types as types;
41
42pub use synth_ai_core::{
44 api::GraphEvolveClient,
45 api::PromptLearningResult,
46 api::{EvalJobRequest, EvalJobStatus, GepaJobRequest, MiproJobRequest, PolicyJobStatus},
47 api::{
48 GraphCompletionRequest, GraphCompletionResponse, RlmOptions, VerifierOptions,
49 VerifierResponse,
50 },
51 container::ContainerClient,
53 orchestration::{CandidateInfo, GEPAProgress, ProgressTracker},
54 orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
56 tunnels::errors::TunnelError,
57 tunnels::open_tunnel,
58 tunnels::types::{TunnelBackend, TunnelHandle},
60 ApplicationErrorType,
61 ApplicationStatus,
62 Artifact,
63 BufferedHandler,
64 CallbackHandler,
65 ContextOverride,
66 ContextOverrideStatus,
67 Lever,
68 LeverActor,
69 LeverConstraints,
70 LeverFormat,
71 LeverKind,
72 LeverMutability,
73 LeverMutation,
74 LeverProvenance,
75 LeverSnapshot,
76 MiproLeverSummary,
77 ScopeKey,
78 ScopeKind,
79 Sensor,
80 SensorFrame,
81 SensorFrameSummary,
82 SensorKind,
83 CoreError,
85 Criterion,
86 CriterionScoreData,
87 EventObjectiveAssignment,
88 GraphEvolveJob,
90 JobStreamer,
91 JobType,
93 JsonHandler,
94 Judgement,
95 LLMCallRecord,
96 LLMContentPart,
97 LLMMessage,
98 LLMUsage,
99 ContainerDeployResponse,
100 ContainerDeploySpec,
101 ContainerDeployStatus,
102 ContainerDeploymentInfo,
103 ContainerLimits,
104 MarkovBlanketMessage,
105 MessageContent,
106 ObjectiveSpec,
107 OutcomeObjectiveAssignment,
108 RewardObservation,
109 Rubric,
110 RubricAssignment,
111 SessionTimeStep,
112 SessionTrace,
113 SessionTracer,
115 StreamConfig,
116 StreamEndpoints,
117 StreamHandler,
118 StreamMessage,
119 StreamType,
121 SynthClient as CoreClient,
123 TimeRecord,
124 ToolCallResult,
125 ToolCallSpec,
126 TraceUploadClient,
128 TracingEvent,
129 UploadUrlResponse,
130};
131
132#[cfg(feature = "libsql")]
133pub use synth_ai_core::tracing::LibsqlTraceStorage;
134
135pub use synth_ai_core::data::{
136 CalibrationExample, EventRewardRecord, GoldExample, InstanceObjectiveAssignment,
137 OutcomeRewardRecord, RewardAggregates, SynthModelName,
138};
139pub use synth_ai_core::tracing::{LLMChunk, LLMRequestParams};
140
/// Crate version string, captured from `Cargo.toml` at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Default backend base URL used when no explicit URL is supplied.
pub const DEFAULT_BASE_URL: &str = synth_ai_core::urls::DEFAULT_BACKEND_URL;

/// Environment variable consulted for the API key by [`Synth::from_env`].
pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
149
/// Errors surfaced by this SDK crate.
#[derive(Debug, Error)]
pub enum Error {
    /// No API key was available (see [`API_KEY_ENV`]).
    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
    MissingApiKey,

    /// Invalid or incomplete client/builder configuration.
    #[error("configuration error: {0}")]
    Config(String),

    /// Error propagated from the core client (`?`-convertible via `#[from]`).
    #[error(transparent)]
    Core(#[from] synth_ai_core::CoreError),

    /// Error propagated from the tunnel subsystem (`?`-convertible via `#[from]`).
    #[error(transparent)]
    Tunnel(#[from] TunnelError),

    /// A job could not be submitted to the backend.
    #[error("job submission failed: {0}")]
    Submission(String),

    /// A submitted job reached a failed terminal state.
    #[error("job failed: {0}")]
    JobFailed(String),

    /// An operation exceeded its allotted duration.
    #[error("timeout after {0:?}")]
    Timeout(Duration),
}

/// Crate-wide result alias using [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
188
/// High-level client for the Synth backend.
///
/// Wraps the core [`synth_ai_core::SynthClient`] and retains the credentials
/// needed to construct auxiliary clients (tunnels, environment pools, and the
/// optimize/eval builders).
pub struct Synth {
    // Raw API key; only ever exposed in masked form (see `api_key_masked`).
    api_key: String,
    // Backend base URL the client was constructed with.
    base_url: String,
    // Underlying core client that performs the actual API calls.
    client: synth_ai_core::SynthClient,
}
212
impl Synth {
    /// Creates a client from an explicit API key and an optional base URL.
    ///
    /// Falls back to [`DEFAULT_BASE_URL`] when `base_url` is `None`.
    ///
    /// # Errors
    /// Returns [`Error::Core`] if the underlying core client cannot be built.
    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
        let api_key = api_key.into();
        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();

        let client =
            synth_ai_core::SynthClient::new(&api_key, Some(&base_url)).map_err(Error::Core)?;

        Ok(Self {
            api_key,
            base_url,
            client,
        })
    }

    /// Builds a client from the environment.
    ///
    /// Reads the key from [`API_KEY_ENV`]; the base URL comes from
    /// `SYNTH_BASE_URL`, then `SYNTH_BACKEND_URL`, then [`DEFAULT_BASE_URL`].
    ///
    /// # Errors
    /// Returns [`Error::MissingApiKey`] when the key variable is unset.
    pub fn from_env() -> Result<Self> {
        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
        // NOTE(review): the final `or_else(Some(DEFAULT...))` is redundant —
        // `Self::new` already defaults a `None` base URL — but harmless.
        let base_url = env::var("SYNTH_BASE_URL")
            .ok()
            .or_else(|| env::var("SYNTH_BACKEND_URL").ok())
            .or_else(|| Some(DEFAULT_BASE_URL.to_string()));
        Self::new(api_key, base_url.as_deref())
    }

    /// Returns the API key in masked form, safe to log.
    pub fn api_key_masked(&self) -> String {
        synth_ai_core::auth::mask_str(&self.api_key)
    }

    /// Returns the backend base URL this client talks to.
    pub fn base_url(&self) -> &str {
        &self.base_url
    }

    /// Borrows the underlying core client for lower-level operations.
    pub fn core(&self) -> &synth_ai_core::SynthClient {
        &self.client
    }

    /// Creates an environment-pools client reusing this client's credentials.
    pub fn environment_pools(&self) -> Result<EnvironmentPoolsClient> {
        EnvironmentPoolsClient::new(self.api_key.clone(), Some(&self.base_url))
    }

    /// Starts building a prompt-optimization run. See [`OptimizeBuilder`].
    pub fn optimize(&self) -> OptimizeBuilder {
        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Starts building an evaluation run. See [`EvalBuilder`].
    pub fn eval(&self) -> EvalBuilder {
        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Opens a tunnel exposing local `port` through the chosen backend.
    ///
    /// NOTE(review): the trailing positional flags are forwarded verbatim to
    /// `open_tunnel`; confirm their meaning against the core signature.
    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
        synth_ai_core::tunnels::open_tunnel(
            backend,
            port,
            Some(self.api_key.clone()),
            Some(self.base_url.clone()),
            None,
            false,
            true,
            false,
        )
        .await
        .map_err(Error::Tunnel)
    }

    /// Submits a GEPA prompt-learning job; returns the new job id.
    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_gepa(request)
            .await
            .map_err(Error::Core)
    }

    /// Submits a MIPRO prompt-learning job; returns the new job id.
    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_mipro(request)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the current status/result snapshot for a job.
    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .get_status(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Polls a job every `interval_secs` until it completes or
    /// `timeout_secs` elapses.
    pub async fn poll_job(
        &self,
        job_id: &str,
        timeout_secs: f64,
        interval_secs: f64,
    ) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .poll_until_complete(job_id, timeout_secs, interval_secs)
            .await
            .map_err(Error::Core)
    }

    /// Cancels a job, with an optional human-readable reason.
    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .cancel(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Pauses a running job, with an optional reason.
    pub async fn pause_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .pause(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Resumes a paused job, with an optional reason.
    pub async fn resume_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .resume(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Runs a graph completion request.
    pub async fn complete(
        &self,
        request: GraphCompletionRequest,
    ) -> Result<GraphCompletionResponse> {
        self.client
            .graphs()
            .complete(request)
            .await
            .map_err(Error::Core)
    }

    /// Lists graphs, optionally filtered by `kind` and capped at `limit`.
    pub async fn list_graphs(&self, kind: Option<&str>, limit: Option<i32>) -> Result<Value> {
        self.client
            .graphs()
            .list_graphs(kind, limit)
            .await
            .map_err(Error::Core)
    }

    /// Verifies a trace against a rubric using the graph verifier.
    pub async fn verify(
        &self,
        trace: serde_json::Value,
        rubric: serde_json::Value,
        options: Option<VerifierOptions>,
    ) -> Result<VerifierResponse> {
        self.client
            .graphs()
            .verify(trace, rubric, options)
            .await
            .map_err(Error::Core)
    }

    /// Runs RLM inference for `query` over the supplied `context`.
    pub async fn rlm_inference(
        &self,
        query: &str,
        context: Value,
        options: Option<RlmOptions>,
    ) -> Result<Value> {
        self.client
            .graphs()
            .rlm_inference(query, context, options)
            .await
            .map_err(Error::Core)
    }

    /// Borrows the graph-evolve sub-client.
    pub fn graph_evolve(&self) -> GraphEvolveClient<'_> {
        self.client.graph_evolve()
    }

    /// Builds a [`GraphEvolveJob`] handle from a raw job payload.
    pub fn graph_evolve_job_from_payload(&self, payload: Value) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_payload(payload, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Builds a [`GraphEvolveJob`] handle from an existing job id.
    pub fn graph_evolve_job_from_id(&self, job_id: &str) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_job_id(job_id, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Convenience wrapper over [`Synth::verify`] with default options.
    pub async fn verify_rubric(&self, trace: Value, rubric: Value) -> Result<VerifierResponse> {
        self.verify(trace, rubric, None).await
    }

    /// Creates a container client for `base_url`, defaulting the key to this
    /// client's API key when `api_key` is `None`.
    pub fn container_client(&self, base_url: &str, api_key: Option<&str>) -> ContainerClient {
        let key = api_key.unwrap_or(self.api_key.as_str());
        ContainerClient::new(base_url, Some(key))
    }

    /// Builds and deploys a container image from a local context directory.
    pub async fn deploy_container_from_dir(
        &self,
        spec: ContainerDeploySpec,
        context_dir: impl AsRef<Path>,
        wait_for_ready: bool,
        build_timeout_s: f64,
    ) -> Result<ContainerDeployResponse> {
        self.client
            .container()
            .deploy_from_dir(spec, context_dir, wait_for_ready, build_timeout_s)
            .await
            .map_err(Error::Core)
    }

    /// Lists the account's container deployments.
    pub async fn list_container_deployments(&self) -> Result<Vec<ContainerDeploymentInfo>> {
        self.client.container().list().await.map_err(Error::Core)
    }

    /// Fetches a single container deployment by id.
    pub async fn get_container_deployment(
        &self,
        deployment_id: &str,
    ) -> Result<ContainerDeploymentInfo> {
        self.client
            .container()
            .get(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the status of a container deployment by id.
    pub async fn get_container_deployment_status(
        &self,
        deployment_id: &str,
    ) -> Result<ContainerDeployStatus> {
        self.client
            .container()
            .status(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the results of an evaluation job.
    pub async fn eval_results(&self, job_id: &str) -> Result<Value> {
        self.client
            .eval()
            .get_results(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Downloads the raw trace archive for an evaluation job.
    pub async fn download_eval_traces(&self, job_id: &str) -> Result<Vec<u8>> {
        self.client
            .eval()
            .download_traces(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Creates a trace-upload client with a 120-second timeout.
    pub fn trace_uploader(&self) -> Result<TraceUploadClient> {
        TraceUploadClient::new(&self.base_url, &self.api_key, 120).map_err(Error::Core)
    }

    /// Uploads a single trace; returns the upload identifier/URL from the core
    /// client. `expires_in_seconds` bounds how long the upload stays valid.
    pub async fn upload_trace(
        &self,
        trace: Value,
        expires_in_seconds: Option<i64>,
    ) -> Result<String> {
        let uploader = self.trace_uploader()?;
        uploader
            .upload_trace(&trace, None, expires_in_seconds)
            .await
            .map_err(Error::Core)
    }

    /// Streams a job's events, invoking `callback` for each message, until the
    /// job reaches a terminal state; returns the final payload.
    pub async fn stream_job_with_callback<F>(
        &self,
        job_id: &str,
        endpoints: StreamEndpoints,
        callback: F,
    ) -> Result<Value>
    where
        F: Fn(&StreamMessage) + Send + Sync + 'static,
    {
        let mut streamer =
            JobStreamer::new(&self.base_url, &self.api_key, job_id).with_endpoints(endpoints);
        streamer.add_handler(CallbackHandler::new(callback));
        streamer.stream_until_terminal().await.map_err(Error::Core)
    }
}
548
549impl std::fmt::Debug for Synth {
550 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
551 f.debug_struct("Synth")
552 .field("api_key", &self.api_key_masked())
553 .field("base_url", &self.base_url)
554 .finish()
555 }
556}
557
/// Builder for a prompt-optimization run; obtained via [`Synth::optimize`].
pub struct OptimizeBuilder {
    // Credentials captured from the parent client.
    api_key: String,
    base_url: String,
    // Required: URL of the container hosting the task app.
    container_url: Option<String>,
    // Optional model override forwarded into the job config.
    model: Option<String>,
    // Optional candidate-count override forwarded into the job config.
    num_candidates: Option<u32>,
    // Overall wait budget (default one hour; see `new`).
    timeout: Duration,
    // Whether to stream events while waiting (default true; see `new`).
    stream: bool,
}
572
573impl OptimizeBuilder {
574 fn new(api_key: String, base_url: String) -> Self {
575 Self {
576 api_key,
577 base_url,
578 container_url: None,
579 model: None,
580 num_candidates: None,
581 timeout: Duration::from_secs(3600),
582 stream: true,
583 }
584 }
585
586 pub fn container(mut self, url: impl Into<String>) -> Self {
588 self.container_url = Some(url.into());
589 self
590 }
591
592 pub fn model(mut self, model: impl Into<String>) -> Self {
594 self.model = Some(model.into());
595 self
596 }
597
598 pub fn num_candidates(mut self, n: u32) -> Self {
600 self.num_candidates = Some(n);
601 self
602 }
603
604 pub fn timeout(mut self, timeout: Duration) -> Self {
606 self.timeout = timeout;
607 self
608 }
609
610 pub fn stream(mut self, enabled: bool) -> Self {
612 self.stream = enabled;
613 self
614 }
615
616 pub async fn run(self) -> Result<OptimizeResult> {
618 let container_url = self
619 .container_url
620 .ok_or_else(|| Error::Config("container URL is required".into()))?;
621
622 let mut config = serde_json::json!({
624 "container_url": container_url,
625 });
626
627 if let Some(model) = &self.model {
628 config["model"] = serde_json::json!(model);
629 }
630 if let Some(n) = self.num_candidates {
631 config["num_candidates"] = serde_json::json!(n);
632 }
633
634 let mut job =
636 PromptLearningJob::from_dict(config, Some(&self.api_key), Some(&self.base_url), None)
637 .map_err(Error::Core)?;
638
639 let job_id = job.submit().await.map_err(Error::Core)?;
640
641 let status = if self.stream {
643 job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
644 self.timeout.as_secs_f64(),
645 None,
646 )
647 .await
648 .map_err(Error::Core)?
649 } else {
650 job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
651 .await
652 .map_err(Error::Core)?
653 };
654
655 let results = job.get_results().await.map_err(Error::Core)?;
657
658 Ok(OptimizeResult {
659 job_id,
660 status,
661 results,
662 })
663 }
664}
665
/// Outcome of an [`OptimizeBuilder::run`] call.
#[derive(Debug, Clone)]
pub struct OptimizeResult {
    /// Backend identifier of the submitted job.
    pub job_id: String,
    /// Final job status snapshot at completion.
    pub status: synth_ai_core::orchestration::PromptLearningResult,
    /// Retrieved optimization results (best candidate, rankings, rewards).
    pub results: PromptResults,
}
676
677impl OptimizeResult {
678 pub fn best_candidate(&self) -> Option<&str> {
680 self.results.best_candidate.as_deref()
681 }
682
683 pub fn best_reward(&self) -> Option<f64> {
685 self.results.best_reward
686 }
687
688 pub fn top_prompts(&self) -> &[RankedPrompt] {
690 &self.results.top_prompts
691 }
692
693 pub fn is_success(&self) -> bool {
695 self.status.status.is_success()
696 }
697}
698
/// Builder for an evaluation run; obtained via [`Synth::eval`].
pub struct EvalBuilder {
    // Credentials captured from the parent client.
    api_key: String,
    base_url: String,
    // Required: URL of the container hosting the task app.
    container_url: Option<String>,
    // Seeds to evaluate; empty by default.
    seeds: Vec<i64>,
    // Overall wait budget (default 30 minutes; see `new`).
    timeout: Duration,
}
711
712impl EvalBuilder {
713 fn new(api_key: String, base_url: String) -> Self {
714 Self {
715 api_key,
716 base_url,
717 container_url: None,
718 seeds: vec![],
719 timeout: Duration::from_secs(1800),
720 }
721 }
722
723 pub fn container(mut self, url: impl Into<String>) -> Self {
725 self.container_url = Some(url.into());
726 self
727 }
728
729 pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
731 self.seeds = seeds;
732 self
733 }
734
735 pub fn timeout(mut self, timeout: Duration) -> Self {
737 self.timeout = timeout;
738 self
739 }
740
741 pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
743 let container_url = self
744 .container_url
745 .ok_or_else(|| Error::Config("container URL is required".into()))?;
746
747 let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
748 .map_err(Error::Core)?;
749
750 let request = EvalJobRequest {
751 app_id: None,
752 container_url,
753 container_worker_token: None,
754 container_api_key: None,
755 env_name: "default".to_string(),
756 env_config: None,
757 verifier_config: None,
758 seeds: self.seeds,
759 policy: synth_ai_core::api::PolicyConfig::default(),
760 max_concurrent: None,
761 timeout: None,
762 };
763
764 let job_id = client.eval().submit(request).await.map_err(Error::Core)?;
765
766 let status = client
767 .eval()
768 .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
769 .await
770 .map_err(Error::Core)?;
771
772 Ok(status)
773 }
774}
775
776pub async fn optimize(container_url: &str) -> Result<OptimizeResult> {
784 Synth::from_env()?
785 .optimize()
786 .container(container_url)
787 .run()
788 .await
789}
790
791pub async fn eval(container_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
793 Synth::from_env()?
794 .eval()
795 .container(container_url)
796 .seeds(seeds)
797 .run()
798 .await
799}
800
801pub fn gepa_candidate_to_initial_prompt(seed_candidate: &Value) -> Result<Value> {
805 synth_ai_core::config::gepa_candidate_to_initial_prompt(seed_candidate).map_err(Error::Core)
806}
807
#[cfg(test)]
mod tests {
    use super::*;

    // The previous assertion (`err.is_err() || err.is_ok()`) was a tautology
    // that could never fail. Instead, verify the actual Debug contract: when a
    // client can be constructed, its Debug output must never contain the raw
    // API key (it is masked via `api_key_masked`).
    #[test]
    fn test_synth_debug() {
        let raw_key = "sk-test-key-1234567890abcdef";
        if let Ok(client) = Synth::new(raw_key, None) {
            let rendered = format!("{client:?}");
            assert!(rendered.contains("Synth"));
            assert!(rendered.contains("base_url"));
            // The unmasked key must not leak through Debug.
            assert!(!rendered.contains(raw_key));
        }
        // `from_env` may fail in CI (no key set); either way the error must
        // format cleanly through the `thiserror`-derived Display impl.
        if let Err(err) = Synth::from_env() {
            let _ = err.to_string();
        }
    }

    #[test]
    fn test_version() {
        // Compile-time env! guarantees a value; guard against an empty string.
        assert!(!VERSION.is_empty());
    }
}
823}