1use serde_json::Value;
29use std::env;
30use std::path::Path;
31use std::time::Duration;
32use thiserror::Error;
33
34pub mod environment_pools;
35
36pub use environment_pools::EnvironmentPoolsClient;
37
38pub use synth_ai_core as core;
40pub use synth_ai_core_types as types;
41
42pub use synth_ai_core::{
44 api::GraphEvolveClient,
45 api::PromptLearningResult,
46 api::{EvalJobRequest, EvalJobStatus, GepaJobRequest, MiproJobRequest, PolicyJobStatus},
47 api::{
48 GraphCompletionRequest, GraphCompletionResponse, RlmOptions, VerifierOptions,
49 VerifierResponse,
50 },
51 localapi::TaskAppClient,
53 orchestration::{CandidateInfo, GEPAProgress, ProgressTracker},
54 orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
56 tunnels::errors::TunnelError,
57 tunnels::open_tunnel,
58 tunnels::types::{TunnelBackend, TunnelHandle},
60 ApplicationErrorType,
61 ApplicationStatus,
62 Artifact,
63 BufferedHandler,
64 CallbackHandler,
65 ContextOverride,
66 ContextOverrideStatus,
67 CoreError,
69 Criterion,
70 CriterionScoreData,
71 EventObjectiveAssignment,
72 GraphEvolveJob,
74 JobStreamer,
75 JobType,
77 JsonHandler,
78 Judgement,
79 LLMCallRecord,
80 LLMContentPart,
81 LLMMessage,
82 LLMUsage,
83 LocalApiDeployResponse,
84 LocalApiDeploySpec,
85 LocalApiDeployStatus,
86 LocalApiDeploymentInfo,
87 LocalApiLimits,
88 MarkovBlanketMessage,
89 MessageContent,
90 ObjectiveSpec,
91 OutcomeObjectiveAssignment,
92 RewardObservation,
93 Rubric,
94 RubricAssignment,
95 SessionTimeStep,
96 SessionTrace,
97 SessionTracer,
99 StreamConfig,
100 StreamEndpoints,
101 StreamHandler,
102 StreamMessage,
103 StreamType,
105 SynthClient as CoreClient,
107 TimeRecord,
108 ToolCallResult,
109 ToolCallSpec,
110 TraceUploadClient,
112 TracingEvent,
113 UploadUrlResponse,
114};
115
116#[cfg(feature = "libsql")]
117pub use synth_ai_core::tracing::LibsqlTraceStorage;
118
119pub use synth_ai_core::data::{
120 CalibrationExample, EventRewardRecord, GoldExample, InstanceObjectiveAssignment,
121 OutcomeRewardRecord, RewardAggregates, SynthModelName,
122};
123pub use synth_ai_core::tracing::{LLMChunk, LLMRequestParams};
124
/// Crate version string, captured from Cargo metadata at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Default API endpoint used when no base URL is supplied explicitly.
pub const DEFAULT_BASE_URL: &str = "https://api.usesynth.ai";

/// Environment variable consulted by [`Synth::from_env`] for the API key.
pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
133
/// Errors returned by this SDK's high-level entry points.
#[derive(Debug, Error)]
pub enum Error {
    /// No API key was provided and the `SYNTH_API_KEY` environment
    /// variable is unset (see [`Synth::from_env`]).
    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
    MissingApiKey,

    /// A builder or client was configured incorrectly (e.g. a required
    /// field such as the task app URL was never set).
    #[error("configuration error: {0}")]
    Config(String),

    /// Error propagated unchanged from the underlying core client.
    #[error(transparent)]
    Core(#[from] synth_ai_core::CoreError),

    /// Error propagated unchanged from the tunnel subsystem.
    #[error(transparent)]
    Tunnel(#[from] TunnelError),

    /// A job could not be submitted to the backend.
    #[error("job submission failed: {0}")]
    Submission(String),

    /// A submitted job reached a failed terminal state.
    #[error("job failed: {0}")]
    JobFailed(String),

    /// An operation exceeded its allotted wait budget.
    #[error("timeout after {0:?}")]
    Timeout(Duration),
}
169
170pub type Result<T> = std::result::Result<T, Error>;
172
/// High-level Synth API client.
///
/// Holds the credentials plus a configured [`synth_ai_core::SynthClient`]
/// that performs the actual HTTP work; every method here delegates to it.
pub struct Synth {
    // Raw API key; only exposed in masked form (see `api_key_masked`).
    api_key: String,
    // Base URL used for all requests (defaults to `DEFAULT_BASE_URL`).
    base_url: String,
    // Underlying core client all methods delegate to.
    client: synth_ai_core::SynthClient,
}
196
impl Synth {
    /// Creates a client from an explicit API key and an optional base URL.
    ///
    /// When `base_url` is `None`, [`DEFAULT_BASE_URL`] is used.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Core`] if the underlying core client cannot be built.
    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
        let api_key = api_key.into();
        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();

        let client =
            synth_ai_core::SynthClient::new(&api_key, Some(&base_url)).map_err(Error::Core)?;

        Ok(Self {
            api_key,
            base_url,
            client,
        })
    }

    /// Creates a client from the environment.
    ///
    /// Reads the API key from [`API_KEY_ENV`] and an optional base URL
    /// override from `SYNTH_BASE_URL`.
    ///
    /// # Errors
    ///
    /// Returns [`Error::MissingApiKey`] if the key variable is unset.
    pub fn from_env() -> Result<Self> {
        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
        let base_url = env::var("SYNTH_BASE_URL").ok();
        Self::new(api_key, base_url.as_deref())
    }

    /// Returns a masked rendering of the API key, safe for logging.
    pub fn api_key_masked(&self) -> String {
        synth_ai_core::auth::mask_str(&self.api_key)
    }

    /// Returns the base URL this client talks to.
    pub fn base_url(&self) -> &str {
        &self.base_url
    }

    /// Returns the underlying core client for lower-level access.
    pub fn core(&self) -> &synth_ai_core::SynthClient {
        &self.client
    }

    /// Builds a client for the environment-pools API, sharing this
    /// client's credentials and base URL.
    pub fn environment_pools(&self) -> Result<EnvironmentPoolsClient> {
        EnvironmentPoolsClient::new(self.api_key.clone(), Some(&self.base_url))
    }

    /// Starts configuring a prompt-optimization job; finish with
    /// [`OptimizeBuilder::run`].
    pub fn optimize(&self) -> OptimizeBuilder {
        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Starts configuring an evaluation job; finish with
    /// [`EvalBuilder::run`].
    pub fn eval(&self) -> EvalBuilder {
        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Opens a tunnel exposing local `port` through the given backend.
    ///
    /// NOTE(review): the trailing `None`/`false`/`true`/`false` arguments
    /// map positionally onto `open_tunnel`'s extra options; their meanings
    /// are not visible from this file — confirm against
    /// `synth_ai_core::tunnels::open_tunnel` before changing them.
    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
        synth_ai_core::tunnels::open_tunnel(
            backend,
            port,
            Some(self.api_key.clone()),
            Some(self.base_url.clone()),
            None,
            false,
            true,
            false,
        )
        .await
        .map_err(Error::Tunnel)
    }

    /// Submits a GEPA prompt-learning job and returns its job id.
    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_gepa(request)
            .await
            .map_err(Error::Core)
    }

    /// Submits a MIPRO prompt-learning job and returns its job id.
    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_mipro(request)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the current status/result of a prompt-learning job.
    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .get_status(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Polls a job until it reaches a terminal state.
    ///
    /// `timeout_secs` bounds the total wait; `interval_secs` is the delay
    /// between polls.
    pub async fn poll_job(
        &self,
        job_id: &str,
        timeout_secs: f64,
        interval_secs: f64,
    ) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .poll_until_complete(job_id, timeout_secs, interval_secs)
            .await
            .map_err(Error::Core)
    }

    /// Cancels a job, with an optional human-readable reason.
    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .cancel(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Pauses a job, with an optional human-readable reason.
    pub async fn pause_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .pause(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Resumes a paused job, with an optional human-readable reason.
    pub async fn resume_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .resume(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Runs a graph completion request.
    pub async fn complete(
        &self,
        request: GraphCompletionRequest,
    ) -> Result<GraphCompletionResponse> {
        self.client
            .graphs()
            .complete(request)
            .await
            .map_err(Error::Core)
    }

    /// Lists graphs, optionally filtered by `kind` and capped at `limit`.
    pub async fn list_graphs(&self, kind: Option<&str>, limit: Option<i32>) -> Result<Value> {
        self.client
            .graphs()
            .list_graphs(kind, limit)
            .await
            .map_err(Error::Core)
    }

    /// Scores a JSON trace against a JSON rubric via the verifier.
    pub async fn verify(
        &self,
        trace: serde_json::Value,
        rubric: serde_json::Value,
        options: Option<VerifierOptions>,
    ) -> Result<VerifierResponse> {
        self.client
            .graphs()
            .verify(trace, rubric, options)
            .await
            .map_err(Error::Core)
    }

    /// Runs RLM inference with a query string and JSON context.
    pub async fn rlm_inference(
        &self,
        query: &str,
        context: Value,
        options: Option<RlmOptions>,
    ) -> Result<Value> {
        self.client
            .graphs()
            .rlm_inference(query, context, options)
            .await
            .map_err(Error::Core)
    }

    /// Returns the graph-evolve sub-client borrowed from the core client.
    pub fn graph_evolve(&self) -> GraphEvolveClient<'_> {
        self.client.graph_evolve()
    }

    /// Reconstructs a graph-evolve job handle from a JSON payload,
    /// attaching this client's credentials.
    pub fn graph_evolve_job_from_payload(&self, payload: Value) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_payload(payload, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Reconstructs a graph-evolve job handle from a job id, attaching
    /// this client's credentials.
    pub fn graph_evolve_job_from_id(&self, job_id: &str) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_job_id(job_id, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Shorthand for [`Synth::verify`] with default (`None`) options.
    pub async fn verify_rubric(&self, trace: Value, rubric: Value) -> Result<VerifierResponse> {
        self.verify(trace, rubric, None).await
    }

    /// Builds a client for a task app at `base_url`, using `api_key` when
    /// given and falling back to this client's key otherwise.
    pub fn task_app_client(&self, base_url: &str, api_key: Option<&str>) -> TaskAppClient {
        let key = api_key.unwrap_or(self.api_key.as_str());
        TaskAppClient::new(base_url, Some(key))
    }

    /// Deploys a localapi from a build-context directory.
    ///
    /// `wait_for_ready` and `build_timeout_s` are forwarded to the core
    /// client; `build_timeout_s` is in seconds per its name.
    pub async fn deploy_localapi_from_dir(
        &self,
        spec: LocalApiDeploySpec,
        context_dir: impl AsRef<Path>,
        wait_for_ready: bool,
        build_timeout_s: f64,
    ) -> Result<LocalApiDeployResponse> {
        self.client
            .localapi()
            .deploy_from_dir(spec, context_dir, wait_for_ready, build_timeout_s)
            .await
            .map_err(Error::Core)
    }

    /// Lists localapi deployments.
    pub async fn list_localapi_deployments(&self) -> Result<Vec<LocalApiDeploymentInfo>> {
        self.client.localapi().list().await.map_err(Error::Core)
    }

    /// Fetches details of one localapi deployment.
    pub async fn get_localapi_deployment(
        &self,
        deployment_id: &str,
    ) -> Result<LocalApiDeploymentInfo> {
        self.client
            .localapi()
            .get(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the status of one localapi deployment.
    pub async fn get_localapi_deployment_status(
        &self,
        deployment_id: &str,
    ) -> Result<LocalApiDeployStatus> {
        self.client
            .localapi()
            .status(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the results of an eval job as raw JSON.
    pub async fn eval_results(&self, job_id: &str) -> Result<Value> {
        self.client
            .eval()
            .get_results(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Downloads the trace archive for an eval job as raw bytes.
    pub async fn download_eval_traces(&self, job_id: &str) -> Result<Vec<u8>> {
        self.client
            .eval()
            .download_traces(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Builds a trace-upload client with this client's credentials.
    ///
    /// NOTE(review): `120` is passed positionally — presumably a timeout
    /// in seconds; confirm against `TraceUploadClient::new`.
    pub fn trace_uploader(&self) -> Result<TraceUploadClient> {
        TraceUploadClient::new(&self.base_url, &self.api_key, 120).map_err(Error::Core)
    }

    /// Uploads a single trace and returns the string the backend reports
    /// for it (presumably a URL or identifier — confirm against
    /// `TraceUploadClient::upload_trace`).
    pub async fn upload_trace(
        &self,
        trace: Value,
        expires_in_seconds: Option<i64>,
    ) -> Result<String> {
        let uploader = self.trace_uploader()?;
        uploader
            .upload_trace(&trace, None, expires_in_seconds)
            .await
            .map_err(Error::Core)
    }

    /// Streams a job's events, invoking `callback` for each message, until
    /// the job reaches a terminal state; returns the final payload.
    pub async fn stream_job_with_callback<F>(
        &self,
        job_id: &str,
        endpoints: StreamEndpoints,
        callback: F,
    ) -> Result<Value>
    where
        F: Fn(&StreamMessage) + Send + Sync + 'static,
    {
        let mut streamer =
            JobStreamer::new(&self.base_url, &self.api_key, job_id).with_endpoints(endpoints);
        streamer.add_handler(CallbackHandler::new(callback));
        streamer.stream_until_terminal().await.map_err(Error::Core)
    }
}
529
530impl std::fmt::Debug for Synth {
531 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
532 f.debug_struct("Synth")
533 .field("api_key", &self.api_key_masked())
534 .field("base_url", &self.base_url)
535 .finish()
536 }
537}
538
/// Builder for a prompt-optimization (prompt learning) job.
///
/// Created via [`Synth::optimize`]; configure with the chained setters and
/// start the job with [`OptimizeBuilder::run`].
pub struct OptimizeBuilder {
    api_key: String,
    base_url: String,
    // Required: URL of the task app the job evaluates against.
    task_app_url: Option<String>,
    // Optional model name forwarded in the job config.
    model: Option<String>,
    // Optional candidate count forwarded in the job config.
    num_candidates: Option<u32>,
    // Overall wait budget for streaming/polling (default: 1 hour).
    timeout: Duration,
    // Stream events while waiting (default) vs. plain polling.
    stream: bool,
}
553
impl OptimizeBuilder {
    /// Internal constructor; obtain a builder via [`Synth::optimize`].
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            model: None,
            num_candidates: None,
            // Default job budget: one hour.
            timeout: Duration::from_secs(3600),
            // Stream progress events by default.
            stream: true,
        }
    }

    /// Sets the task app URL the job evaluates against (required).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Sets the model name forwarded in the job config.
    pub fn model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    /// Sets the number of candidates forwarded in the job config.
    pub fn num_candidates(mut self, n: u32) -> Self {
        self.num_candidates = Some(n);
        self
    }

    /// Sets the overall wait budget for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Enables or disables event streaming while waiting; when disabled,
    /// plain polling is used instead.
    pub fn stream(mut self, enabled: bool) -> Self {
        self.stream = enabled;
        self
    }

    /// Submits the job and waits for it to finish, then fetches results.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Config`] if no task app URL was set, and
    /// [`Error::Core`] for submission, streaming, polling, or result
    /// retrieval failures.
    pub async fn run(self) -> Result<OptimizeResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        // Assemble the job config; optional fields are added only when set.
        let mut config = serde_json::json!({
            "task_app_url": task_app_url,
        });

        if let Some(model) = &self.model {
            config["model"] = serde_json::json!(model);
        }
        if let Some(n) = self.num_candidates {
            config["num_candidates"] = serde_json::json!(n);
        }

        let mut job =
            PromptLearningJob::from_dict(config, Some(&self.api_key), Some(&self.base_url), None)
                .map_err(Error::Core)?;

        let job_id = job.submit().await.map_err(Error::Core)?;

        let status = if self.stream {
            // No per-event callback is installed; the turbofish only names
            // the callback type the generic API requires.
            job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
                self.timeout.as_secs_f64(),
                None,
            )
            .await
            .map_err(Error::Core)?
        } else {
            // Poll every 15 seconds until terminal or the budget expires.
            job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
                .await
                .map_err(Error::Core)?
        };

        let results = job.get_results().await.map_err(Error::Core)?;

        Ok(OptimizeResult {
            job_id,
            status,
            results,
        })
    }
}
646
/// Outcome of a completed optimization job.
#[derive(Debug, Clone)]
pub struct OptimizeResult {
    /// Identifier of the submitted job.
    pub job_id: String,
    /// Terminal status reported by the orchestration layer.
    pub status: synth_ai_core::orchestration::PromptLearningResult,
    /// Final prompt results (best prompt, reward, rankings).
    pub results: PromptResults,
}
657
658impl OptimizeResult {
659 pub fn best_prompt(&self) -> Option<&str> {
661 self.results.best_prompt.as_deref()
662 }
663
664 pub fn best_reward(&self) -> Option<f64> {
666 self.results.best_reward
667 }
668
669 pub fn top_prompts(&self) -> &[RankedPrompt] {
671 &self.results.top_prompts
672 }
673
674 pub fn is_success(&self) -> bool {
676 self.status.status.is_success()
677 }
678}
679
/// Builder for an evaluation job.
///
/// Created via [`Synth::eval`]; configure with the chained setters and
/// start the job with [`EvalBuilder::run`].
pub struct EvalBuilder {
    api_key: String,
    base_url: String,
    // Required: URL of the task app to evaluate against.
    task_app_url: Option<String>,
    // Seeds (task instances) to evaluate; empty by default.
    seeds: Vec<i64>,
    // Overall polling budget (default: 30 minutes).
    timeout: Duration,
}
692
impl EvalBuilder {
    /// Internal constructor; obtain a builder via [`Synth::eval`].
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            seeds: vec![],
            // Default wait budget: 30 minutes.
            timeout: Duration::from_secs(1800),
        }
    }

    /// Sets the task app URL to evaluate against (required).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Sets the seeds (task instances) to evaluate, replacing any prior set.
    pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
        self.seeds = seeds;
        self
    }

    /// Sets the overall polling budget for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Submits the eval job and polls until it completes.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Config`] if no task app URL was set, and
    /// [`Error::Core`] for client construction, submission, or polling
    /// failures.
    pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
            .map_err(Error::Core)?;

        // Everything not exposed by this builder is left at its default /
        // None so the backend applies its own defaults.
        let request = EvalJobRequest {
            app_id: None,
            task_app_url,
            task_app_worker_token: None,
            task_app_api_key: None,
            env_name: "default".to_string(),
            env_config: None,
            verifier_config: None,
            seeds: self.seeds,
            policy: synth_ai_core::api::PolicyConfig::default(),
            max_concurrent: None,
            timeout: None,
        };

        let job_id = client.eval().submit(request).await.map_err(Error::Core)?;

        // Poll every 10 seconds until terminal or the budget expires.
        let status = client
            .eval()
            .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
            .await
            .map_err(Error::Core)?;

        Ok(status)
    }
}
756
757pub async fn optimize(task_app_url: &str) -> Result<OptimizeResult> {
765 Synth::from_env()?
766 .optimize()
767 .task_app(task_app_url)
768 .run()
769 .await
770}
771
772pub async fn eval(task_app_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
774 Synth::from_env()?
775 .eval()
776 .task_app(task_app_url)
777 .seeds(seeds)
778 .run()
779 .await
780}
781
#[cfg(test)]
mod tests {
    use super::*;

    /// `Synth::from_env` must either build a client or fail in one of the
    /// two expected ways. The previous assertion
    /// (`err.is_err() || err.is_ok()`) was a tautology that verified
    /// nothing; this version pins the actual contract, and on success
    /// checks that the Debug rendering is well-formed.
    #[test]
    fn test_synth_debug() {
        match Synth::from_env() {
            Ok(client) => {
                // Debug is hand-written to mask the key; at minimum the
                // struct name and fields must render.
                let rendered = format!("{client:?}");
                assert!(rendered.contains("Synth"));
                assert!(rendered.contains("base_url"));
            }
            // Without SYNTH_API_KEY the only expected failures are a
            // missing key or a core-client construction error.
            Err(err) => assert!(matches!(err, Error::MissingApiKey | Error::Core(_))),
        }
    }

    /// The compile-time version string must be non-empty.
    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }
}