// synth_ai/lib.rs — Synth AI SDK crate root.
1//! # Synth AI SDK
2//!
3//! Ergonomic Rust SDK for Synth AI - serverless post-training APIs.
4//!
5//! ## Quick Start
6//!
7//! ```rust,ignore
8//! use synth_ai::Synth;
9//!
10//! #[tokio::main]
11//! async fn main() -> Result<(), synth_ai::Error> {
12//!     // Create client from SYNTH_API_KEY env var
13//!     let synth = Synth::from_env()?;
14//!
15//!     // Submit a prompt optimization job
16//!     let result = synth
17//!         .optimize()
18//!         .container("https://my-container.com")
19//!         .model("gpt-4o")
20//!         .run()
21//!         .await?;
22//!
23//!     println!("Best candidate: {:?}", result.best_candidate);
24//!     Ok(())
25//! }
26//! ```
27
28use serde_json::Value;
29use std::env;
30use std::path::Path;
31use std::time::Duration;
32use thiserror::Error;
33
34pub mod environment_pools;
35
36pub use environment_pools::EnvironmentPoolsClient;
37
38// Re-export core for advanced usage
39pub use synth_ai_core as core;
40pub use synth_ai_core_types as types;
41
42// Re-export commonly used core types
43pub use synth_ai_core::{
44    api::GraphEvolveClient,
45    api::PromptLearningResult,
46    api::{EvalJobRequest, EvalJobStatus, GepaJobRequest, MiproJobRequest, PolicyJobStatus},
47    api::{
48        GraphCompletionRequest, GraphCompletionResponse, RlmOptions, VerifierOptions,
49        VerifierResponse,
50    },
51    // Local API
52    container::ContainerClient,
53    orchestration::{CandidateInfo, GEPAProgress, ProgressTracker},
54    // Orchestration
55    orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
56    tunnels::errors::TunnelError,
57    tunnels::open_tunnel,
58    // Tunnels
59    tunnels::types::{TunnelBackend, TunnelHandle},
60    ApplicationErrorType,
61    ApplicationStatus,
62    Artifact,
63    BufferedHandler,
64    CallbackHandler,
65    ContextOverride,
66    ContextOverrideStatus,
67    Lever,
68    LeverActor,
69    LeverConstraints,
70    LeverFormat,
71    LeverKind,
72    LeverMutability,
73    LeverMutation,
74    LeverProvenance,
75    LeverSnapshot,
76    MiproLeverSummary,
77    ScopeKey,
78    ScopeKind,
79    Sensor,
80    SensorFrame,
81    SensorFrameSummary,
82    SensorKind,
83    // Errors
84    CoreError,
85    Criterion,
86    CriterionScoreData,
87    EventObjectiveAssignment,
88    // Graph evolve
89    GraphEvolveJob,
90    JobStreamer,
91    // Data types
92    JobType,
93    JsonHandler,
94    Judgement,
95    LLMCallRecord,
96    LLMContentPart,
97    LLMMessage,
98    LLMUsage,
99    ContainerDeployResponse,
100    ContainerDeploySpec,
101    ContainerDeployStatus,
102    ContainerDeploymentInfo,
103    ContainerLimits,
104    MarkovBlanketMessage,
105    MessageContent,
106    ObjectiveSpec,
107    OutcomeObjectiveAssignment,
108    RewardObservation,
109    Rubric,
110    RubricAssignment,
111    SessionTimeStep,
112    SessionTrace,
113    // Tracing
114    SessionTracer,
115    StreamConfig,
116    StreamEndpoints,
117    StreamHandler,
118    StreamMessage,
119    // Streaming
120    StreamType,
121    // API types
122    SynthClient as CoreClient,
123    TimeRecord,
124    ToolCallResult,
125    ToolCallSpec,
126    // Trace upload
127    TraceUploadClient,
128    TracingEvent,
129    UploadUrlResponse,
130};
131
132#[cfg(feature = "libsql")]
133pub use synth_ai_core::tracing::LibsqlTraceStorage;
134
135pub use synth_ai_core::data::{
136    CalibrationExample, EventRewardRecord, GoldExample, InstanceObjectiveAssignment,
137    OutcomeRewardRecord, RewardAggregates, SynthModelName,
138};
139pub use synth_ai_core::tracing::{LLMChunk, LLMRequestParams};
140
/// SDK version, taken from `Cargo.toml` (`CARGO_PKG_VERSION`) at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Default Synth API base URL (re-exported from `synth_ai_core::urls`).
pub const DEFAULT_BASE_URL: &str = synth_ai_core::urls::DEFAULT_BACKEND_URL;

/// Environment variable read by [`Synth::from_env`] for the API key.
pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
149
// =============================================================================
// Error Types
// =============================================================================

/// SDK error type.
///
/// Wraps core/tunnel errors from `synth_ai_core` and adds SDK-level failure
/// modes (configuration, submission, job state, timeout).
#[derive(Debug, Error)]
pub enum Error {
    /// Missing API key (neither passed explicitly nor found in the env var).
    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
    MissingApiKey,

    /// Configuration error (e.g. a required builder field was not set).
    #[error("configuration error: {0}")]
    Config(String),

    /// Core error passthrough; `#[from]` lets `?` convert `CoreError` directly.
    #[error(transparent)]
    Core(#[from] synth_ai_core::CoreError),

    /// Tunnel error passthrough from the tunnels subsystem.
    #[error(transparent)]
    Tunnel(#[from] TunnelError),

    /// Job submission failed.
    #[error("job submission failed: {0}")]
    Submission(String),

    /// Job execution failed.
    #[error("job failed: {0}")]
    JobFailed(String),

    /// Timeout waiting for job completion.
    #[error("timeout after {0:?}")]
    Timeout(Duration),
}

/// Result type alias used throughout the SDK.
pub type Result<T> = std::result::Result<T, Error>;
188
189// =============================================================================
190// Main Client
191// =============================================================================
192
/// Main Synth AI client.
///
/// This is the primary entry point for interacting with Synth AI APIs.
///
/// # Example
///
/// ```rust,ignore
/// use synth_ai::Synth;
///
/// let synth = Synth::from_env()?;
///
/// // Or with explicit credentials
/// let synth = Synth::new("sk_live_...", None)?;
/// ```
pub struct Synth {
    // Raw API key; never exposed directly — see `api_key_masked` / the Debug impl.
    api_key: String,
    // Backend base URL (falls back to `DEFAULT_BASE_URL` when not overridden).
    base_url: String,
    // Underlying core client that performs the actual HTTP calls.
    client: synth_ai_core::SynthClient,
}
212
impl Synth {
    /// Create a new Synth client with explicit credentials.
    ///
    /// # Arguments
    ///
    /// * `api_key` - Your Synth API key
    /// * `base_url` - Optional custom API base URL
    ///
    /// # Errors
    ///
    /// Returns [`Error::Core`] if the underlying core client cannot be built.
    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
        let api_key = api_key.into();
        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();

        let client =
            synth_ai_core::SynthClient::new(&api_key, Some(&base_url)).map_err(Error::Core)?;

        Ok(Self {
            api_key,
            base_url,
            client,
        })
    }

    /// Create a client from the `SYNTH_API_KEY` environment variable.
    ///
    /// The base URL is resolved from `SYNTH_BASE_URL`, then `SYNTH_BACKEND_URL`,
    /// falling back to [`DEFAULT_BASE_URL`].
    pub fn from_env() -> Result<Self> {
        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
        let base_url = env::var("SYNTH_BASE_URL")
            .ok()
            .or_else(|| env::var("SYNTH_BACKEND_URL").ok())
            .or_else(|| Some(DEFAULT_BASE_URL.to_string()));
        Self::new(api_key, base_url.as_deref())
    }

    /// Get the API key (masked for display).
    pub fn api_key_masked(&self) -> String {
        synth_ai_core::auth::mask_str(&self.api_key)
    }

    /// Get the base URL.
    pub fn base_url(&self) -> &str {
        &self.base_url
    }

    /// Access the underlying core client.
    pub fn core(&self) -> &synth_ai_core::SynthClient {
        &self.client
    }

    /// Create an Environment Pools client sharing this client's credentials.
    pub fn environment_pools(&self) -> Result<EnvironmentPoolsClient> {
        EnvironmentPoolsClient::new(self.api_key.clone(), Some(&self.base_url))
    }

    // -------------------------------------------------------------------------
    // High-level API
    // -------------------------------------------------------------------------

    /// Start a prompt optimization job.
    ///
    /// Returns a builder to configure the optimization.
    pub fn optimize(&self) -> OptimizeBuilder {
        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Start an evaluation job.
    ///
    /// Returns a builder to configure the evaluation.
    pub fn eval(&self) -> EvalBuilder {
        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Open a tunnel to a local port.
    ///
    /// # Arguments
    ///
    /// * `port` - Local port to tunnel
    /// * `backend` - Tunnel backend (cloudflare_managed_lease recommended)
    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
        synth_ai_core::tunnels::open_tunnel(
            backend,
            port,
            Some(self.api_key.clone()),
            Some(self.base_url.clone()),
            // NOTE(review): the remaining positional args are opaque here —
            // confirm their meaning against `open_tunnel`'s signature in
            // synth_ai_core before changing any of them.
            None,
            false,
            true,
            false,
        )
        .await
        .map_err(Error::Tunnel)
    }

    // -------------------------------------------------------------------------
    // Direct API access
    // -------------------------------------------------------------------------

    /// Submit a raw GEPA job; returns the new job ID.
    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_gepa(request)
            .await
            .map_err(Error::Core)
    }

    /// Submit a raw MIPRO job; returns the new job ID.
    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_mipro(request)
            .await
            .map_err(Error::Core)
    }

    /// Get job status.
    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .get_status(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Poll job until complete.
    ///
    /// `timeout_secs` bounds total waiting; `interval_secs` is the poll period.
    pub async fn poll_job(
        &self,
        job_id: &str,
        timeout_secs: f64,
        interval_secs: f64,
    ) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .poll_until_complete(job_id, timeout_secs, interval_secs)
            .await
            .map_err(Error::Core)
    }

    /// Cancel a job, optionally recording a reason.
    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .cancel(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Pause a job, optionally recording a reason.
    pub async fn pause_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .pause(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Resume a paused job, optionally recording a reason.
    pub async fn resume_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .resume(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Run graph completion.
    pub async fn complete(
        &self,
        request: GraphCompletionRequest,
    ) -> Result<GraphCompletionResponse> {
        self.client
            .graphs()
            .complete(request)
            .await
            .map_err(Error::Core)
    }

    /// List registered graphs, optionally filtered by `kind` and capped at `limit`.
    pub async fn list_graphs(&self, kind: Option<&str>, limit: Option<i32>) -> Result<Value> {
        self.client
            .graphs()
            .list_graphs(kind, limit)
            .await
            .map_err(Error::Core)
    }

    /// Run verifier on a trace.
    pub async fn verify(
        &self,
        trace: serde_json::Value,
        rubric: serde_json::Value,
        options: Option<VerifierOptions>,
    ) -> Result<VerifierResponse> {
        self.client
            .graphs()
            .verify(trace, rubric, options)
            .await
            .map_err(Error::Core)
    }

    /// Run RLM (Retrieval-augmented LM) inference.
    pub async fn rlm_inference(
        &self,
        query: &str,
        context: Value,
        options: Option<RlmOptions>,
    ) -> Result<Value> {
        self.client
            .graphs()
            .rlm_inference(query, context, options)
            .await
            .map_err(Error::Core)
    }

    /// Create a Graph Evolve client for advanced operations.
    pub fn graph_evolve(&self) -> GraphEvolveClient<'_> {
        self.client.graph_evolve()
    }

    /// Create a Graph Evolve job from a payload.
    pub fn graph_evolve_job_from_payload(&self, payload: Value) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_payload(payload, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Reconnect to a Graph Evolve job by ID.
    pub fn graph_evolve_job_from_id(&self, job_id: &str) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_job_id(job_id, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Verify a trace against a rubric with default options.
    ///
    /// Convenience wrapper over [`Synth::verify`] with `options = None`.
    pub async fn verify_rubric(&self, trace: Value, rubric: Value) -> Result<VerifierResponse> {
        self.verify(trace, rubric, None).await
    }

    /// Create a Container container client.
    ///
    /// Uses this client's API key unless an explicit `api_key` is given.
    pub fn container_client(&self, base_url: &str, api_key: Option<&str>) -> ContainerClient {
        let key = api_key.unwrap_or(self.api_key.as_str());
        ContainerClient::new(base_url, Some(key))
    }

    /// Deploy a managed Container from a context directory.
    ///
    /// When `wait_for_ready` is set, waiting is bounded by `build_timeout_s`.
    pub async fn deploy_container_from_dir(
        &self,
        spec: ContainerDeploySpec,
        context_dir: impl AsRef<Path>,
        wait_for_ready: bool,
        build_timeout_s: f64,
    ) -> Result<ContainerDeployResponse> {
        self.client
            .container()
            .deploy_from_dir(spec, context_dir, wait_for_ready, build_timeout_s)
            .await
            .map_err(Error::Core)
    }

    /// List managed Container deployments for the current org.
    pub async fn list_container_deployments(&self) -> Result<Vec<ContainerDeploymentInfo>> {
        self.client.container().list().await.map_err(Error::Core)
    }

    /// Fetch a managed Container deployment by ID.
    pub async fn get_container_deployment(
        &self,
        deployment_id: &str,
    ) -> Result<ContainerDeploymentInfo> {
        self.client
            .container()
            .get(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetch managed Container deployment status by ID.
    pub async fn get_container_deployment_status(
        &self,
        deployment_id: &str,
    ) -> Result<ContainerDeployStatus> {
        self.client
            .container()
            .status(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetch detailed eval results.
    pub async fn eval_results(&self, job_id: &str) -> Result<Value> {
        self.client
            .eval()
            .get_results(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Download eval traces as ZIP bytes.
    pub async fn download_eval_traces(&self, job_id: &str) -> Result<Vec<u8>> {
        self.client
            .eval()
            .download_traces(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Create a trace uploader for large traces.
    pub fn trace_uploader(&self) -> Result<TraceUploadClient> {
        // NOTE(review): 120 is presumably a timeout in seconds — confirm against
        // `TraceUploadClient::new`'s contract before changing.
        TraceUploadClient::new(&self.base_url, &self.api_key, 120).map_err(Error::Core)
    }

    /// Upload a trace and return its trace_ref.
    pub async fn upload_trace(
        &self,
        trace: Value,
        expires_in_seconds: Option<i64>,
    ) -> Result<String> {
        let uploader = self.trace_uploader()?;
        uploader
            .upload_trace(&trace, None, expires_in_seconds)
            .await
            .map_err(Error::Core)
    }

    /// Stream job events with a callback and return final status.
    ///
    /// The callback is invoked for every [`StreamMessage`] until the job
    /// reaches a terminal state.
    pub async fn stream_job_with_callback<F>(
        &self,
        job_id: &str,
        endpoints: StreamEndpoints,
        callback: F,
    ) -> Result<Value>
    where
        F: Fn(&StreamMessage) + Send + Sync + 'static,
    {
        let mut streamer =
            JobStreamer::new(&self.base_url, &self.api_key, job_id).with_endpoints(endpoints);
        streamer.add_handler(CallbackHandler::new(callback));
        streamer.stream_until_terminal().await.map_err(Error::Core)
    }
}
548
549impl std::fmt::Debug for Synth {
550    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
551        f.debug_struct("Synth")
552            .field("api_key", &self.api_key_masked())
553            .field("base_url", &self.base_url)
554            .finish()
555    }
556}
557
558// =============================================================================
559// Optimize Builder
560// =============================================================================
561
/// Builder for prompt optimization jobs.
///
/// Created via [`Synth::optimize`]; configure with the chained setters, then
/// finish with `run`.
pub struct OptimizeBuilder {
    api_key: String,
    base_url: String,
    // Required: `run` fails with `Error::Config` when this is unset.
    container_url: Option<String>,
    // Optional model name forwarded into the job config.
    model: Option<String>,
    // Optional candidate count forwarded into the job config.
    num_candidates: Option<u32>,
    // Overall deadline for the job (default: 1 hour).
    timeout: Duration,
    // Stream events while waiting (default) or poll instead.
    stream: bool,
}
572
573impl OptimizeBuilder {
574    fn new(api_key: String, base_url: String) -> Self {
575        Self {
576            api_key,
577            base_url,
578            container_url: None,
579            model: None,
580            num_candidates: None,
581            timeout: Duration::from_secs(3600),
582            stream: true,
583        }
584    }
585
586    /// Set the container URL.
587    pub fn container(mut self, url: impl Into<String>) -> Self {
588        self.container_url = Some(url.into());
589        self
590    }
591
592    /// Set the model to optimize for.
593    pub fn model(mut self, model: impl Into<String>) -> Self {
594        self.model = Some(model.into());
595        self
596    }
597
598    /// Set the number of candidates to generate.
599    pub fn num_candidates(mut self, n: u32) -> Self {
600        self.num_candidates = Some(n);
601        self
602    }
603
604    /// Set the timeout for the job.
605    pub fn timeout(mut self, timeout: Duration) -> Self {
606        self.timeout = timeout;
607        self
608    }
609
610    /// Enable or disable streaming (default: true).
611    pub fn stream(mut self, enabled: bool) -> Self {
612        self.stream = enabled;
613        self
614    }
615
616    /// Run the optimization job and wait for completion.
617    pub async fn run(self) -> Result<OptimizeResult> {
618        let container_url = self
619            .container_url
620            .ok_or_else(|| Error::Config("container URL is required".into()))?;
621
622        // Build config
623        let mut config = serde_json::json!({
624            "container_url": container_url,
625        });
626
627        if let Some(model) = &self.model {
628            config["model"] = serde_json::json!(model);
629        }
630        if let Some(n) = self.num_candidates {
631            config["num_candidates"] = serde_json::json!(n);
632        }
633
634        // Create and run job
635        let mut job =
636            PromptLearningJob::from_dict(config, Some(&self.api_key), Some(&self.base_url), None)
637                .map_err(Error::Core)?;
638
639        let job_id = job.submit().await.map_err(Error::Core)?;
640
641        // Stream or poll
642        let status = if self.stream {
643            job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
644                self.timeout.as_secs_f64(),
645                None,
646            )
647            .await
648            .map_err(Error::Core)?
649        } else {
650            job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
651                .await
652                .map_err(Error::Core)?
653        };
654
655        // Get results
656        let results = job.get_results().await.map_err(Error::Core)?;
657
658        Ok(OptimizeResult {
659            job_id,
660            status,
661            results,
662        })
663    }
664}
665
/// Result of a prompt optimization job.
///
/// Returned by `OptimizeBuilder::run`; bundles the job ID with its final
/// status and the fetched optimization results.
#[derive(Debug, Clone)]
pub struct OptimizeResult {
    /// Job ID.
    pub job_id: String,
    /// Final job status.
    pub status: synth_ai_core::orchestration::PromptLearningResult,
    /// Optimization results.
    pub results: PromptResults,
}
676
677impl OptimizeResult {
678    /// Get the best candidate if available.
679    pub fn best_candidate(&self) -> Option<&str> {
680        self.results.best_candidate.as_deref()
681    }
682
683    /// Get the best reward if available.
684    pub fn best_reward(&self) -> Option<f64> {
685        self.results.best_reward
686    }
687
688    /// Get all top prompts.
689    pub fn top_prompts(&self) -> &[RankedPrompt] {
690        &self.results.top_prompts
691    }
692
693    /// Check if the job succeeded.
694    pub fn is_success(&self) -> bool {
695        self.status.status.is_success()
696    }
697}
698
699// =============================================================================
700// Eval Builder
701// =============================================================================
702
/// Builder for evaluation jobs.
///
/// Created via [`Synth::eval`]; configure with the chained setters, then
/// finish with `run`.
pub struct EvalBuilder {
    api_key: String,
    base_url: String,
    // Required: `run` fails with `Error::Config` when this is unset.
    container_url: Option<String>,
    // Seeds to evaluate on (empty by default).
    seeds: Vec<i64>,
    // Overall deadline for the job (default: 30 minutes).
    timeout: Duration,
}
711
712impl EvalBuilder {
713    fn new(api_key: String, base_url: String) -> Self {
714        Self {
715            api_key,
716            base_url,
717            container_url: None,
718            seeds: vec![],
719            timeout: Duration::from_secs(1800),
720        }
721    }
722
723    /// Set the container URL.
724    pub fn container(mut self, url: impl Into<String>) -> Self {
725        self.container_url = Some(url.into());
726        self
727    }
728
729    /// Set the seeds to evaluate on.
730    pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
731        self.seeds = seeds;
732        self
733    }
734
735    /// Set the timeout for the job.
736    pub fn timeout(mut self, timeout: Duration) -> Self {
737        self.timeout = timeout;
738        self
739    }
740
741    /// Run the evaluation and wait for completion.
742    pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
743        let container_url = self
744            .container_url
745            .ok_or_else(|| Error::Config("container URL is required".into()))?;
746
747        let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
748            .map_err(Error::Core)?;
749
750        let request = EvalJobRequest {
751            app_id: None,
752            container_url,
753            container_worker_token: None,
754            container_api_key: None,
755            env_name: "default".to_string(),
756            env_config: None,
757            verifier_config: None,
758            seeds: self.seeds,
759            policy: synth_ai_core::api::PolicyConfig::default(),
760            max_concurrent: None,
761            timeout: None,
762        };
763
764        let job_id = client.eval().submit(request).await.map_err(Error::Core)?;
765
766        let status = client
767            .eval()
768            .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
769            .await
770            .map_err(Error::Core)?;
771
772        Ok(status)
773    }
774}
775
776// =============================================================================
777// Convenience Functions
778// =============================================================================
779
780/// Create a client from environment and run a quick optimization.
781///
782/// This is a convenience function for simple use cases.
783pub async fn optimize(container_url: &str) -> Result<OptimizeResult> {
784    Synth::from_env()?
785        .optimize()
786        .container(container_url)
787        .run()
788        .await
789}
790
791/// Create a client from environment and run a quick evaluation.
792pub async fn eval(container_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
793    Synth::from_env()?
794        .eval()
795        .container(container_url)
796        .seeds(seeds)
797        .run()
798        .await
799}
800
801/// Convert a GEPA seed candidate mapping into a Synth prompt pattern.
802///
803/// See: specifications/tanha/master_specification.md
804pub fn gepa_candidate_to_initial_prompt(seed_candidate: &Value) -> Result<Value> {
805    synth_ai_core::config::gepa_candidate_to_initial_prompt(seed_candidate).map_err(Error::Core)
806}
807
#[cfg(test)]
mod tests {
    use super::*;

    // The previous test asserted `err.is_err() || err.is_ok()`, which is a
    // tautology and could never fail. These assertions pin observable,
    // environment-independent behavior instead.

    /// Error display strings are part of the SDK's user-facing contract.
    #[test]
    fn test_error_display() {
        assert_eq!(
            Error::Config("bad".into()).to_string(),
            "configuration error: bad"
        );
        assert_eq!(
            Error::JobFailed("boom".into()).to_string(),
            "job failed: boom"
        );
        assert_eq!(
            Error::Submission("rejected".into()).to_string(),
            "job submission failed: rejected"
        );
        // The missing-key message must tell users which env var to set.
        assert!(Error::MissingApiKey.to_string().contains(API_KEY_ENV));
    }

    /// `CARGO_PKG_VERSION` is always populated by cargo at compile time.
    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }
}