// synth_ai — crate root (src/lib.rs)
//! # Synth AI SDK
//!
//! Ergonomic Rust SDK for Synth AI - serverless post-training APIs.
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use synth_ai::Synth;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), synth_ai::Error> {
//!     // Create client from SYNTH_API_KEY env var
//!     let synth = Synth::from_env()?;
//!
//!     // Submit a prompt optimization job
//!     let result = synth
//!         .optimize()
//!         .task_app("https://my-task-app.com")
//!         .model("gpt-4o")
//!         .run()
//!         .await?;
//!
//!     // `best_prompt` is an accessor method on `OptimizeResult`.
//!     println!("Best prompt: {:?}", result.best_prompt());
//!     Ok(())
//! }
//! ```

28use serde_json::Value;
29use std::env;
30use std::path::Path;
31use std::time::Duration;
32use thiserror::Error;
33
34pub mod environment_pools;
35
36pub use environment_pools::EnvironmentPoolsClient;
37
38// Re-export core for advanced usage
39pub use synth_ai_core as core;
40pub use synth_ai_core_types as types;
41
42// Re-export commonly used core types
43pub use synth_ai_core::{
44    api::GraphEvolveClient,
45    api::PromptLearningResult,
46    api::{EvalJobRequest, EvalJobStatus, GepaJobRequest, MiproJobRequest, PolicyJobStatus},
47    api::{
48        GraphCompletionRequest, GraphCompletionResponse, RlmOptions, VerifierOptions,
49        VerifierResponse,
50    },
51    // Local API
52    localapi::TaskAppClient,
53    orchestration::{CandidateInfo, GEPAProgress, ProgressTracker},
54    // Orchestration
55    orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
56    tunnels::errors::TunnelError,
57    tunnels::open_tunnel,
58    // Tunnels
59    tunnels::types::{TunnelBackend, TunnelHandle},
60    ApplicationErrorType,
61    ApplicationStatus,
62    Artifact,
63    BufferedHandler,
64    CallbackHandler,
65    ContextOverride,
66    ContextOverrideStatus,
67    // Errors
68    CoreError,
69    Criterion,
70    CriterionScoreData,
71    EventObjectiveAssignment,
72    // Graph evolve
73    GraphEvolveJob,
74    JobStreamer,
75    // Data types
76    JobType,
77    JsonHandler,
78    Judgement,
79    LLMCallRecord,
80    LLMContentPart,
81    LLMMessage,
82    LLMUsage,
83    LocalApiDeployResponse,
84    LocalApiDeploySpec,
85    LocalApiDeployStatus,
86    LocalApiDeploymentInfo,
87    LocalApiLimits,
88    MarkovBlanketMessage,
89    MessageContent,
90    ObjectiveSpec,
91    OutcomeObjectiveAssignment,
92    RewardObservation,
93    Rubric,
94    RubricAssignment,
95    SessionTimeStep,
96    SessionTrace,
97    // Tracing
98    SessionTracer,
99    StreamConfig,
100    StreamEndpoints,
101    StreamHandler,
102    StreamMessage,
103    // Streaming
104    StreamType,
105    // API types
106    SynthClient as CoreClient,
107    TimeRecord,
108    ToolCallResult,
109    ToolCallSpec,
110    // Trace upload
111    TraceUploadClient,
112    TracingEvent,
113    UploadUrlResponse,
114};
115
116#[cfg(feature = "libsql")]
117pub use synth_ai_core::tracing::LibsqlTraceStorage;
118
119pub use synth_ai_core::data::{
120    CalibrationExample, EventRewardRecord, GoldExample, InstanceObjectiveAssignment,
121    OutcomeRewardRecord, RewardAggregates, SynthModelName,
122};
123pub use synth_ai_core::tracing::{LLMChunk, LLMRequestParams};
124
/// SDK version, taken from this crate's `Cargo.toml` at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Default Synth API base URL, used when no explicit `base_url` is given.
pub const DEFAULT_BASE_URL: &str = "https://api.usesynth.ai";

/// Name of the environment variable [`Synth::from_env`] reads the API key from.
pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
133
134// =============================================================================
135// Error Types
136// =============================================================================
137
/// SDK error type.
///
/// Wraps errors from the core crate and the tunnel subsystem, plus
/// SDK-level configuration and job-lifecycle failures.
#[derive(Debug, Error)]
pub enum Error {
    /// Missing API key (neither passed explicitly nor found in the env).
    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
    MissingApiKey,

    /// Configuration error (e.g. a required builder field was not set).
    #[error("configuration error: {0}")]
    Config(String),

    /// Core error passthrough from `synth_ai_core`.
    #[error(transparent)]
    Core(#[from] synth_ai_core::CoreError),

    /// Tunnel error passthrough from the tunnels subsystem.
    #[error(transparent)]
    Tunnel(#[from] TunnelError),

    /// Job submission failed.
    #[error("job submission failed: {0}")]
    Submission(String),

    /// Job execution failed.
    #[error("job failed: {0}")]
    JobFailed(String),

    /// Timeout waiting for job completion.
    #[error("timeout after {0:?}")]
    Timeout(Duration),
}
169
/// Result type alias used throughout this SDK, with [`Error`] as the error type.
pub type Result<T> = std::result::Result<T, Error>;
172
173// =============================================================================
174// Main Client
175// =============================================================================
176
/// Main Synth AI client.
///
/// This is the primary entry point for interacting with Synth AI APIs.
///
/// # Example
///
/// ```rust,ignore
/// use synth_ai::Synth;
///
/// let synth = Synth::from_env()?;
///
/// // Or with explicit credentials
/// let synth = Synth::new("sk_live_...", None)?;
/// ```
pub struct Synth {
    // Raw API key. Never printed directly: the Debug impl masks it via
    // `api_key_masked`.
    api_key: String,
    // Base URL all requests are sent to (defaults to DEFAULT_BASE_URL).
    base_url: String,
    // Underlying core client that performs the actual API calls.
    client: synth_ai_core::SynthClient,
}
196
impl Synth {
    /// Create a new Synth client with explicit credentials.
    ///
    /// # Arguments
    ///
    /// * `api_key` - Your Synth API key
    /// * `base_url` - Optional custom API base URL (defaults to
    ///   [`DEFAULT_BASE_URL`] when `None`)
    ///
    /// # Errors
    ///
    /// Returns [`Error::Core`] if the underlying core client fails to build.
    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
        let api_key = api_key.into();
        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();

        let client =
            synth_ai_core::SynthClient::new(&api_key, Some(&base_url)).map_err(Error::Core)?;

        Ok(Self {
            api_key,
            base_url,
            client,
        })
    }

    /// Create a client from the `SYNTH_API_KEY` environment variable.
    ///
    /// Also honors `SYNTH_BASE_URL` if set; otherwise the default base URL
    /// is used.
    ///
    /// # Errors
    ///
    /// Returns [`Error::MissingApiKey`] when `SYNTH_API_KEY` is unset.
    pub fn from_env() -> Result<Self> {
        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
        let base_url = env::var("SYNTH_BASE_URL").ok();
        Self::new(api_key, base_url.as_deref())
    }

    /// Get the API key (masked for display).
    pub fn api_key_masked(&self) -> String {
        synth_ai_core::auth::mask_str(&self.api_key)
    }

    /// Get the base URL.
    pub fn base_url(&self) -> &str {
        &self.base_url
    }

    /// Access the underlying core client for lower-level operations.
    pub fn core(&self) -> &synth_ai_core::SynthClient {
        &self.client
    }

    /// Create an Environment Pools client sharing this client's credentials.
    pub fn environment_pools(&self) -> Result<EnvironmentPoolsClient> {
        EnvironmentPoolsClient::new(self.api_key.clone(), Some(&self.base_url))
    }

    // -------------------------------------------------------------------------
    // High-level API
    // -------------------------------------------------------------------------

    /// Start a prompt optimization job.
    ///
    /// Returns a builder to configure the optimization.
    pub fn optimize(&self) -> OptimizeBuilder {
        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Start an evaluation job.
    ///
    /// Returns a builder to configure the evaluation.
    pub fn eval(&self) -> EvalBuilder {
        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Open a tunnel to a local port.
    ///
    /// # Arguments
    ///
    /// * `port` - Local port to tunnel
    /// * `backend` - Tunnel backend (cloudflare_managed_lease recommended)
    ///
    /// # Errors
    ///
    /// Returns [`Error::Tunnel`] on any tunnel-establishment failure.
    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
        synth_ai_core::tunnels::open_tunnel(
            backend,
            port,
            Some(self.api_key.clone()),
            Some(self.base_url.clone()),
            // NOTE(review): the trailing `None, false, true, false` are
            // positional options/flags on `open_tunnel` — confirm their
            // meaning against the `synth_ai_core::tunnels::open_tunnel`
            // signature before changing them.
            None,
            false,
            true,
            false,
        )
        .await
        .map_err(Error::Tunnel)
    }

    // -------------------------------------------------------------------------
    // Direct API access
    // -------------------------------------------------------------------------

    /// Submit a raw GEPA job; returns the new job's ID.
    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_gepa(request)
            .await
            .map_err(Error::Core)
    }

    /// Submit a raw MIPRO job; returns the new job's ID.
    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_mipro(request)
            .await
            .map_err(Error::Core)
    }

    /// Get current status/results for a job by ID.
    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .get_status(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Poll job until complete.
    ///
    /// # Arguments
    ///
    /// * `timeout_secs` - Overall polling deadline, in seconds
    /// * `interval_secs` - Delay between status checks, in seconds
    pub async fn poll_job(
        &self,
        job_id: &str,
        timeout_secs: f64,
        interval_secs: f64,
    ) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .poll_until_complete(job_id, timeout_secs, interval_secs)
            .await
            .map_err(Error::Core)
    }

    /// Cancel a job, with an optional human-readable reason.
    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .cancel(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Pause a job, with an optional human-readable reason.
    pub async fn pause_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .pause(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Resume a paused job, with an optional human-readable reason.
    pub async fn resume_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .resume(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Run graph completion.
    pub async fn complete(
        &self,
        request: GraphCompletionRequest,
    ) -> Result<GraphCompletionResponse> {
        self.client
            .graphs()
            .complete(request)
            .await
            .map_err(Error::Core)
    }

    /// List registered graphs, optionally filtered by `kind` and capped at
    /// `limit` entries.
    pub async fn list_graphs(&self, kind: Option<&str>, limit: Option<i32>) -> Result<Value> {
        self.client
            .graphs()
            .list_graphs(kind, limit)
            .await
            .map_err(Error::Core)
    }

    /// Run verifier on a trace.
    ///
    /// `trace` and `rubric` are free-form JSON payloads forwarded to the
    /// verifier endpoint.
    pub async fn verify(
        &self,
        trace: serde_json::Value,
        rubric: serde_json::Value,
        options: Option<VerifierOptions>,
    ) -> Result<VerifierResponse> {
        self.client
            .graphs()
            .verify(trace, rubric, options)
            .await
            .map_err(Error::Core)
    }

    /// Run RLM (Retrieval-augmented LM) inference.
    pub async fn rlm_inference(
        &self,
        query: &str,
        context: Value,
        options: Option<RlmOptions>,
    ) -> Result<Value> {
        self.client
            .graphs()
            .rlm_inference(query, context, options)
            .await
            .map_err(Error::Core)
    }

    /// Create a Graph Evolve client for advanced operations.
    pub fn graph_evolve(&self) -> GraphEvolveClient<'_> {
        self.client.graph_evolve()
    }

    /// Create a Graph Evolve job from a raw JSON payload, bound to this
    /// client's credentials.
    pub fn graph_evolve_job_from_payload(&self, payload: Value) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_payload(payload, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Reconnect to an existing Graph Evolve job by ID.
    pub fn graph_evolve_job_from_id(&self, job_id: &str) -> Result<GraphEvolveJob> {
        GraphEvolveJob::from_job_id(job_id, Some(&self.api_key), Some(&self.base_url))
            .map_err(Error::Core)
    }

    /// Verify a trace against a rubric with default options.
    ///
    /// Convenience wrapper around [`Synth::verify`] with `options: None`.
    pub async fn verify_rubric(&self, trace: Value, rubric: Value) -> Result<VerifierResponse> {
        self.verify(trace, rubric, None).await
    }

    /// Create a LocalAPI task app client.
    ///
    /// Falls back to this client's own API key when `api_key` is `None`.
    pub fn task_app_client(&self, base_url: &str, api_key: Option<&str>) -> TaskAppClient {
        let key = api_key.unwrap_or(self.api_key.as_str());
        TaskAppClient::new(base_url, Some(key))
    }

    /// Deploy a managed LocalAPI from a context directory.
    ///
    /// # Arguments
    ///
    /// * `spec` - Deployment specification
    /// * `context_dir` - Directory whose contents form the build context
    /// * `wait_for_ready` - Block until the deployment reports ready
    /// * `build_timeout_s` - Build timeout, in seconds
    pub async fn deploy_localapi_from_dir(
        &self,
        spec: LocalApiDeploySpec,
        context_dir: impl AsRef<Path>,
        wait_for_ready: bool,
        build_timeout_s: f64,
    ) -> Result<LocalApiDeployResponse> {
        self.client
            .localapi()
            .deploy_from_dir(spec, context_dir, wait_for_ready, build_timeout_s)
            .await
            .map_err(Error::Core)
    }

    /// List managed LocalAPI deployments for the current org.
    pub async fn list_localapi_deployments(&self) -> Result<Vec<LocalApiDeploymentInfo>> {
        self.client.localapi().list().await.map_err(Error::Core)
    }

    /// Fetch a managed LocalAPI deployment by ID.
    pub async fn get_localapi_deployment(
        &self,
        deployment_id: &str,
    ) -> Result<LocalApiDeploymentInfo> {
        self.client
            .localapi()
            .get(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetch managed LocalAPI deployment status by ID.
    pub async fn get_localapi_deployment_status(
        &self,
        deployment_id: &str,
    ) -> Result<LocalApiDeployStatus> {
        self.client
            .localapi()
            .status(deployment_id)
            .await
            .map_err(Error::Core)
    }

    /// Fetch detailed eval results as raw JSON.
    pub async fn eval_results(&self, job_id: &str) -> Result<Value> {
        self.client
            .eval()
            .get_results(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Download eval traces as ZIP bytes.
    pub async fn download_eval_traces(&self, job_id: &str) -> Result<Vec<u8>> {
        self.client
            .eval()
            .download_traces(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Create a trace uploader for large traces.
    ///
    /// The `120` passed to `TraceUploadClient::new` is a timeout —
    /// presumably seconds; confirm against the core crate before tuning.
    pub fn trace_uploader(&self) -> Result<TraceUploadClient> {
        TraceUploadClient::new(&self.base_url, &self.api_key, 120).map_err(Error::Core)
    }

    /// Upload a trace and return its trace_ref.
    ///
    /// # Arguments
    ///
    /// * `trace` - Trace payload as JSON
    /// * `expires_in_seconds` - Optional expiry for the stored trace
    pub async fn upload_trace(
        &self,
        trace: Value,
        expires_in_seconds: Option<i64>,
    ) -> Result<String> {
        let uploader = self.trace_uploader()?;
        uploader
            .upload_trace(&trace, None, expires_in_seconds)
            .await
            .map_err(Error::Core)
    }

    /// Stream job events with a callback and return final status.
    ///
    /// The callback is invoked for every [`StreamMessage`] until the job
    /// reaches a terminal state; the returned JSON is the final status.
    pub async fn stream_job_with_callback<F>(
        &self,
        job_id: &str,
        endpoints: StreamEndpoints,
        callback: F,
    ) -> Result<Value>
    where
        F: Fn(&StreamMessage) + Send + Sync + 'static,
    {
        let mut streamer =
            JobStreamer::new(&self.base_url, &self.api_key, job_id).with_endpoints(endpoints);
        streamer.add_handler(CallbackHandler::new(callback));
        streamer.stream_until_terminal().await.map_err(Error::Core)
    }
}
529
530impl std::fmt::Debug for Synth {
531    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
532        f.debug_struct("Synth")
533            .field("api_key", &self.api_key_masked())
534            .field("base_url", &self.base_url)
535            .finish()
536    }
537}
538
539// =============================================================================
540// Optimize Builder
541// =============================================================================
542
/// Builder for prompt optimization jobs.
///
/// Created via [`Synth::optimize`]; configure with the setter methods and
/// execute with [`OptimizeBuilder::run`].
pub struct OptimizeBuilder {
    // Credentials captured from the parent Synth client.
    api_key: String,
    base_url: String,
    // Required: URL of the task app to optimize against.
    task_app_url: Option<String>,
    // Optional model identifier to optimize for.
    model: Option<String>,
    // Optional number of candidate prompts to generate.
    num_candidates: Option<u32>,
    // Overall job deadline (default: 1 hour).
    timeout: Duration,
    // Whether to stream progress events instead of polling (default: true).
    stream: bool,
}
553
impl OptimizeBuilder {
    // Crate-internal constructor; users obtain a builder via Synth::optimize.
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            model: None,
            num_candidates: None,
            // Default deadline: one hour.
            timeout: Duration::from_secs(3600),
            stream: true,
        }
    }

    /// Set the task app URL (required before calling [`OptimizeBuilder::run`]).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Set the model to optimize for.
    pub fn model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    /// Set the number of candidates to generate.
    pub fn num_candidates(mut self, n: u32) -> Self {
        self.num_candidates = Some(n);
        self
    }

    /// Set the timeout for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Enable or disable streaming (default: true).
    ///
    /// When disabled, the job is polled every 15 seconds instead.
    pub fn stream(mut self, enabled: bool) -> Self {
        self.stream = enabled;
        self
    }

    /// Run the optimization job and wait for completion.
    ///
    /// # Errors
    ///
    /// * [`Error::Config`] if no task app URL was set.
    /// * [`Error::Core`] for submission, streaming/polling, or result-fetch
    ///   failures.
    pub async fn run(self) -> Result<OptimizeResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        // Build config as loose JSON; only set keys the user provided so the
        // backend's own defaults apply otherwise.
        let mut config = serde_json::json!({
            "task_app_url": task_app_url,
        });

        if let Some(model) = &self.model {
            config["model"] = serde_json::json!(model);
        }
        if let Some(n) = self.num_candidates {
            config["num_candidates"] = serde_json::json!(n);
        }

        // Create and run job
        let mut job =
            PromptLearningJob::from_dict(config, Some(&self.api_key), Some(&self.base_url), None)
                .map_err(Error::Core)?;

        let job_id = job.submit().await.map_err(Error::Core)?;

        // Stream or poll until a terminal status is reached.
        let status = if self.stream {
            // No event callback is supplied; the turbofish only pins the
            // generic callback type so `None` type-checks.
            job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
                self.timeout.as_secs_f64(),
                None,
            )
            .await
            .map_err(Error::Core)?
        } else {
            // Poll every 15 seconds up to the configured timeout.
            job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
                .await
                .map_err(Error::Core)?
        };

        // Fetch final results regardless of how completion was observed.
        let results = job.get_results().await.map_err(Error::Core)?;

        Ok(OptimizeResult {
            job_id,
            status,
            results,
        })
    }
}
646
/// Result of a prompt optimization job.
///
/// Returned by [`OptimizeBuilder::run`]; accessor methods expose the most
/// commonly needed fields.
#[derive(Debug, Clone)]
pub struct OptimizeResult {
    /// Job ID assigned at submission time.
    pub job_id: String,
    /// Final job status as reported by the orchestration layer.
    pub status: synth_ai_core::orchestration::PromptLearningResult,
    /// Optimization results (best prompt, rewards, ranked candidates).
    pub results: PromptResults,
}
657
658impl OptimizeResult {
659    /// Get the best prompt if available.
660    pub fn best_prompt(&self) -> Option<&str> {
661        self.results.best_prompt.as_deref()
662    }
663
664    /// Get the best reward if available.
665    pub fn best_reward(&self) -> Option<f64> {
666        self.results.best_reward
667    }
668
669    /// Get all top prompts.
670    pub fn top_prompts(&self) -> &[RankedPrompt] {
671        &self.results.top_prompts
672    }
673
674    /// Check if the job succeeded.
675    pub fn is_success(&self) -> bool {
676        self.status.status.is_success()
677    }
678}
679
680// =============================================================================
681// Eval Builder
682// =============================================================================
683
/// Builder for evaluation jobs.
///
/// Created via [`Synth::eval`]; configure with the setter methods and
/// execute with [`EvalBuilder::run`].
pub struct EvalBuilder {
    // Credentials captured from the parent Synth client.
    api_key: String,
    base_url: String,
    // Required: URL of the task app to evaluate.
    task_app_url: Option<String>,
    // Seeds to evaluate on (empty by default).
    seeds: Vec<i64>,
    // Overall job deadline (default: 30 minutes).
    timeout: Duration,
}
692
impl EvalBuilder {
    // Crate-internal constructor; users obtain a builder via Synth::eval.
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            seeds: vec![],
            // Default deadline: 30 minutes.
            timeout: Duration::from_secs(1800),
        }
    }

    /// Set the task app URL (required before calling [`EvalBuilder::run`]).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Set the seeds to evaluate on (replaces any previously set seeds).
    pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
        self.seeds = seeds;
        self
    }

    /// Set the timeout for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Run the evaluation and wait for completion.
    ///
    /// # Errors
    ///
    /// * [`Error::Config`] if no task app URL was set.
    /// * [`Error::Core`] for client-construction, submission, or polling
    ///   failures.
    pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        // A fresh core client is built from the captured credentials rather
        // than borrowing the parent Synth, since the builder owns its config.
        let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
            .map_err(Error::Core)?;

        // Fields left as None/default defer to backend defaults.
        let request = EvalJobRequest {
            app_id: None,
            task_app_url,
            task_app_worker_token: None,
            task_app_api_key: None,
            env_name: "default".to_string(),
            env_config: None,
            verifier_config: None,
            seeds: self.seeds,
            policy: synth_ai_core::api::PolicyConfig::default(),
            max_concurrent: None,
            timeout: None,
        };

        let job_id = client.eval().submit(request).await.map_err(Error::Core)?;

        // Poll every 10 seconds until terminal or the deadline elapses.
        let status = client
            .eval()
            .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
            .await
            .map_err(Error::Core)?;

        Ok(status)
    }
}
756
757// =============================================================================
758// Convenience Functions
759// =============================================================================
760
761/// Create a client from environment and run a quick optimization.
762///
763/// This is a convenience function for simple use cases.
764pub async fn optimize(task_app_url: &str) -> Result<OptimizeResult> {
765    Synth::from_env()?
766        .optimize()
767        .task_app(task_app_url)
768        .run()
769        .await
770}
771
772/// Create a client from environment and run a quick evaluation.
773pub async fn eval(task_app_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
774    Synth::from_env()?
775        .eval()
776        .task_app(task_app_url)
777        .seeds(seeds)
778        .run()
779        .await
780}
781
#[cfg(test)]
mod tests {
    use super::*;

    /// `Synth::from_env` must never panic, whether or not the API key is
    /// present in the environment.
    ///
    /// The previous assertion (`err.is_err() || err.is_ok()`) was a
    /// tautology that could never fail; simply exercising the call (and the
    /// Debug impl when construction succeeds) is the real smoke test.
    #[test]
    fn test_synth_debug() {
        if let Ok(synth) = Synth::from_env() {
            // Debug formatting must succeed and go through the masked key.
            let rendered = format!("{:?}", synth);
            assert!(rendered.contains("Synth"));
        }
    }

    #[test]
    fn test_version() {
        // CARGO_PKG_VERSION is always non-empty for a built crate.
        assert!(!VERSION.is_empty());
    }
}