// synth_ai crate root (lib.rs)
//! # Synth AI SDK
//!
//! Ergonomic Rust SDK for Synth AI - serverless post-training APIs.
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use synth_ai::Synth;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), synth_ai::Error> {
//!     // Create client from SYNTH_API_KEY env var
//!     let synth = Synth::from_env()?;
//!
//!     // Submit a prompt optimization job
//!     let result = synth
//!         .optimize()
//!         .task_app("https://my-task-app.com")
//!         .model("gpt-4o")
//!         .run()
//!         .await?;
//!
//!     println!("Best prompt: {:?}", result.best_prompt());
//!     Ok(())
//! }
//! ```

28use std::env;
29use std::time::Duration;
30use thiserror::Error;
31
32// Re-export core for advanced usage
33pub use synth_ai_core as core;
34pub use synth_ai_core_types as types;
35
36// Re-export commonly used core types
37pub use synth_ai_core::{
38    // API types
39    SynthClient as CoreClient,
40    api::{PolicyJobStatus, EvalJobStatus, GepaJobRequest, MiproJobRequest, EvalJobRequest},
41    api::{GraphCompletionRequest, GraphCompletionResponse, VerifierOptions, VerifierResponse},
42    api::PromptLearningResult,
43    // Orchestration
44    orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
45    orchestration::{GEPAProgress, ProgressTracker, CandidateInfo},
46    // Streaming
47    StreamType, StreamMessage, StreamConfig, StreamEndpoints,
48    StreamHandler, JobStreamer,
49    // Data types
50    JobType, Rubric, Criterion, ObjectiveSpec,
51    Artifact, ContextOverride,
52    // Tracing
53    SessionTracer, TracingEvent,
54    // Tunnels
55    tunnels::types::{TunnelBackend, TunnelHandle},
56    tunnels::open_tunnel,
57    tunnels::errors::TunnelError,
58    // Errors
59    CoreError,
60};
61
62/// SDK version.
63pub const VERSION: &str = env!("CARGO_PKG_VERSION");
64
65/// Default Synth API base URL.
66pub const DEFAULT_BASE_URL: &str = "https://api.usesynth.ai";
67
68/// Environment variable for API key.
69pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
70
71// =============================================================================
72// Error Types
73// =============================================================================
74
75/// SDK error type.
76#[derive(Debug, Error)]
77pub enum Error {
78    /// Missing API key.
79    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
80    MissingApiKey,
81
82    /// Configuration error.
83    #[error("configuration error: {0}")]
84    Config(String),
85
86    /// Core error passthrough.
87    #[error(transparent)]
88    Core(#[from] synth_ai_core::CoreError),
89
90    /// Tunnel error.
91    #[error(transparent)]
92    Tunnel(#[from] TunnelError),
93
94    /// Job submission failed.
95    #[error("job submission failed: {0}")]
96    Submission(String),
97
98    /// Job execution failed.
99    #[error("job failed: {0}")]
100    JobFailed(String),
101
102    /// Timeout waiting for job.
103    #[error("timeout after {0:?}")]
104    Timeout(Duration),
105}
106
107/// Result type alias.
108pub type Result<T> = std::result::Result<T, Error>;
109
110// =============================================================================
111// Main Client
112// =============================================================================
113
114/// Main Synth AI client.
115///
116/// This is the primary entry point for interacting with Synth AI APIs.
117///
118/// # Example
119///
120/// ```rust,ignore
121/// use synth_ai::Synth;
122///
123/// let synth = Synth::from_env()?;
124///
125/// // Or with explicit credentials
126/// let synth = Synth::new("sk_live_...", None)?;
127/// ```
128pub struct Synth {
129    api_key: String,
130    base_url: String,
131    client: synth_ai_core::SynthClient,
132}
133
134impl Synth {
135    /// Create a new Synth client with explicit credentials.
136    ///
137    /// # Arguments
138    ///
139    /// * `api_key` - Your Synth API key
140    /// * `base_url` - Optional custom API base URL
141    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
142        let api_key = api_key.into();
143        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();
144
145        let client = synth_ai_core::SynthClient::new(&api_key, Some(&base_url))
146            .map_err(Error::Core)?;
147
148        Ok(Self {
149            api_key,
150            base_url,
151            client,
152        })
153    }
154
155    /// Create a client from the `SYNTH_API_KEY` environment variable.
156    pub fn from_env() -> Result<Self> {
157        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
158        let base_url = env::var("SYNTH_BASE_URL").ok();
159        Self::new(api_key, base_url.as_deref())
160    }
161
162    /// Get the API key (masked for display).
163    pub fn api_key_masked(&self) -> String {
164        synth_ai_core::auth::mask_str(&self.api_key)
165    }
166
167    /// Get the base URL.
168    pub fn base_url(&self) -> &str {
169        &self.base_url
170    }
171
172    /// Access the underlying core client.
173    pub fn core(&self) -> &synth_ai_core::SynthClient {
174        &self.client
175    }
176
177    // -------------------------------------------------------------------------
178    // High-level API
179    // -------------------------------------------------------------------------
180
181    /// Start a prompt optimization job.
182    ///
183    /// Returns a builder to configure the optimization.
184    pub fn optimize(&self) -> OptimizeBuilder {
185        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
186    }
187
188    /// Start an evaluation job.
189    ///
190    /// Returns a builder to configure the evaluation.
191    pub fn eval(&self) -> EvalBuilder {
192        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
193    }
194
195    /// Open a tunnel to a local port.
196    ///
197    /// # Arguments
198    ///
199    /// * `port` - Local port to tunnel
200    /// * `backend` - Tunnel backend (cloudflare_managed_lease recommended)
201    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
202        synth_ai_core::tunnels::open_tunnel(
203            backend,
204            port,
205            Some(self.api_key.clone()),
206            Some(self.base_url.clone()),
207            false,
208            true,
209            false,
210        )
211        .await
212        .map_err(Error::Tunnel)
213    }
214
215    // -------------------------------------------------------------------------
216    // Direct API access
217    // -------------------------------------------------------------------------
218
219    /// Submit a raw GEPA job.
220    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
221        self.client
222            .jobs()
223            .submit_gepa(request)
224            .await
225            .map_err(Error::Core)
226    }
227
228    /// Submit a raw MIPRO job.
229    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
230        self.client
231            .jobs()
232            .submit_mipro(request)
233            .await
234            .map_err(Error::Core)
235    }
236
237    /// Get job status.
238    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
239        self.client
240            .jobs()
241            .get_status(job_id)
242            .await
243            .map_err(Error::Core)
244    }
245
246    /// Poll job until complete.
247    pub async fn poll_job(
248        &self,
249        job_id: &str,
250        timeout_secs: f64,
251        interval_secs: f64,
252    ) -> Result<PromptLearningResult> {
253        self.client
254            .jobs()
255            .poll_until_complete(job_id, timeout_secs, interval_secs)
256            .await
257            .map_err(Error::Core)
258    }
259
260    /// Cancel a job.
261    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
262        self.client
263            .jobs()
264            .cancel(job_id, reason)
265            .await
266            .map_err(Error::Core)
267    }
268
269    /// Run graph completion.
270    pub async fn complete(&self, request: GraphCompletionRequest) -> Result<GraphCompletionResponse> {
271        self.client
272            .graphs()
273            .complete(request)
274            .await
275            .map_err(Error::Core)
276    }
277
278    /// Run verifier on a trace.
279    pub async fn verify(
280        &self,
281        trace: serde_json::Value,
282        rubric: serde_json::Value,
283        options: Option<VerifierOptions>,
284    ) -> Result<VerifierResponse> {
285        self.client
286            .graphs()
287            .verify(trace, rubric, options)
288            .await
289            .map_err(Error::Core)
290    }
291}
292
293impl std::fmt::Debug for Synth {
294    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
295        f.debug_struct("Synth")
296            .field("api_key", &self.api_key_masked())
297            .field("base_url", &self.base_url)
298            .finish()
299    }
300}
301
302// =============================================================================
303// Optimize Builder
304// =============================================================================
305
306/// Builder for prompt optimization jobs.
307pub struct OptimizeBuilder {
308    api_key: String,
309    base_url: String,
310    task_app_url: Option<String>,
311    model: Option<String>,
312    num_candidates: Option<u32>,
313    timeout: Duration,
314    stream: bool,
315}
316
317impl OptimizeBuilder {
318    fn new(api_key: String, base_url: String) -> Self {
319        Self {
320            api_key,
321            base_url,
322            task_app_url: None,
323            model: None,
324            num_candidates: None,
325            timeout: Duration::from_secs(3600),
326            stream: true,
327        }
328    }
329
330    /// Set the task app URL.
331    pub fn task_app(mut self, url: impl Into<String>) -> Self {
332        self.task_app_url = Some(url.into());
333        self
334    }
335
336    /// Set the model to optimize for.
337    pub fn model(mut self, model: impl Into<String>) -> Self {
338        self.model = Some(model.into());
339        self
340    }
341
342    /// Set the number of candidates to generate.
343    pub fn num_candidates(mut self, n: u32) -> Self {
344        self.num_candidates = Some(n);
345        self
346    }
347
348    /// Set the timeout for the job.
349    pub fn timeout(mut self, timeout: Duration) -> Self {
350        self.timeout = timeout;
351        self
352    }
353
354    /// Enable or disable streaming (default: true).
355    pub fn stream(mut self, enabled: bool) -> Self {
356        self.stream = enabled;
357        self
358    }
359
360    /// Run the optimization job and wait for completion.
361    pub async fn run(self) -> Result<OptimizeResult> {
362        let task_app_url = self
363            .task_app_url
364            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;
365
366        // Build config
367        let mut config = serde_json::json!({
368            "task_app_url": task_app_url,
369        });
370
371        if let Some(model) = &self.model {
372            config["model"] = serde_json::json!(model);
373        }
374        if let Some(n) = self.num_candidates {
375            config["num_candidates"] = serde_json::json!(n);
376        }
377
378        // Create and run job
379        let mut job = PromptLearningJob::from_dict(
380            config,
381            Some(&self.api_key),
382            Some(&self.base_url),
383        )
384        .map_err(Error::Core)?;
385
386        let job_id = job.submit().await.map_err(Error::Core)?;
387
388        // Stream or poll
389        let status = if self.stream {
390            job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
391                self.timeout.as_secs_f64(),
392                None,
393            )
394            .await
395            .map_err(Error::Core)?
396        } else {
397            job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
398                .await
399                .map_err(Error::Core)?
400        };
401
402        // Get results
403        let results = job.get_results().await.map_err(Error::Core)?;
404
405        Ok(OptimizeResult {
406            job_id,
407            status,
408            results,
409        })
410    }
411}
412
413/// Result of a prompt optimization job.
414#[derive(Debug, Clone)]
415pub struct OptimizeResult {
416    /// Job ID.
417    pub job_id: String,
418    /// Final job status.
419    pub status: synth_ai_core::orchestration::PromptLearningResult,
420    /// Optimization results.
421    pub results: PromptResults,
422}
423
424impl OptimizeResult {
425    /// Get the best prompt if available.
426    pub fn best_prompt(&self) -> Option<&str> {
427        self.results.best_prompt.as_deref()
428    }
429
430    /// Get the best score if available.
431    pub fn best_score(&self) -> Option<f64> {
432        self.results.best_score
433    }
434
435    /// Get all top prompts.
436    pub fn top_prompts(&self) -> &[RankedPrompt] {
437        &self.results.top_prompts
438    }
439
440    /// Check if the job succeeded.
441    pub fn is_success(&self) -> bool {
442        self.status.status.is_success()
443    }
444}
445
446// =============================================================================
447// Eval Builder
448// =============================================================================
449
450/// Builder for evaluation jobs.
451pub struct EvalBuilder {
452    api_key: String,
453    base_url: String,
454    task_app_url: Option<String>,
455    seeds: Vec<i64>,
456    timeout: Duration,
457}
458
459impl EvalBuilder {
460    fn new(api_key: String, base_url: String) -> Self {
461        Self {
462            api_key,
463            base_url,
464            task_app_url: None,
465            seeds: vec![],
466            timeout: Duration::from_secs(1800),
467        }
468    }
469
470    /// Set the task app URL.
471    pub fn task_app(mut self, url: impl Into<String>) -> Self {
472        self.task_app_url = Some(url.into());
473        self
474    }
475
476    /// Set the seeds to evaluate on.
477    pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
478        self.seeds = seeds;
479        self
480    }
481
482    /// Set the timeout for the job.
483    pub fn timeout(mut self, timeout: Duration) -> Self {
484        self.timeout = timeout;
485        self
486    }
487
488    /// Run the evaluation and wait for completion.
489    pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
490        let task_app_url = self
491            .task_app_url
492            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;
493
494        let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
495            .map_err(Error::Core)?;
496
497        let request = EvalJobRequest {
498            task_app_url,
499            task_app_api_key: None,
500            env_name: "default".to_string(),
501            seeds: self.seeds,
502            policy: synth_ai_core::api::PolicyConfig::default(),
503            max_concurrent: None,
504        };
505
506        let job_id = client
507            .eval()
508            .submit(request)
509            .await
510            .map_err(Error::Core)?;
511
512        let status = client
513            .eval()
514            .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
515            .await
516            .map_err(Error::Core)?;
517
518        Ok(status)
519    }
520}
521
522// =============================================================================
523// Convenience Functions
524// =============================================================================
525
526/// Create a client from environment and run a quick optimization.
527///
528/// This is a convenience function for simple use cases.
529pub async fn optimize(task_app_url: &str) -> Result<OptimizeResult> {
530    Synth::from_env()?.optimize().task_app(task_app_url).run().await
531}
532
533/// Create a client from environment and run a quick evaluation.
534pub async fn eval(task_app_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
535    Synth::from_env()?.eval().task_app(task_app_url).seeds(seeds).run().await
536}
537
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_synth_debug() {
        // `from_env` must never panic whether or not SYNTH_API_KEY is set.
        // (The previous `is_err() || is_ok()` assertion was a tautology.)
        let _ = Synth::from_env();
    }

    #[test]
    fn test_error_display() {
        // Pin the user-facing error message format.
        let err = Error::Config("bad".into());
        assert_eq!(err.to_string(), "configuration error: bad");
    }

    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }
}