1use std::env;
29use std::time::Duration;
30use thiserror::Error;
31
32pub use synth_ai_core as core;
34pub use synth_ai_core_types as types;
35
36pub use synth_ai_core::{
38 SynthClient as CoreClient,
40 api::{PolicyJobStatus, EvalJobStatus, GepaJobRequest, MiproJobRequest, EvalJobRequest},
41 api::{GraphCompletionRequest, GraphCompletionResponse, VerifierOptions, VerifierResponse},
42 api::PromptLearningResult,
43 orchestration::{PromptLearningJob, PromptResults, RankedPrompt},
45 orchestration::{GEPAProgress, ProgressTracker, CandidateInfo},
46 StreamType, StreamMessage, StreamConfig, StreamEndpoints,
48 StreamHandler, JobStreamer,
49 JobType, Rubric, Criterion, ObjectiveSpec,
51 Artifact, ContextOverride,
52 SessionTracer, TracingEvent,
54 tunnels::types::{TunnelBackend, TunnelHandle},
56 tunnels::open_tunnel,
57 tunnels::errors::TunnelError,
58 CoreError,
60};
61
/// Crate version string, captured from `Cargo.toml` at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// Default API endpoint used when no base URL is supplied explicitly.
pub const DEFAULT_BASE_URL: &str = "https://api.usesynth.ai";

/// Environment variable read by [`Synth::from_env`] for the API key.
pub const API_KEY_ENV: &str = "SYNTH_API_KEY";
70
/// Errors surfaced by this crate's high-level API.
///
/// Core and tunnel errors from the underlying crates convert
/// automatically via `#[from]`.
#[derive(Debug, Error)]
pub enum Error {
    /// No API key was provided and the `SYNTH_API_KEY` env var is unset.
    #[error("API key not found. Set {API_KEY_ENV} or provide explicitly.")]
    MissingApiKey,

    /// Invalid or incomplete client/builder configuration (e.g. a
    /// required task app URL was never set).
    #[error("configuration error: {0}")]
    Config(String),

    /// Error propagated from the core client.
    #[error(transparent)]
    Core(#[from] synth_ai_core::CoreError),

    /// Error propagated from the tunnel subsystem.
    #[error(transparent)]
    Tunnel(#[from] TunnelError),

    /// A job could not be submitted to the backend.
    #[error("job submission failed: {0}")]
    Submission(String),

    /// A submitted job reached a failed terminal state.
    #[error("job failed: {0}")]
    JobFailed(String),

    /// An operation exceeded its allotted duration.
    #[error("timeout after {0:?}")]
    Timeout(Duration),
}
106
/// Convenience alias: this crate's `Result` with [`Error`] as the error type.
pub type Result<T> = std::result::Result<T, Error>;
109
/// High-level entry point for the Synth API.
///
/// Holds the credentials plus a configured core client, and hands out
/// builders ([`OptimizeBuilder`], [`EvalBuilder`]) pre-filled with them.
pub struct Synth {
    /// Raw API key; only ever exposed in masked form
    /// (see [`Synth::api_key_masked`] and the custom `Debug` impl).
    api_key: String,
    /// Base URL all API calls are made against.
    base_url: String,
    /// Underlying core client that performs the actual HTTP work.
    client: synth_ai_core::SynthClient,
}
133
impl Synth {
    /// Builds a client from an explicit API key and optional base URL.
    ///
    /// Falls back to [`DEFAULT_BASE_URL`] when `base_url` is `None`.
    ///
    /// # Errors
    /// Returns [`Error::Core`] if the underlying core client cannot be
    /// constructed.
    pub fn new(api_key: impl Into<String>, base_url: Option<&str>) -> Result<Self> {
        let api_key = api_key.into();
        let base_url = base_url.unwrap_or(DEFAULT_BASE_URL).to_string();

        let client = synth_ai_core::SynthClient::new(&api_key, Some(&base_url))
            .map_err(Error::Core)?;

        Ok(Self {
            api_key,
            base_url,
            client,
        })
    }

    /// Builds a client from the environment: `SYNTH_API_KEY` (required)
    /// and `SYNTH_BASE_URL` (optional override of the default endpoint).
    ///
    /// # Errors
    /// [`Error::MissingApiKey`] when `SYNTH_API_KEY` is unset, plus any
    /// error from [`Synth::new`].
    pub fn from_env() -> Result<Self> {
        let api_key = env::var(API_KEY_ENV).map_err(|_| Error::MissingApiKey)?;
        let base_url = env::var("SYNTH_BASE_URL").ok();
        Self::new(api_key, base_url.as_deref())
    }

    /// Returns the API key in masked form, safe for logging.
    pub fn api_key_masked(&self) -> String {
        synth_ai_core::auth::mask_str(&self.api_key)
    }

    /// The base URL this client talks to.
    pub fn base_url(&self) -> &str {
        &self.base_url
    }

    /// Direct access to the underlying core client for advanced use.
    pub fn core(&self) -> &synth_ai_core::SynthClient {
        &self.client
    }

    /// Starts a prompt-optimization builder pre-filled with this
    /// client's credentials.
    pub fn optimize(&self) -> OptimizeBuilder {
        OptimizeBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Starts an evaluation builder pre-filled with this client's
    /// credentials.
    pub fn eval(&self) -> EvalBuilder {
        EvalBuilder::new(self.api_key.clone(), self.base_url.clone())
    }

    /// Opens a tunnel exposing local `port` through the given backend.
    ///
    /// # Errors
    /// [`Error::Tunnel`] if the tunnel cannot be established.
    pub async fn tunnel(&self, port: u16, backend: TunnelBackend) -> Result<TunnelHandle> {
        synth_ai_core::tunnels::open_tunnel(
            backend,
            port,
            Some(self.api_key.clone()),
            Some(self.base_url.clone()),
            // NOTE(review): the three positional bool flags below are opaque
            // at this call site — confirm their meaning against the
            // `open_tunnel` signature before relying on them.
            false,
            true,
            false,
        )
        .await
        .map_err(Error::Tunnel)
    }

    /// Submits a GEPA prompt-learning job; returns the backend job id.
    ///
    /// # Errors
    /// [`Error::Core`] on submission failure.
    pub async fn submit_gepa(&self, request: GepaJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_gepa(request)
            .await
            .map_err(Error::Core)
    }

    /// Submits a MIPRO prompt-learning job; returns the backend job id.
    ///
    /// # Errors
    /// [`Error::Core`] on submission failure.
    pub async fn submit_mipro(&self, request: MiproJobRequest) -> Result<String> {
        self.client
            .jobs()
            .submit_mipro(request)
            .await
            .map_err(Error::Core)
    }

    /// Fetches the current status/result snapshot of a job.
    ///
    /// # Errors
    /// [`Error::Core`] if the lookup fails.
    pub async fn get_job_status(&self, job_id: &str) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .get_status(job_id)
            .await
            .map_err(Error::Core)
    }

    /// Polls a job every `interval_secs` until it reaches a terminal
    /// state or `timeout_secs` elapses.
    ///
    /// # Errors
    /// [`Error::Core`] on polling failure or timeout (as reported by the
    /// core client).
    pub async fn poll_job(
        &self,
        job_id: &str,
        timeout_secs: f64,
        interval_secs: f64,
    ) -> Result<PromptLearningResult> {
        self.client
            .jobs()
            .poll_until_complete(job_id, timeout_secs, interval_secs)
            .await
            .map_err(Error::Core)
    }

    /// Requests cancellation of a job, with an optional reason string.
    ///
    /// # Errors
    /// [`Error::Core`] if the cancel request fails.
    pub async fn cancel_job(&self, job_id: &str, reason: Option<&str>) -> Result<()> {
        self.client
            .jobs()
            .cancel(job_id, reason)
            .await
            .map_err(Error::Core)
    }

    /// Runs a graph completion request.
    ///
    /// # Errors
    /// [`Error::Core`] if the request fails.
    pub async fn complete(&self, request: GraphCompletionRequest) -> Result<GraphCompletionResponse> {
        self.client
            .graphs()
            .complete(request)
            .await
            .map_err(Error::Core)
    }

    /// Scores a trace against a rubric via the verifier endpoint.
    ///
    /// `trace` and `rubric` are passed through as raw JSON values; their
    /// expected schemas are defined by the backend — see the core crate.
    ///
    /// # Errors
    /// [`Error::Core`] if verification fails.
    pub async fn verify(
        &self,
        trace: serde_json::Value,
        rubric: serde_json::Value,
        options: Option<VerifierOptions>,
    ) -> Result<VerifierResponse> {
        self.client
            .graphs()
            .verify(trace, rubric, options)
            .await
            .map_err(Error::Core)
    }
}
292
293impl std::fmt::Debug for Synth {
294 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
295 f.debug_struct("Synth")
296 .field("api_key", &self.api_key_masked())
297 .field("base_url", &self.base_url)
298 .finish()
299 }
300}
301
/// Fluent builder for prompt-optimization jobs.
///
/// Obtained via [`Synth::optimize`]; configure with the setter methods
/// and execute with [`OptimizeBuilder::run`].
pub struct OptimizeBuilder {
    /// Credentials copied from the owning [`Synth`] client.
    api_key: String,
    base_url: String,
    /// Target task app URL — required; `run` fails without it.
    task_app_url: Option<String>,
    /// Optional model override forwarded in the job config.
    model: Option<String>,
    /// Optional candidate-count override forwarded in the job config.
    num_candidates: Option<u32>,
    /// Overall wall-clock budget for the job (default: 1 hour).
    timeout: Duration,
    /// Stream progress events instead of polling (default: true).
    stream: bool,
}
316
impl OptimizeBuilder {
    /// Internal constructor; callers obtain a builder via [`Synth::optimize`].
    /// Defaults: no task app, no model override, 1-hour timeout, streaming on.
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            model: None,
            num_candidates: None,
            timeout: Duration::from_secs(3600),
            stream: true,
        }
    }

    /// Sets the task app URL (required before calling [`Self::run`]).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Overrides the model used for optimization.
    pub fn model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    /// Overrides the number of candidate prompts to explore.
    pub fn num_candidates(mut self, n: u32) -> Self {
        self.num_candidates = Some(n);
        self
    }

    /// Sets the overall wall-clock budget for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Enables or disables event streaming; when disabled, `run` falls
    /// back to polling.
    pub fn stream(mut self, enabled: bool) -> Self {
        self.stream = enabled;
        self
    }

    /// Submits the job and waits for completion.
    ///
    /// Streams progress events when streaming is enabled; otherwise
    /// polls every 15 seconds. Either way, waiting is bounded by the
    /// configured timeout, after which the results are fetched.
    ///
    /// # Errors
    /// [`Error::Config`] if no task app URL was set; [`Error::Core`] for
    /// job construction, submission, streaming/polling, or result-fetch
    /// failures.
    pub async fn run(self) -> Result<OptimizeResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        // Assemble the job config as loose JSON; only explicitly-set
        // optional fields are included.
        let mut config = serde_json::json!({
            "task_app_url": task_app_url,
        });

        if let Some(model) = &self.model {
            config["model"] = serde_json::json!(model);
        }
        if let Some(n) = self.num_candidates {
            config["num_candidates"] = serde_json::json!(n);
        }

        let mut job = PromptLearningJob::from_dict(
            config,
            Some(&self.api_key),
            Some(&self.base_url),
        )
        .map_err(Error::Core)?;

        let job_id = job.submit().await.map_err(Error::Core)?;

        let status = if self.stream {
            // Turbofish pins the (unused) event-callback type so `None`
            // can be passed for the handler.
            job.stream_until_complete::<fn(&synth_ai_core::orchestration::ParsedEvent)>(
                self.timeout.as_secs_f64(),
                None,
            )
            .await
            .map_err(Error::Core)?
        } else {
            job.poll_until_complete(self.timeout.as_secs_f64(), 15.0)
                .await
                .map_err(Error::Core)?
        };

        let results = job.get_results().await.map_err(Error::Core)?;

        Ok(OptimizeResult {
            job_id,
            status,
            results,
        })
    }
}
412
/// Outcome of a completed optimization run: job id, terminal status,
/// and the fetched prompt results.
#[derive(Debug, Clone)]
pub struct OptimizeResult {
    /// Backend identifier of the submitted job.
    pub job_id: String,
    /// Terminal status snapshot returned when the job finished.
    pub status: synth_ai_core::orchestration::PromptLearningResult,
    /// Prompt results fetched after completion.
    pub results: PromptResults,
}
423
424impl OptimizeResult {
425 pub fn best_prompt(&self) -> Option<&str> {
427 self.results.best_prompt.as_deref()
428 }
429
430 pub fn best_score(&self) -> Option<f64> {
432 self.results.best_score
433 }
434
435 pub fn top_prompts(&self) -> &[RankedPrompt] {
437 &self.results.top_prompts
438 }
439
440 pub fn is_success(&self) -> bool {
442 self.status.status.is_success()
443 }
444}
445
/// Fluent builder for evaluation jobs.
///
/// Obtained via [`Synth::eval`]; configure with the setter methods and
/// execute with [`EvalBuilder::run`].
pub struct EvalBuilder {
    /// Credentials copied from the owning [`Synth`] client.
    api_key: String,
    base_url: String,
    /// Target task app URL — required; `run` fails without it.
    task_app_url: Option<String>,
    /// Seeds to evaluate (default: empty).
    seeds: Vec<i64>,
    /// Overall wall-clock budget for the job (default: 30 minutes).
    timeout: Duration,
}
458
impl EvalBuilder {
    /// Internal constructor; callers obtain a builder via [`Synth::eval`].
    /// Defaults: no task app, no seeds, 30-minute timeout.
    fn new(api_key: String, base_url: String) -> Self {
        Self {
            api_key,
            base_url,
            task_app_url: None,
            seeds: vec![],
            timeout: Duration::from_secs(1800),
        }
    }

    /// Sets the task app URL (required before calling [`Self::run`]).
    pub fn task_app(mut self, url: impl Into<String>) -> Self {
        self.task_app_url = Some(url.into());
        self
    }

    /// Replaces the seed list used for evaluation.
    pub fn seeds(mut self, seeds: Vec<i64>) -> Self {
        self.seeds = seeds;
        self
    }

    /// Sets the overall wall-clock budget for the job.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = timeout;
        self
    }

    /// Submits the evaluation job and polls every 10 seconds until it
    /// completes or the configured timeout elapses.
    ///
    /// A fresh core client is built from the stored credentials; the
    /// environment name is fixed to `"default"` and the policy config
    /// uses its defaults.
    ///
    /// # Errors
    /// [`Error::Config`] if no task app URL was set; [`Error::Core`] for
    /// client construction, submission, or polling failures.
    pub async fn run(self) -> Result<synth_ai_core::api::EvalResult> {
        let task_app_url = self
            .task_app_url
            .ok_or_else(|| Error::Config("task_app URL is required".into()))?;

        let client = synth_ai_core::SynthClient::new(&self.api_key, Some(&self.base_url))
            .map_err(Error::Core)?;

        let request = EvalJobRequest {
            task_app_url,
            task_app_api_key: None,
            env_name: "default".to_string(),
            seeds: self.seeds,
            policy: synth_ai_core::api::PolicyConfig::default(),
            max_concurrent: None,
        };

        let job_id = client
            .eval()
            .submit(request)
            .await
            .map_err(Error::Core)?;

        let status = client
            .eval()
            .poll_until_complete(&job_id, self.timeout.as_secs_f64(), 10.0)
            .await
            .map_err(Error::Core)?;

        Ok(status)
    }
}
521
522pub async fn optimize(task_app_url: &str) -> Result<OptimizeResult> {
530 Synth::from_env()?.optimize().task_app(task_app_url).run().await
531}
532
533pub async fn eval(task_app_url: &str, seeds: Vec<i64>) -> Result<synth_ai_core::api::EvalResult> {
535 Synth::from_env()?.eval().task_app(task_app_url).seeds(seeds).run().await
536}
537
#[cfg(test)]
mod tests {
    use super::*;

    /// `Synth::from_env` must never panic, and when construction
    /// succeeds its `Debug` output must come from the masking impl.
    ///
    /// The original assertion (`err.is_err() || err.is_ok()`) was a
    /// tautology that could never fail and never touched `Debug`.
    #[test]
    fn test_synth_debug() {
        match Synth::from_env() {
            Ok(client) => {
                // Exercise the custom Debug impl: struct name and the
                // base_url field must appear in the rendering.
                let rendered = format!("{client:?}");
                assert!(rendered.contains("Synth"));
                assert!(rendered.contains("base_url"));
            }
            Err(err) => {
                // Without SYNTH_API_KEY the expected failure is
                // MissingApiKey; client construction itself may also
                // fail with a core error.
                assert!(matches!(err, Error::MissingApiKey | Error::Core(_)));
            }
        }
    }

    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }
}
553}