1use reqwest::Client;
2use std::time::{Duration, Instant};
3use tracing::{debug, error, info, warn};
4
5use super::types::{CreatePipeRequest, CreatePipeResponse, Message, PipeRequest, PipeResponse};
6use crate::config::{LangbaseConfig, RequestConfig};
7use crate::error::{LangbaseError, LangbaseResult};
8use crate::prompts::{
9 DIVERGENT_REASONING_PROMPT, LINEAR_REASONING_PROMPT, REFLECTION_PROMPT, TREE_REASONING_PROMPT,
10};
11
/// HTTP client for the Langbase REST API.
///
/// Holds the configured base URL (normalized, no trailing slash), the API key
/// used as a bearer token, and the retry/timeout policy applied by
/// [`call_pipe`]. `Clone` is derived; reqwest's `Client` is internally
/// reference-counted, so clones share the same connection pool.
#[derive(Clone)]
pub struct LangbaseClient {
    // Underlying HTTP client, built with the configured request timeout.
    client: Client,
    // API base URL with trailing '/' stripped (see `new`).
    base_url: String,
    // Bearer token sent in the Authorization header of every request.
    api_key: String,
    // Timeout and retry settings consulted by `call_pipe`/`execute_request`.
    request_config: RequestConfig,
}
20
21impl LangbaseClient {
22 pub fn new(config: &LangbaseConfig, request_config: RequestConfig) -> LangbaseResult<Self> {
24 let client = Client::builder()
25 .timeout(Duration::from_millis(request_config.timeout_ms))
26 .build()
27 .map_err(LangbaseError::Http)?;
28
29 Ok(Self {
30 client,
31 base_url: config.base_url.trim_end_matches('/').to_string(),
32 api_key: config.api_key.clone(),
33 request_config,
34 })
35 }
36
37 pub async fn call_pipe(&self, request: PipeRequest) -> LangbaseResult<PipeResponse> {
39 let url = format!("{}/v1/pipes/run", self.base_url);
40 let pipe_name = request.name.clone();
41
42 info!(
43 pipe = %pipe_name,
44 url = %url,
45 messages_count = request.messages.len(),
46 "Calling Langbase pipe"
47 );
48
49 let mut last_error = None;
50 let mut retries = 0;
51
52 while retries <= self.request_config.max_retries {
53 if retries > 0 {
54 let delay = Duration::from_millis(
55 self.request_config.retry_delay_ms * (2_u64.pow(retries - 1)),
56 );
57 warn!(
58 pipe = %pipe_name,
59 retry = retries,
60 delay_ms = delay.as_millis(),
61 "Retrying Langbase request"
62 );
63 tokio::time::sleep(delay).await;
64 }
65
66 let start = Instant::now();
67
68 match self.execute_request(&url, &request).await {
69 Ok(response) => {
70 let latency = start.elapsed();
71 info!(
72 pipe = %pipe_name,
73 latency_ms = latency.as_millis(),
74 "Langbase pipe call succeeded"
75 );
76 return Ok(response);
77 }
78 Err(e) => {
79 let latency = start.elapsed();
80 error!(
81 pipe = %pipe_name,
82 error = %e,
83 latency_ms = latency.as_millis(),
84 retry = retries,
85 "Langbase pipe call failed"
86 );
87 last_error = Some(e);
88 retries += 1;
89 }
90 }
91 }
92
93 Err(LangbaseError::Unavailable {
94 message: last_error
95 .map(|e| e.to_string())
96 .unwrap_or_else(|| "Unknown error".to_string()),
97 retries,
98 })
99 }
100
    /// Performs a single (no-retry) POST to the pipe-run endpoint.
    ///
    /// Error mapping, in order:
    /// - transport timeout -> `LangbaseError::Timeout` (reports the configured budget)
    /// - any other transport error -> `LangbaseError::Http`
    /// - non-2xx status -> `LangbaseError::Api` with the response body (best effort)
    /// - unparseable success body -> `LangbaseError::InvalidResponse`
    async fn execute_request(
        &self,
        url: &str,
        request: &PipeRequest,
    ) -> LangbaseResult<PipeResponse> {
        debug!(
            pipe = %request.name,
            messages = request.messages.len(),
            "Calling Langbase pipe"
        );

        let response = self
            .client
            .post(url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(request)
            .send()
            .await
            .map_err(|e| {
                // Distinguish timeouts from other transport failures so the
                // retry loop and logs can tell them apart.
                if e.is_timeout() {
                    LangbaseError::Timeout {
                        timeout_ms: self.request_config.timeout_ms,
                    }
                } else {
                    LangbaseError::Http(e)
                }
            })?;

        let status = response.status();

        if !status.is_success() {
            // Best-effort read of the error payload; never fail just because
            // the body could not be read — surface the status either way.
            let error_body = response.text().await.unwrap_or_else(|e| {
                warn!(error = %e, status = %status, "Failed to read pipe run error response body");
                "Unable to read error response".to_string()
            });
            return Err(LangbaseError::Api {
                status: status.as_u16(),
                message: error_body,
            });
        }

        let pipe_response: PipeResponse =
            response
                .json()
                .await
                .map_err(|e| LangbaseError::InvalidResponse {
                    message: format!("Failed to parse response: {}", e),
                })?;

        Ok(pipe_response)
    }
154
155 pub fn base_url(&self) -> &str {
157 &self.base_url
158 }
159
    /// Creates a pipe via `POST /v1/pipes`.
    ///
    /// # Errors
    /// - `LangbaseError::Http` on transport failure
    /// - `LangbaseError::Api` for any non-2xx status (including 409 when the
    ///   pipe already exists — see `ensure_pipe_internal`, which treats that
    ///   as success)
    /// - `LangbaseError::InvalidResponse` if the success body cannot be parsed
    pub async fn create_pipe(
        &self,
        request: CreatePipeRequest,
    ) -> LangbaseResult<CreatePipeResponse> {
        let url = format!("{}/v1/pipes", self.base_url);

        info!(pipe = %request.name, "Creating Langbase pipe");

        let response = self
            .client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&request)
            .send()
            .await
            .map_err(LangbaseError::Http)?;

        let status = response.status();

        if !status.is_success() {
            // Best-effort read of the error payload; still surface the status
            // when the body is unreadable.
            let error_body = response.text().await.unwrap_or_else(|e| {
                warn!(error = %e, status = %status, "Failed to read pipe creation error response body");
                "Unable to read error response".to_string()
            });
            return Err(LangbaseError::Api {
                status: status.as_u16(),
                message: error_body,
            });
        }

        let pipe_response: CreatePipeResponse =
            response
                .json()
                .await
                .map_err(|e| LangbaseError::InvalidResponse {
                    message: format!("Failed to parse create pipe response: {}", e),
                })?;

        info!(
            pipe = %pipe_response.name,
            url = %pipe_response.url,
            "Pipe created successfully"
        );

        Ok(pipe_response)
    }
208
209 pub async fn delete_pipe(&self, owner_login: &str, pipe_name: &str) -> LangbaseResult<()> {
211 let url = format!("{}/beta/pipes/{}/{}", self.base_url, owner_login, pipe_name);
212
213 info!(pipe = %pipe_name, owner = %owner_login, "Deleting Langbase pipe");
214
215 let response = self
216 .client
217 .delete(&url)
218 .header("Authorization", format!("Bearer {}", self.api_key))
219 .send()
220 .await
221 .map_err(LangbaseError::Http)?;
222
223 let status = response.status();
224
225 if !status.is_success() {
226 let error_body = response.text().await.unwrap_or_else(|e| {
227 warn!(error = %e, status = %status, "Failed to read pipe deletion error response body");
228 "Unable to read error response".to_string()
229 });
230 return Err(LangbaseError::Api {
231 status: status.as_u16(),
232 message: error_body,
233 });
234 }
235
236 info!(pipe = %pipe_name, "Pipe deleted successfully");
237
238 Ok(())
239 }
240
241 pub async fn ensure_linear_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
243 let request = CreatePipeRequest::new(pipe_name)
244 .with_description("Linear reasoning mode for MCP server")
245 .with_model("openai:gpt-4o-mini")
246 .with_upsert(true)
247 .with_json_output(true)
248 .with_temperature(0.7)
249 .with_max_tokens(2000)
250 .with_messages(vec![Message::system(LINEAR_REASONING_PROMPT)]);
251
252 self.ensure_pipe_internal(request, "Linear reasoning").await
253 }
254
255 pub async fn ensure_tree_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
257 let request = CreatePipeRequest::new(pipe_name)
258 .with_description("Tree-based reasoning mode for exploring multiple paths")
259 .with_model("openai:gpt-4o-mini")
260 .with_upsert(true)
261 .with_json_output(true)
262 .with_temperature(0.8) .with_max_tokens(3000) .with_messages(vec![Message::system(TREE_REASONING_PROMPT)]);
265
266 self.ensure_pipe_internal(request, "Tree reasoning").await
267 }
268
269 pub async fn ensure_divergent_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
271 let request = CreatePipeRequest::new(pipe_name)
272 .with_description("Divergent reasoning mode for creative perspectives")
273 .with_model("openai:gpt-4o-mini")
274 .with_upsert(true)
275 .with_json_output(true)
276 .with_temperature(0.9) .with_max_tokens(3000) .with_messages(vec![Message::system(DIVERGENT_REASONING_PROMPT)]);
279
280 self.ensure_pipe_internal(request, "Divergent reasoning")
281 .await
282 }
283
284 pub async fn ensure_reflection_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
286 let request = CreatePipeRequest::new(pipe_name)
287 .with_description("Reflection mode for meta-cognitive analysis")
288 .with_model("openai:gpt-4o-mini")
289 .with_upsert(true)
290 .with_json_output(true)
291 .with_temperature(0.6) .with_max_tokens(2500)
293 .with_messages(vec![Message::system(REFLECTION_PROMPT)]);
294
295 self.ensure_pipe_internal(request, "Reflection").await
296 }
297
    /// Provisions every pipe this server depends on, sequentially.
    ///
    /// Eight pipes total: the four core reasoning modes plus the GoT,
    /// detection, decision-framework, and auto-router pipes. Fails fast on the
    /// first error; pipes already created stay created (each call upserts).
    pub async fn ensure_all_pipes(&self) -> LangbaseResult<()> {
        // Core reasoning modes (4).
        self.ensure_linear_pipe("linear-reasoning-v1").await?;
        self.ensure_tree_pipe("tree-reasoning-v1").await?;
        self.ensure_divergent_pipe("divergent-reasoning-v1").await?;
        self.ensure_reflection_pipe("reflection-v1").await?;
        info!("Core reasoning pipes ready (4)");

        // Consolidated / auxiliary pipes (4).
        self.ensure_got_pipes().await?;

        self.ensure_detection_pipes().await?;

        self.ensure_decision_framework_pipes().await?;

        self.ensure_auto_router_pipe().await?;

        info!("All pipes ready (8 total)");
        Ok(())
    }
322
323 pub async fn ensure_auto_router_pipe(&self) -> LangbaseResult<()> {
325 let request = CreatePipeRequest::new("mode-router-v1")
326 .with_description("Auto mode router for intelligent mode selection")
327 .with_model("openai:gpt-4o-mini")
328 .with_upsert(true)
329 .with_json_output(true)
330 .with_temperature(0.5)
331 .with_max_tokens(1000);
332
333 self.ensure_pipe_internal(request, "Auto mode router").await
334 }
335
336 pub async fn ensure_consolidated_detection_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
338 let request = CreatePipeRequest::new(pipe_name)
340 .with_description("Consolidated detection mode for bias and fallacy analysis")
341 .with_model("openai:gpt-4o-mini")
342 .with_upsert(true)
343 .with_json_output(true)
344 .with_temperature(0.5) .with_max_tokens(3000);
346
347 self.ensure_pipe_internal(request, "Detection").await
348 }
349
350 pub async fn ensure_consolidated_got_pipe(&self, pipe_name: &str) -> LangbaseResult<()> {
352 let request = CreatePipeRequest::new(pipe_name)
354 .with_description("Consolidated GoT mode for graph-based reasoning operations")
355 .with_model("openai:gpt-4o-mini")
356 .with_upsert(true)
357 .with_json_output(true)
358 .with_temperature(0.7)
359 .with_max_tokens(2500);
360
361 self.ensure_pipe_internal(request, "GoT reasoning").await
362 }
363
364 pub async fn ensure_detection_pipes(&self) -> LangbaseResult<()> {
366 self.ensure_consolidated_detection_pipe("detection-v1")
367 .await?;
368 info!("Detection pipe ready");
369 Ok(())
370 }
371
372 pub async fn ensure_got_pipes(&self) -> LangbaseResult<()> {
374 self.ensure_consolidated_got_pipe("got-reasoning-v1")
375 .await?;
376 info!("GoT reasoning pipe ready");
377 Ok(())
378 }
379
380 pub async fn ensure_consolidated_decision_framework_pipe(
382 &self,
383 pipe_name: &str,
384 ) -> LangbaseResult<()> {
385 let request = CreatePipeRequest::new(pipe_name)
387 .with_description(
388 "Consolidated decision framework for decision, perspective, evidence, and Bayesian analysis",
389 )
390 .with_model("openai:gpt-4o-mini")
391 .with_upsert(true)
392 .with_json_output(true)
393 .with_temperature(0.6)
394 .with_max_tokens(4000);
395
396 self.ensure_pipe_internal(request, "Decision framework")
397 .await
398 }
399
400 pub async fn ensure_decision_framework_pipes(&self) -> LangbaseResult<()> {
402 self.ensure_consolidated_decision_framework_pipe("decision-framework-v1")
403 .await?;
404 info!("Decision framework pipe ready");
405 Ok(())
406 }
407
408 async fn ensure_pipe_internal(
410 &self,
411 request: CreatePipeRequest,
412 mode_name: &str,
413 ) -> LangbaseResult<()> {
414 let pipe_name = request.name.clone();
415
416 match self.create_pipe(request).await {
417 Ok(_) => {
418 info!(pipe = %pipe_name, mode = %mode_name, "pipe ready");
419 Ok(())
420 }
421 Err(LangbaseError::Api { status: 409, .. }) => {
422 info!(pipe = %pipe_name, mode = %mode_name, "pipe already exists");
424 Ok(())
425 }
426 Err(e) => Err(e),
427 }
428 }
429}
430
#[cfg(test)]
mod tests {
    //! Unit tests for `LangbaseClient` construction and for the request /
    //! message builder types. No network calls are made here: `call_pipe`,
    //! `create_pipe`, `delete_pipe`, and the `ensure_*` helpers are not
    //! exercised by these tests.

    use super::*;

    // ---------- client construction & base-URL normalization ----------

    #[test]
    fn test_client_creation() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };

        let request_config = RequestConfig::default();

        let client = LangbaseClient::new(&config, request_config);
        assert!(client.is_ok());
    }

    #[test]
    fn test_client_base_url() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };

        let request_config = RequestConfig::default();
        let client = LangbaseClient::new(&config, request_config).unwrap();

        assert_eq!(client.base_url(), "https://api.langbase.com");
    }

    #[test]
    fn test_client_base_url_trailing_slash_trimmed() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com/".to_string(),
        };

        let request_config = RequestConfig::default();
        let client = LangbaseClient::new(&config, request_config).unwrap();

        assert_eq!(client.base_url(), "https://api.langbase.com");
    }

    #[test]
    fn test_client_with_custom_request_config() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://custom.api.com".to_string(),
        };

        let request_config = RequestConfig {
            timeout_ms: 60000,
            max_retries: 5,
            retry_delay_ms: 2000,
        };

        let client = LangbaseClient::new(&config, request_config);
        assert!(client.is_ok());
        let client = client.unwrap();
        assert_eq!(client.base_url(), "https://custom.api.com");
    }

    // ---------- PipeRequest builder ----------

    #[test]
    fn test_pipe_request_creation() {
        let messages = vec![
            Message::system("System prompt"),
            Message::user("User message"),
        ];
        let request = PipeRequest::new("test-pipe", messages);
        assert_eq!(request.name, "test-pipe");
        assert_eq!(request.messages.len(), 2);
    }

    #[test]
    fn test_pipe_request_with_thread_id() {
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new("pipe-1", messages).with_thread_id("thread-123");
        assert_eq!(request.thread_id, Some("thread-123".to_string()));
    }

    #[test]
    fn test_pipe_request_with_variables() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        vars.insert("key1".to_string(), "value1".to_string());
        vars.insert("key2".to_string(), "value2".to_string());

        let request = PipeRequest::new("pipe-1", messages).with_variables(vars);
        assert!(request.variables.is_some());
        let variables = request.variables.unwrap();
        assert_eq!(variables.get("key1"), Some(&"value1".to_string()));
        assert_eq!(variables.get("key2"), Some(&"value2".to_string()));
    }

    // ---------- CreatePipeRequest builder ----------

    #[test]
    fn test_create_pipe_request_basic() {
        // A bare request: all optional fields start unset.
        let request = CreatePipeRequest::new("my-pipe");
        assert_eq!(request.name, "my-pipe");
        assert!(request.description.is_none());
        assert!(request.model.is_none());
    }

    #[test]
    fn test_create_pipe_request_builder() {
        let request = CreatePipeRequest::new("test-pipe")
            .with_description("Test description")
            .with_model("openai:gpt-4o-mini")
            .with_upsert(true)
            .with_json_output(true)
            .with_temperature(0.7)
            .with_max_tokens(2000)
            .with_messages(vec![Message::system("System prompt")]);

        assert_eq!(request.name, "test-pipe");
        assert_eq!(request.description, Some("Test description".to_string()));
        assert_eq!(request.model, Some("openai:gpt-4o-mini".to_string()));
        assert_eq!(request.upsert, Some(true));
        assert_eq!(request.json, Some(true));
        assert_eq!(request.temperature, Some(0.7));
        assert_eq!(request.max_tokens, Some(2000));
        assert!(request.messages.is_some());
    }

    // ---------- Message constructors ----------

    #[test]
    fn test_message_system() {
        use super::super::types::MessageRole;
        let msg = Message::system("System instructions");
        assert!(matches!(msg.role, MessageRole::System));
        assert_eq!(msg.content, "System instructions");
    }

    #[test]
    fn test_message_user() {
        use super::super::types::MessageRole;
        let msg = Message::user("User query");
        assert!(matches!(msg.role, MessageRole::User));
        assert_eq!(msg.content, "User query");
    }

    #[test]
    fn test_message_assistant() {
        use super::super::types::MessageRole;
        let msg = Message::assistant("Assistant response");
        assert!(matches!(msg.role, MessageRole::Assistant));
        assert_eq!(msg.content, "Assistant response");
    }

    // ---------- base-URL edge cases ----------

    #[test]
    fn test_client_base_url_multiple_trailing_slashes() {
        // trim_end_matches removes every trailing '/', not just one.
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com///".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        assert_eq!(client.base_url(), "https://api.langbase.com");
    }

    #[test]
    fn test_client_base_url_with_path() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com/v2/".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        assert_eq!(client.base_url(), "https://api.langbase.com/v2");
    }

    #[test]
    fn test_client_base_url_no_protocol() {
        // Normalization does not validate the URL; a bare host is kept as-is.
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        assert_eq!(client.base_url(), "api.langbase.com");
    }

    #[test]
    fn test_client_base_url_localhost() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "http://localhost:8080/".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        assert_eq!(client.base_url(), "http://localhost:8080");
    }

    #[test]
    fn test_client_base_url_with_port() {
        let config = LangbaseConfig {
            api_key: "test_key".to_string(),
            base_url: "https://api.langbase.com:443/".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        assert_eq!(client.base_url(), "https://api.langbase.com:443");
    }

    // ---------- PipeRequest edge cases ----------

    #[test]
    fn test_pipe_request_empty_messages() {
        let messages = vec![];
        let request = PipeRequest::new("test-pipe", messages);
        assert_eq!(request.name, "test-pipe");
        assert_eq!(request.messages.len(), 0);
        assert!(request.thread_id.is_none());
        assert!(request.variables.is_none());
    }

    #[test]
    fn test_pipe_request_single_message() {
        let messages = vec![Message::user("Single message")];
        let request = PipeRequest::new("pipe-1", messages);
        assert_eq!(request.messages.len(), 1);
        assert_eq!(request.messages[0].content, "Single message");
    }

    #[test]
    fn test_pipe_request_multiple_messages_all_roles() {
        let messages = vec![
            Message::system("System prompt"),
            Message::user("User query 1"),
            Message::assistant("Assistant response 1"),
            Message::user("User query 2"),
        ];
        let request = PipeRequest::new("pipe-1", messages);
        assert_eq!(request.messages.len(), 4);
    }

    #[test]
    fn test_pipe_request_with_empty_thread_id() {
        // An empty thread id is stored verbatim, not treated as None.
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new("pipe-1", messages).with_thread_id("");
        assert_eq!(request.thread_id, Some("".to_string()));
    }

    #[test]
    fn test_pipe_request_with_special_chars_in_thread_id() {
        let messages = vec![Message::user("Test")];
        let request =
            PipeRequest::new("pipe-1", messages).with_thread_id("thread-123-abc_!@#$%^&*()");
        assert_eq!(
            request.thread_id,
            Some("thread-123-abc_!@#$%^&*()".to_string())
        );
    }

    #[test]
    fn test_pipe_request_with_empty_variables() {
        // An empty map still yields Some(..), distinguishing "set" from "unset".
        let messages = vec![Message::user("Test")];
        let vars = std::collections::HashMap::new();
        let request = PipeRequest::new("pipe-1", messages).with_variables(vars);
        assert!(request.variables.is_some());
        assert_eq!(request.variables.unwrap().len(), 0);
    }

    #[test]
    fn test_pipe_request_with_many_variables() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        for i in 0..100 {
            vars.insert(format!("key{}", i), format!("value{}", i));
        }
        let request = PipeRequest::new("pipe-1", messages).with_variables(vars);
        assert_eq!(request.variables.unwrap().len(), 100);
    }

    #[test]
    fn test_pipe_request_chaining_all_options() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        vars.insert("key".to_string(), "value".to_string());
        let request = PipeRequest::new("pipe-1", messages)
            .with_thread_id("thread-123")
            .with_variables(vars);
        assert!(request.thread_id.is_some());
        assert!(request.variables.is_some());
    }

    // ---------- CreatePipeRequest edge cases ----------

    #[test]
    fn test_create_pipe_request_empty_name() {
        // The builder does not validate names; the server is the authority.
        let request = CreatePipeRequest::new("");
        assert_eq!(request.name, "");
    }

    #[test]
    fn test_create_pipe_request_name_with_special_chars() {
        let request = CreatePipeRequest::new("my-pipe_v1.0-test");
        assert_eq!(request.name, "my-pipe_v1.0-test");
    }

    #[test]
    fn test_create_pipe_request_with_empty_description() {
        let request = CreatePipeRequest::new("pipe-1").with_description("");
        assert_eq!(request.description, Some("".to_string()));
    }

    #[test]
    fn test_create_pipe_request_with_long_description() {
        let long_desc = "a".repeat(1000);
        let request = CreatePipeRequest::new("pipe-1").with_description(&long_desc);
        assert_eq!(request.description, Some(long_desc));
    }

    #[test]
    fn test_create_pipe_request_with_various_models() {
        let models = vec![
            "openai:gpt-4o-mini",
            "openai:gpt-4o",
            "anthropic:claude-3-opus",
            "custom-model",
        ];
        for model in models {
            let request = CreatePipeRequest::new("pipe-1").with_model(model);
            assert_eq!(request.model, Some(model.to_string()));
        }
    }

    #[test]
    fn test_create_pipe_request_upsert_false() {
        let request = CreatePipeRequest::new("pipe-1").with_upsert(false);
        assert_eq!(request.upsert, Some(false));
    }

    #[test]
    fn test_create_pipe_request_json_output_false() {
        let request = CreatePipeRequest::new("pipe-1").with_json_output(false);
        assert_eq!(request.json, Some(false));
    }

    #[test]
    fn test_create_pipe_request_temperature_bounds() {
        // The builder stores temperatures verbatim; range checks are not done
        // client-side.
        let temps = vec![0.0, 0.5, 1.0, 1.5, 2.0];
        for temp in temps {
            let request = CreatePipeRequest::new("pipe-1").with_temperature(temp);
            assert_eq!(request.temperature, Some(temp));
        }
    }

    #[test]
    fn test_create_pipe_request_max_tokens_various() {
        let token_counts = vec![1, 100, 1000, 4096, 8192];
        for tokens in token_counts {
            let request = CreatePipeRequest::new("pipe-1").with_max_tokens(tokens);
            assert_eq!(request.max_tokens, Some(tokens));
        }
    }

    #[test]
    fn test_create_pipe_request_with_empty_messages() {
        let request = CreatePipeRequest::new("pipe-1").with_messages(vec![]);
        assert!(request.messages.is_some());
        assert_eq!(request.messages.unwrap().len(), 0);
    }

    #[test]
    fn test_create_pipe_request_with_multiple_system_messages() {
        let messages = vec![
            Message::system("System 1"),
            Message::system("System 2"),
            Message::system("System 3"),
        ];
        let request = CreatePipeRequest::new("pipe-1").with_messages(messages);
        assert_eq!(request.messages.unwrap().len(), 3);
    }

    #[test]
    fn test_create_pipe_request_full_builder_chain() {
        let messages = vec![Message::system("Test")];
        let request = CreatePipeRequest::new("full-pipe")
            .with_description("Full test")
            .with_model("openai:gpt-4o-mini")
            .with_upsert(true)
            .with_json_output(true)
            .with_temperature(0.8)
            .with_max_tokens(3000)
            .with_messages(messages);

        assert_eq!(request.name, "full-pipe");
        assert_eq!(request.description, Some("Full test".to_string()));
        assert_eq!(request.model, Some("openai:gpt-4o-mini".to_string()));
        assert_eq!(request.upsert, Some(true));
        assert_eq!(request.json, Some(true));
        assert_eq!(request.temperature, Some(0.8));
        assert_eq!(request.max_tokens, Some(3000));
        assert!(request.messages.is_some());
    }

    // ---------- Message content edge cases ----------

    #[test]
    fn test_message_system_empty_content() {
        let msg = Message::system("");
        assert_eq!(msg.content, "");
    }

    #[test]
    fn test_message_user_empty_content() {
        let msg = Message::user("");
        assert_eq!(msg.content, "");
    }

    #[test]
    fn test_message_assistant_empty_content() {
        let msg = Message::assistant("");
        assert_eq!(msg.content, "");
    }

    #[test]
    fn test_message_system_long_content() {
        let long_content = "x".repeat(10000);
        let msg = Message::system(&long_content);
        assert_eq!(msg.content, long_content);
    }

    #[test]
    fn test_message_user_with_special_chars() {
        let content = "Test\n\r\t!@#$%^&*(){}[]<>?/|\\\"'`~";
        let msg = Message::user(content);
        assert_eq!(msg.content, content);
    }

    #[test]
    fn test_message_assistant_with_unicode() {
        let content = "Hello 世界 🌍 émoji ñ ü";
        let msg = Message::assistant(content);
        assert_eq!(msg.content, content);
    }

    #[test]
    fn test_message_user_with_json() {
        let json_content = r#"{"key": "value", "nested": {"data": 123}}"#;
        let msg = Message::user(json_content);
        assert_eq!(msg.content, json_content);
    }

    // ---------- RequestConfig ----------

    #[test]
    fn test_request_config_default_values() {
        // NOTE(review): this pins the default max_retries at 3 — if the
        // `RequestConfig::default()` impl changes, this test must change too.
        let config = RequestConfig::default();
        assert!(config.timeout_ms > 0);
        assert_eq!(config.max_retries, 3);
        assert!(config.retry_delay_ms > 0);
    }

    #[test]
    fn test_request_config_zero_retries() {
        let config = RequestConfig {
            timeout_ms: 30000,
            max_retries: 0,
            retry_delay_ms: 1000,
        };
        let langbase_config = LangbaseConfig {
            api_key: "test".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&langbase_config, config);
        assert!(client.is_ok());
    }

    #[test]
    fn test_request_config_high_timeout() {
        let config = RequestConfig {
            timeout_ms: 300000,
            max_retries: 3,
            retry_delay_ms: 1000,
        };
        let langbase_config = LangbaseConfig {
            api_key: "test".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&langbase_config, config);
        assert!(client.is_ok());
    }

    #[test]
    fn test_request_config_many_retries() {
        let config = RequestConfig {
            timeout_ms: 30000,
            max_retries: 10,
            retry_delay_ms: 500,
        };
        let langbase_config = LangbaseConfig {
            api_key: "test".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&langbase_config, config);
        assert!(client.is_ok());
    }

    // ---------- API-key handling & client semantics ----------

    #[test]
    fn test_client_with_empty_api_key() {
        // Construction does not validate the key; auth failures surface at
        // request time as API errors.
        let config = LangbaseConfig {
            api_key: "".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default());
        assert!(client.is_ok());
    }

    #[test]
    fn test_client_with_long_api_key() {
        let long_key = "k".repeat(500);
        let config = LangbaseConfig {
            api_key: long_key.clone(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default());
        assert!(client.is_ok());
    }

    #[test]
    fn test_client_base_url_immutability() {
        let config = LangbaseConfig {
            api_key: "test".to_string(),
            base_url: "https://api.langbase.com/".to_string(),
        };
        let client = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        let url1 = client.base_url();
        let url2 = client.base_url();
        assert_eq!(url1, url2);
        assert_eq!(url1, "https://api.langbase.com");
    }

    #[test]
    fn test_client_clone() {
        let config = LangbaseConfig {
            api_key: "test".to_string(),
            base_url: "https://api.langbase.com".to_string(),
        };
        let client1 = LangbaseClient::new(&config, RequestConfig::default()).unwrap();
        let client2 = client1.clone();
        assert_eq!(client1.base_url(), client2.base_url());
    }

    // ---------- pipe-name shapes ----------

    #[test]
    fn test_pipe_request_with_hyphenated_name() {
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new("my-custom-pipe-v1", messages);
        assert_eq!(request.name, "my-custom-pipe-v1");
    }

    #[test]
    fn test_pipe_request_with_underscored_name() {
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new("my_custom_pipe_v1", messages);
        assert_eq!(request.name, "my_custom_pipe_v1");
    }

    #[test]
    fn test_pipe_request_with_numeric_name() {
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new("pipe123", messages);
        assert_eq!(request.name, "pipe123");
    }

    #[test]
    fn test_create_pipe_request_with_version_suffix() {
        let request = CreatePipeRequest::new("reasoning-v2.1.0");
        assert_eq!(request.name, "reasoning-v2.1.0");
    }

    // ---------- miscellaneous edge cases ----------

    #[test]
    fn test_pipe_request_with_very_long_name() {
        let long_name = "pipe-".to_string() + &"a".repeat(200);
        let messages = vec![Message::user("Test")];
        let request = PipeRequest::new(&long_name, messages);
        assert_eq!(request.name, long_name);
    }

    #[test]
    fn test_message_content_with_null_bytes() {
        let content = "Hello\0World";
        let msg = Message::user(content);
        assert_eq!(msg.content, content);
    }

    #[test]
    fn test_create_pipe_request_temperature_negative() {
        // Stored verbatim; no client-side clamping.
        let request = CreatePipeRequest::new("pipe-1").with_temperature(-0.5);
        assert_eq!(request.temperature, Some(-0.5));
    }

    #[test]
    fn test_create_pipe_request_max_tokens_zero() {
        let request = CreatePipeRequest::new("pipe-1").with_max_tokens(0);
        assert_eq!(request.max_tokens, Some(0));
    }

    // ---------- variables edge cases ----------

    #[test]
    fn test_variables_with_special_key_names() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        vars.insert("key-with-dashes".to_string(), "value1".to_string());
        vars.insert("key_with_underscores".to_string(), "value2".to_string());
        vars.insert("keyWithCamelCase".to_string(), "value3".to_string());
        vars.insert("key.with.dots".to_string(), "value4".to_string());

        let request = PipeRequest::new("pipe-1", messages).with_variables(vars.clone());
        let req_vars = request.variables.unwrap();
        assert_eq!(req_vars.len(), 4);
        assert_eq!(req_vars.get("key-with-dashes"), Some(&"value1".to_string()));
    }

    #[test]
    fn test_variables_with_empty_values() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        vars.insert("key1".to_string(), "".to_string());
        vars.insert("key2".to_string(), "value".to_string());

        let request = PipeRequest::new("pipe-1", messages).with_variables(vars);
        let req_vars = request.variables.unwrap();
        assert_eq!(req_vars.get("key1"), Some(&"".to_string()));
    }

    #[test]
    fn test_variables_with_json_values() {
        let messages = vec![Message::user("Test")];
        let mut vars = std::collections::HashMap::new();
        vars.insert(
            "json_data".to_string(),
            r#"{"nested": "value"}"#.to_string(),
        );

        let request = PipeRequest::new("pipe-1", messages).with_variables(vars);
        let req_vars = request.variables.unwrap();
        assert_eq!(
            req_vars.get("json_data"),
            Some(&r#"{"nested": "value"}"#.to_string())
        );
    }
}