//! # Native Async Port Traits
//!
//! Ergonomic native `async fn` trait alternatives that don't require
//! `#[async_trait]`. Developers implement these for the best IDE experience
//! and fastest compile times. The framework automatically bridges them to
//! the dyn-compatible ports via blanket implementations.
//!
//! ## Why Two Sets of Traits?
//!
//! Rust's native `async fn` in trait (RPITIT) produces opaque `impl Future`
//! return types, which are not object-safe. The framework's runtime uses
//! `Arc<dyn Port>` for flexibility, requiring `#[async_trait]` (which boxes
//! the future). These native traits let you write clean code without the
//! macro overhead, while the blanket impls handle the bridging.
//!
//! ## Example
//!
//! ```rust,ignore
//! use bob_core::native::NativeLlmPort;
//!
//! struct MyLlm;
//!
//! // Native async — no #[async_trait] needed
//! impl NativeLlmPort for MyLlm {
//!     async fn complete(&self, req: LlmRequest) -> Result<LlmResponse, LlmError> {
//!         // ...
//!     }
//! }
//!
//! // Automatically usable as Arc<dyn LlmPort> via blanket impl
//! let port: Arc<dyn LlmPort> = Arc::new(MyLlm);
//! ```

use std::future::Future;

use crate::{
    error::{LlmError, StoreError, ToolError},
    types::{
        LlmRequest, LlmResponse, LlmStream, SessionId, SessionState, ToolCall, ToolDescriptor,
        ToolResult,
    },
};

// ── Native LLM Port ──────────────────────────────────────────────────

46/// Native async LLM port — implement this for the cleanest developer experience.
47///
48/// The framework provides a blanket implementation that bridges any
49/// `NativeLlmPort` to the dyn-compatible [`LlmPort`](crate::ports::LlmPort).
50pub trait NativeLlmPort: Send + Sync {
51    /// Run a non-streaming inference call.
52    fn complete(
53        &self,
54        req: LlmRequest,
55    ) -> impl Future<Output = Result<LlmResponse, LlmError>> + Send;
56
57    /// Run a streaming inference call.
58    fn complete_stream(
59        &self,
60        req: LlmRequest,
61    ) -> impl Future<Output = Result<LlmStream, LlmError>> + Send;
62}

// ── Native Tool Port ─────────────────────────────────────────────────

66/// Native async tool port — implement this for the cleanest developer experience.
67///
68/// The framework provides a blanket implementation that bridges any
69/// `NativeToolPort` to the dyn-compatible [`ToolPort`](crate::ports::ToolPort).
70pub trait NativeToolPort: Send + Sync {
71    /// List all available tools.
72    fn list_tools(&self) -> impl Future<Output = Result<Vec<ToolDescriptor>, ToolError>> + Send;
73
74    /// Execute a tool call.
75    fn call_tool(
76        &self,
77        call: ToolCall,
78    ) -> impl Future<Output = Result<ToolResult, ToolError>> + Send;
79}

// ── Native Session Store ─────────────────────────────────────────────

83/// Native async session store — implement this for the cleanest developer experience.
84pub trait NativeSessionStore: Send + Sync {
85    /// Load a session by ID.
86    fn load(
87        &self,
88        id: &SessionId,
89    ) -> impl Future<Output = Result<Option<SessionState>, StoreError>> + Send;
90
91    /// Save a session by ID.
92    fn save(
93        &self,
94        id: &SessionId,
95        state: &SessionState,
96    ) -> impl Future<Output = Result<(), StoreError>> + Send;
97}

// ── Tests ────────────────────────────────────────────────────────────

101#[cfg(test)]
102mod tests {
103    use super::*;
104
105    // Verify the traits are implementable without #[async_trait].
106    struct MyTool;
107
108    impl NativeToolPort for MyTool {
109        async fn list_tools(&self) -> Result<Vec<ToolDescriptor>, ToolError> {
110            Ok(vec![])
111        }
112
113        async fn call_tool(&self, call: ToolCall) -> Result<ToolResult, ToolError> {
114            Ok(ToolResult { name: call.name, output: serde_json::json!(null), is_error: false })
115        }
116    }
117
118    struct MyLlm;
119
120    impl NativeLlmPort for MyLlm {
121        async fn complete(&self, _req: LlmRequest) -> Result<LlmResponse, LlmError> {
122            Err(LlmError::Provider("not implemented".into()))
123        }
124
125        async fn complete_stream(&self, _req: LlmRequest) -> Result<LlmStream, LlmError> {
126            Err(LlmError::Provider("not implemented".into()))
127        }
128    }
129
130    struct MyStore;
131
132    impl NativeSessionStore for MyStore {
133        async fn load(&self, _id: &SessionId) -> Result<Option<SessionState>, StoreError> {
134            Ok(None)
135        }
136
137        async fn save(&self, _id: &SessionId, _state: &SessionState) -> Result<(), StoreError> {
138            Ok(())
139        }
140    }
141
142    #[tokio::test]
143    async fn native_tool_port_works() {
144        let tool = MyTool;
145        let tools = tool.list_tools().await.unwrap();
146        assert!(tools.is_empty());
147
148        let result = tool.call_tool(ToolCall::new("test", serde_json::json!({}))).await.unwrap();
149        assert_eq!(result.name, "test");
150    }
151
152    #[tokio::test]
153    async fn native_llm_port_works() {
154        let llm = MyLlm;
155        let req = LlmRequest {
156            model: "test".into(),
157            messages: vec![],
158            tools: vec![],
159            output_schema: None,
160        };
161        assert!(llm.complete(req).await.is_err());
162    }
163
164    #[tokio::test]
165    async fn native_session_store_works() {
166        let store = MyStore;
167        let loaded = store.load(&"test".to_string()).await.unwrap();
168        assert!(loaded.is_none());
169    }
170}