pub struct LMStudioProvider { /* private fields */ }

Expand description
LM Studio local provider implementation
Uses OpenAI-compatible API endpoints for local model inference
Implementations§
Source § impl LMStudioProvider
impl LMStudioProvider
Source § pub fn new(
config: LMStudioConfig,
default_params: DefaultLLMParams,
) -> LlmResult<Self>
pub fn new( config: LMStudioConfig, default_params: DefaultLLMParams, ) -> LlmResult<Self>
Create a new LM Studio provider instance
§Errors
Returns LlmError::ConfigurationError if:
- Base URL is missing or invalid
- Provider configuration validation fails
- HTTP client initialization fails
Trait Implementations§
Source § impl Debug for LMStudioProvider
impl Debug for LMStudioProvider
Source § impl LlmProvider for LMStudioProvider
impl LlmProvider for LMStudioProvider
Source § fn execute_llm<'life0, 'async_trait>(
&'life0 self,
request: UnifiedLLMRequest,
_current_tool_round: Option<ToolCallingRound>,
config: Option<RequestConfig>,
) -> Pin<Box<dyn Future<Output = Result<Response>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn execute_llm<'life0, 'async_trait>(
&'life0 self,
request: UnifiedLLMRequest,
_current_tool_round: Option<ToolCallingRound>,
config: Option<RequestConfig>,
) -> Pin<Box<dyn Future<Output = Result<Response>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
Execute an LLM request and return the response. Read more
Source § fn execute_structured_llm<'life0, 'async_trait>(
&'life0 self,
request: UnifiedLLMRequest,
current_tool_round: Option<ToolCallingRound>,
schema: Value,
config: Option<RequestConfig>,
) -> Pin<Box<dyn Future<Output = Result<Response>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn execute_structured_llm<'life0, 'async_trait>(
&'life0 self,
request: UnifiedLLMRequest,
current_tool_round: Option<ToolCallingRound>,
schema: Value,
config: Option<RequestConfig>,
) -> Pin<Box<dyn Future<Output = Result<Response>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
Execute an LLM request with structured JSON output. Read more
Source § fn provider_name(&self) -> &'static str
fn provider_name(&self) -> &'static str
Get the provider’s identifier. Read more
Auto Trait Implementations§
impl !Freeze for LMStudioProvider
impl !RefUnwindSafe for LMStudioProvider
impl Send for LMStudioProvider
impl Sync for LMStudioProvider
impl Unpin for LMStudioProvider
impl !UnwindSafe for LMStudioProvider
Blanket Implementations§
Source § impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source § fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more