pub struct OpenAIChat { /* private fields */ }Expand description
OpenAI 聊天客户端
Implementations§
Source§impl OpenAIChat
impl OpenAIChat
Sourcepub fn new(config: OpenAIConfig) -> Self
pub fn new(config: OpenAIConfig) -> Self
创建新的 OpenAI 聊天客户端
Trait Implementations§
Source§impl BaseChatModel for OpenAIChat
impl BaseChatModel for OpenAIChat
Source§fn chat<'life0, 'async_trait>(
&'life0 self,
messages: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<LLMResult, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn chat<'life0, 'async_trait>(
&'life0 self,
messages: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<LLMResult, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
与模型聊天 Read more
Source§fn stream_chat<'life0, 'async_trait>(
&'life0 self,
messages: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<Pin<Box<dyn Stream<Item = Result<String, Self::Error>> + Send>>, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn stream_chat<'life0, 'async_trait>(
&'life0 self,
messages: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<Pin<Box<dyn Stream<Item = Result<String, Self::Error>> + Send>>, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
流式聊天 Read more
Source§impl BaseLanguageModel<Vec<Message>, LLMResult> for OpenAIChat
impl BaseLanguageModel<Vec<Message>, LLMResult> for OpenAIChat
Source§fn model_name(&self) -> &str
fn model_name(&self) -> &str
获取模型名称
Source§fn temperature(&self) -> Option<f32>
fn temperature(&self) -> Option<f32>
获取温度参数
Source§fn max_tokens(&self) -> Option<usize>
fn max_tokens(&self) -> Option<usize>
获取最大 token 数
Source§fn with_temperature(self, temp: f32) -> Self
fn with_temperature(self, temp: f32) -> Self
设置温度参数
Source§fn with_max_tokens(self, max: usize) -> Self
fn with_max_tokens(self, max: usize) -> Self
设置最大 token 数
Source§impl Runnable<Vec<Message>, LLMResult> for OpenAIChat
impl Runnable<Vec<Message>, LLMResult> for OpenAIChat
Source§type Error = OpenAIError
type Error = OpenAIError
错误类型
Source§fn invoke<'life0, 'async_trait>(
&'life0 self,
input: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<LLMResult, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn invoke<'life0, 'async_trait>(
&'life0 self,
input: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<LLMResult, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
将单个输入转换为输出 Read more
Source§fn stream<'life0, 'async_trait>(
&'life0 self,
_input: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<Pin<Box<dyn Stream<Item = Result<LLMResult, Self::Error>> + Send>>, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
fn stream<'life0, 'async_trait>(
&'life0 self,
_input: Vec<Message>,
_config: Option<RunnableConfig>,
) -> Pin<Box<dyn Future<Output = Result<Pin<Box<dyn Stream<Item = Result<LLMResult, Self::Error>> + Send>>, Self::Error>> + Send + 'async_trait>> where
Self: 'async_trait,
'life0: 'async_trait,
流式输出 - 用于流式响应 (LLM 等) Read more
Auto Trait Implementations§
impl Freeze for OpenAIChat
impl !RefUnwindSafe for OpenAIChat
impl Send for OpenAIChat
impl Sync for OpenAIChat
impl Unpin for OpenAIChat
impl UnsafeUnpin for OpenAIChat
impl !UnwindSafe for OpenAIChat
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more