Enum Content 

pub enum Content {
    Text(String),
    Json(Value),
    Image {
        url: Option<String>,
        mime: Option<String>,
        name: Option<String>,
    },
    Audio {
        url: Option<String>,
        mime: Option<String>,
    },
}
Message content, represented as an enum to support multimodal and structured content in addition to plain text.

Variants

Text(String)

Json(Value)

Generic JSON content for structured payloads (e.g. function call args)

Image

Reference to an image (url) or metadata; adapters may upload or inline as needed

Fields: url: Option<String>, mime: Option<String>, name: Option<String>

Audio

Reference to audio content

Fields: url: Option<String>, mime: Option<String>
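
To show how the variants are typically consumed, here is a minimal sketch of branching on a Content value. The import path and the describe helper are illustrative assumptions, not part of ai-lib's documented API.

use ai_lib::types::Content; // import path assumed

// Hypothetical helper: render any Content variant as a short display string.
fn describe(content: &Content) -> String {
    match content {
        Content::Text(text) => text.clone(),
        Content::Json(value) => value.to_string(),
        Content::Image { url, mime, name } => {
            format!("image url={:?} mime={:?} name={:?}", url, mime, name)
        }
        Content::Audio { url, mime } => format!("audio url={:?} mime={:?}", url, mime),
    }
}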

Implementations

impl Content

pub fn as_text(&self) -> String

Return a best-effort textual representation for legacy code paths.

Examples found in repository:
examples/multimodal_example.rs (line 25)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("Multimodal example: image + audio content in a message");
7
8    let _client = AiClient::new(Provider::Groq)?;
9
10    let request = ChatCompletionRequest::new(
11        "multimodal-model".to_string(),
12        vec![Message {
13            role: Role::User,
14            content: Content::new_image(
15                Some("https://example.com/dog.jpg".into()),
16                Some("image/jpeg".into()),
17                Some("dog.jpg".into()),
18            ),
19            function_call: None,
20        }],
21    );
22
23    println!(
24        "Prepared multimodal request; image URL: {}",
25        request.messages[0].content.as_text()
26    );
27
28    // Note: this example demonstrates the type usage only and does not call the API.
29    Ok(())
30}
More examples
examples/basic_usage.rs (line 42)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("🚀 AI-lib Basic Usage Example");
7    println!("================================");
8
9    // To switch providers, just change the Provider value
10    let client = AiClient::new(Provider::Groq)?;
11    println!(
12        "✅ Created client with provider: {:?}",
13        client.current_provider()
14    );
15
16    // Fetch the list of supported models
17    let models = client.list_models().await?;
18    println!("📋 Available models: {:?}", models);
19
20    // Build the chat request
21    let request = ChatCompletionRequest::new(
22        "llama3-8b-8192".to_string(),
23        vec![Message {
24            role: Role::User,
25            content: Content::Text("Hello! Please introduce yourself briefly.".to_string()),
26            function_call: None,
27        }],
28    )
29    .with_temperature(0.7)
30    .with_max_tokens(100);
31
32    println!("📤 Sending request to model: {}", request.model);
33
34    // Send the request
35    let response = client.chat_completion(request).await?;
36
37    println!("📥 Received response:");
38    println!("   ID: {}", response.id);
39    println!("   Model: {}", response.model);
40    println!(
41        "   Content: {}",
42        response.choices[0].message.content.as_text()
43    );
44    println!("   Usage: {} tokens", response.usage.total_tokens);
45
46    Ok(())
47}
examples/debug_request.rs (line 26)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("🔍 Debugging the request format");
7    println!("===============");
8
9    // Build a test request
10    let request = ChatCompletionRequest::new(
11        "gpt-3.5-turbo".to_string(),
12        vec![Message {
13            role: Role::User,
14            content: Content::Text("Hello!".to_string()),
15            function_call: None,
16        }],
17    )
18    .with_max_tokens(10);
19
20    println!("📤 Raw request:");
21    println!("   Model: {}", request.model);
22    println!("   Message count: {}", request.messages.len());
23    println!(
24        "   Message[0]: {:?} - {}",
25        request.messages[0].role,
26        request.messages[0].content.as_text()
27    );
28    println!("   max_tokens: {:?}", request.max_tokens);
29
30    // Test OpenAI
31    println!("\n🤖 Testing OpenAI...");
32    match AiClient::new(Provider::OpenAI) {
33        Ok(client) => {
34            match client.chat_completion(request.clone()).await {
35                Ok(response) => {
36                    println!("✅ Success!");
37                    println!("   Response: {}", response.choices[0].message.content.as_text());
38                }
39                Err(e) => {
40                    println!("❌ Failed: {}", e);
41
42                    // A 400 error usually means the request format is invalid
43                    if e.to_string().contains("400") {
44                        println!("   This usually means the request format is incorrect");
45                        println!("   Let's check whether the request contains the required fields...");
46                    }
47                }
48            }
49        }
50        Err(e) => println!("❌ Failed to create client: {}", e),
51    }
52
53    Ok(())
54}
examples/test_without_proxy.rs (line 33)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("🌐 Testing a connection without a proxy");
7    println!("======================");
8
9    // Temporarily remove the proxy setting
10    std::env::remove_var("AI_PROXY_URL");
11
12    println!("ℹ️  Temporarily removed the AI_PROXY_URL setting");
13
14    // Test DeepSeek (directly reachable without a proxy)
15    println!("\n🔍 Testing DeepSeek (direct connection):");
16    match AiClient::new(Provider::DeepSeek) {
17        Ok(client) => {
18            let request = ChatCompletionRequest::new(
19                "deepseek-chat".to_string(),
20                vec![Message {
21                    role: Role::User,
22                    content: Content::Text(
23                        "Hello! Please respond with just 'Hi' to test.".to_string(),
24                    ),
25                    function_call: None,
26                }],
27            )
28            .with_max_tokens(5);
29
30            match client.chat_completion(request).await {
31                Ok(response) => {
32                    println!("✅ DeepSeek direct connection succeeded!");
33                    println!("   Response: {}", response.choices[0].message.content.as_text());
34                    println!("   Token usage: {}", response.usage.total_tokens);
35                }
36                Err(e) => {
37                    println!("❌ DeepSeek request failed: {}", e);
38                    if e.to_string().contains("402") {
39                        println!("   (This is an insufficient-balance error, so the connection itself works)");
40                    }
41                }
42            }
43        }
44        Err(e) => println!("❌ Failed to create DeepSeek client: {}", e),
45    }
46
47    println!("\n💡 Conclusions:");
48    println!("   • DeepSeek can be reached directly; no proxy is needed");
49    println!("   • OpenAI and Groq must be accessed through a proxy");
50    println!("   • A proxy may modify the request body and break its format");
51    println!("   • Check the proxy server configuration if requests fail");
52
53    Ok(())
54}
examples/ascii_horse.rs (line 87)
24async fn main() -> Result<(), Box<dyn std::error::Error>> {
25    println!("Example: Function Calling with a local tool 'ascii_horse'");
26
27    // Define the tool
28    let tool = Tool {
29        name: "ascii_horse".to_string(),
30        description: Some("Return an ASCII art horse. Accepts size: 'small'|'large'.".to_string()),
31        parameters: Some(json!({
32            "type": "object",
33            "properties": {
34                "size": { "type": "string", "enum": ["small", "large"] }
35            },
36            "required": ["size"]
37        })),
38    };
39
40    // Build a request that offers the tool to the model
41    let mut request = ChatCompletionRequest::new(
42        "example-model".to_string(),
43        vec![Message {
44            role: Role::User,
45            content: Content::new_text("Please draw an ASCII horse for me."),
46            function_call: None,
47        }],
48    );
49    request.functions = Some(vec![tool.clone()]);
50    request.function_call = Some(FunctionCallPolicy::Auto("ascii_horse".to_string()));
51
52    println!(
53        "Prepared request with functions: {}",
54        serde_json::to_string_pretty(&request.functions).unwrap()
55    );
56
57    // Simulate model returning a function call (in a real run this would come from the provider)
58    let simulated_call = FunctionCall {
59        name: "ascii_horse".to_string(),
60        arguments: Some(json!({ "size": "small" })),
61    };
62    println!(
63        "Model requested function call: {}",
64        serde_json::to_string_pretty(&simulated_call).unwrap()
65    );
66
67    // Execute the local tool
68    let size_arg = simulated_call
69        .arguments
70        .as_ref()
71        .and_then(|v| v.get("size"))
72        .and_then(|s| s.as_str())
73        .unwrap_or("small");
74
75    let tool_output = ascii_horse(size_arg);
76
77    // Convert tool output to a Message and append to conversation
78    let tool_message = Message {
79        role: Role::Assistant,
80        content: Content::new_text(tool_output.clone()),
81        function_call: None,
82    };
83
84    // In a normal flow you'd send the updated messages back to the model to continue the conversation.
85    println!(
86        "Tool output (appended as assistant message):\n\n{}\n",
87        tool_message.content.as_text()
88    );
89
90    Ok(())
91}
examples/proxy_example.rs (line 37)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("🌐 AI-lib proxy server support example");
7    println!("============================");
8
9    // Check the proxy configuration
10    match std::env::var("AI_PROXY_URL") {
11        Ok(proxy_url) => {
12            println!("✅ Proxy configuration detected: {}", proxy_url);
13            println!("   All HTTP requests will go through this proxy server");
14        }
15        Err(_) => {
16            println!("ℹ️  The AI_PROXY_URL environment variable is not set");
17            println!("   To use a proxy, set: export AI_PROXY_URL=http://proxy.example.com:8080");
18        }
19    }
20
21    println!("\n🚀 Creating AI client...");
22    let client = AiClient::new(Provider::Groq)?;
23    println!("✅ Client created successfully, provider: {:?}", client.current_provider());
24
25    // Build a test request
26    let request = ChatCompletionRequest::new(
27        "llama3-8b-8192".to_string(),
28        vec![Message {
29            role: Role::User,
30            content: Content::Text("Hello! This request may go through a proxy.".to_string()),
31            function_call: None,
32        }],
33    );
34
35    println!("\n📤 Preparing to send the request...");
36    println!("   Model: {}", request.model);
37    println!("   Message: {}", request.messages[0].content.as_text());
38
39    // Fetch the model list (this request also goes through the proxy)
40    match client.list_models().await {
41        Ok(models) => {
42            println!("\n📋 Models fetched through the proxy:");
43            for model in models {
44                println!("   • {}", model);
45            }
46        }
47        Err(e) => {
48            println!("\n⚠️  Failed to fetch the model list: {}", e);
49            println!("   This may be caused by:");
50            println!("   • The GROQ_API_KEY environment variable is not set");
51            println!("   • A misconfigured proxy server");
52            println!("   • Network connectivity problems");
53        }
54    }
55
56    println!("\n💡 Proxy configuration notes:");
57    println!("   • Set the environment variable: AI_PROXY_URL=http://your-proxy:port");
58    println!("   • HTTP and HTTPS proxies are supported");
59    println!("   • Authenticated proxies are supported: http://user:pass@proxy:port");
60    println!("   • All AI providers automatically use this proxy configuration");
61
62    Ok(())
63}
pub fn new_text<S: Into<String>>(s: S) -> Self

Convenience constructor for text content.

Examples found in repository:
examples/ascii_horse.rs (line 45)
24async fn main() -> Result<(), Box<dyn std::error::Error>> {
25    println!("Example: Function Calling with a local tool 'ascii_horse'");
26
27    // Define the tool
28    let tool = Tool {
29        name: "ascii_horse".to_string(),
30        description: Some("Return an ASCII art horse. Accepts size: 'small'|'large'.".to_string()),
31        parameters: Some(json!({
32            "type": "object",
33            "properties": {
34                "size": { "type": "string", "enum": ["small", "large"] }
35            },
36            "required": ["size"]
37        })),
38    };
39
40    // Build a request that offers the tool to the model
41    let mut request = ChatCompletionRequest::new(
42        "example-model".to_string(),
43        vec![Message {
44            role: Role::User,
45            content: Content::new_text("Please draw an ASCII horse for me."),
46            function_call: None,
47        }],
48    );
49    request.functions = Some(vec![tool.clone()]);
50    request.function_call = Some(FunctionCallPolicy::Auto("ascii_horse".to_string()));
51
52    println!(
53        "Prepared request with functions: {}",
54        serde_json::to_string_pretty(&request.functions).unwrap()
55    );
56
57    // Simulate model returning a function call (in a real run this would come from the provider)
58    let simulated_call = FunctionCall {
59        name: "ascii_horse".to_string(),
60        arguments: Some(json!({ "size": "small" })),
61    };
62    println!(
63        "Model requested function call: {}",
64        serde_json::to_string_pretty(&simulated_call).unwrap()
65    );
66
67    // Execute the local tool
68    let size_arg = simulated_call
69        .arguments
70        .as_ref()
71        .and_then(|v| v.get("size"))
72        .and_then(|s| s.as_str())
73        .unwrap_or("small");
74
75    let tool_output = ascii_horse(size_arg);
76
77    // Convert tool output to a Message and append to conversation
78    let tool_message = Message {
79        role: Role::Assistant,
80        content: Content::new_text(tool_output.clone()),
81        function_call: None,
82    };
83
84    // In a normal flow you'd send the updated messages back to the model to continue the conversation.
85    println!(
86        "Tool output (appended as assistant message):\n\n{}\n",
87        tool_message.content.as_text()
88    );
89
90    Ok(())
91}
pub fn new_json(v: JsonValue) -> Self

Convenience constructor for JSON content.
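
No repository example exercises new_json yet. The following is a minimal sketch of attaching structured function-call arguments to a message; the import paths are assumed and the argument payload is purely illustrative.

use ai_lib::types::{Content, Message, Role}; // import paths assumed
use serde_json::json;

// Build an assistant message whose content is structured JSON
// (e.g. arguments for a function call), using hypothetical values.
fn build_json_message() -> Message {
    Message {
        role: Role::Assistant,
        content: Content::new_json(json!({ "city": "Berlin", "unit": "celsius" })),
        function_call: None,
    }
}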

pub fn new_image(url: Option<String>, mime: Option<String>, name: Option<String>) -> Self

Convenience constructor for image content.

Examples found in repository:
examples/multimodal_example.rs (lines 14-18)
5async fn main() -> Result<(), Box<dyn std::error::Error>> {
6    println!("Multimodal example: image + audio content in a message");
7
8    let _client = AiClient::new(Provider::Groq)?;
9
10    let request = ChatCompletionRequest::new(
11        "multimodal-model".to_string(),
12        vec![Message {
13            role: Role::User,
14            content: Content::new_image(
15                Some("https://example.com/dog.jpg".into()),
16                Some("image/jpeg".into()),
17                Some("dog.jpg".into()),
18            ),
19            function_call: None,
20        }],
21    );
22
23    println!(
24        "Prepared multimodal request; image URL: {}",
25        request.messages[0].content.as_text()
26    );
27
28    // Note: this example demonstrates the type usage only and does not call the API.
29    Ok(())
30}
pub fn new_audio(url: Option<String>, mime: Option<String>) -> Self

Convenience constructor for audio content.
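
There is no repository example for new_audio yet. A minimal sketch, assuming the same Message shape used by the image example above and a hypothetical audio URL:

use ai_lib::types::{Content, Message, Role}; // import paths assumed

// Reference an audio clip by URL; adapters may upload or inline it as needed.
fn build_audio_message() -> Message {
    Message {
        role: Role::User,
        content: Content::new_audio(
            Some("https://example.com/clip.ogg".into()),
            Some("audio/ogg".into()),
        ),
        function_call: None,
    }
}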

Trait Implementations

impl Clone for Content

fn clone(&self) -> Content

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for Content

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

impl<'de> Deserialize<'de> for Content

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer.

impl Serialize for Content

fn serialize<__S>(&self, __serializer: __S) -> Result<__S::Ok, __S::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer.
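
As a quick illustration of the Serialize and Deserialize implementations, the sketch below round-trips a Content value through serde_json. The import path is assumed, and the exact JSON shape produced by the derive is not asserted here.

use ai_lib::types::Content; // import path assumed

// Serialize a Content value to JSON text and parse it back.
fn roundtrip() -> Result<(), serde_json::Error> {
    let original = Content::new_text("hello");
    let encoded = serde_json::to_string(&original)?;
    let decoded: Content = serde_json::from_str(&encoded)?;
    println!("{} -> {:?}", encoded, decoded);
    Ok(())
}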

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper.

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> PolicyExt for T
where T: ?Sized,

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow.

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper.

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper.

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,

impl<T> ErasedDestructor for T
where T: 'static,