Enum Content
pub enum Content {
    Text(String),
    Json(Value),
    Image {
        url: Option<String>,
        mime: Option<String>,
        name: Option<String>,
    },
    Audio {
        url: Option<String>,
        mime: Option<String>,
    },
}

Message content, modeled as an enum to support multimodal and structured content
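As a quick orientation, here is a minimal sketch constructing each variant via the convenience constructors documented below. The use path is an assumption (adjust to your crate layout), and serde_json's json! macro is used for the Json payload:

use ai_lib::Content; // assumed import path
use serde_json::json;

let text = Content::new_text("Hello!");
let args = Content::new_json(json!({ "size": "small" }));
let image = Content::new_image(
    Some("https://example.com/dog.jpg".into()), // url
    Some("image/jpeg".into()),                  // mime
    Some("dog.jpg".into()),                     // name
);
let audio = Content::new_audio(
    Some("https://example.com/clip.mp3".into()), // url (placeholder)
    Some("audio/mpeg".into()),                   // mime
);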

Variants

Text(String)

Plain text content

Json(Value)

Generic JSON content for structured payloads (e.g. function call args)

Image

Reference to an image (url) or metadata; adapters may upload or inline as needed

Fields: url: Option<String>, mime: Option<String>, name: Option<String>

Audio

Reference to audio content

Fields: url: Option<String>, mime: Option<String>

Implementations

impl Content

pub fn as_text(&self) -> String

Return a best-effort textual representation for legacy code paths
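A minimal sketch of the intended use. For the Text variant this returns the inner string; the rendering of other variants is best-effort, and the assumption in the comment below is based on the multimodal example, which prints an Image's URL via as_text:

let text = Content::new_text("Hello!");
println!("{}", text.as_text()); // "Hello!"

let image = Content::new_image(Some("https://example.com/dog.jpg".into()), None, None);
// Best-effort textual form of non-text content; for an Image this appears to be
// the URL (an assumption based on the multimodal example below).
println!("{}", image.as_text());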

Examples found in repository
examples/multimodal_example.rs (line 25)
 5  async fn main() -> Result<(), Box<dyn std::error::Error>> {
 6      println!("Multimodal example: image + audio content in a message");
 7
 8      let _client = AiClient::new(Provider::Groq)?;
 9
10      let request = ChatCompletionRequest::new(
11          "multimodal-model".to_string(),
12          vec![Message {
13              role: Role::User,
14              content: Content::new_image(
15                  Some("https://example.com/dog.jpg".into()),
16                  Some("image/jpeg".into()),
17                  Some("dog.jpg".into()),
18              ),
19              function_call: None,
20          }],
21      );
22
23      println!(
24          "Prepared multimodal request; image URL: {}",
25          request.messages[0].content.as_text()
26      );
27
28      // Note: this example demonstrates the type usage only and does not call the API.
29      Ok(())
30  }
More examples
examples/basic_usage.rs (line 43)
 6  async fn main() -> Result<(), Box<dyn std::error::Error>> {
 7      println!("🚀 AI-lib Basic Usage Example");
 8      println!("================================");
 9
10      // Switch model provider by changing Provider value
11      let client = AiClient::new(Provider::Groq)?;
12      println!(
13          "✅ Created client with provider: {:?}",
14          client.current_provider()
15      );
16
17      // Get list of supported models
18      let models = client.list_models().await?;
19      println!("📋 Available models: {:?}", models);
20
21      // Create chat request
22      let request = ChatCompletionRequest::new(
23          "llama3-8b-8192".to_string(),
24          vec![Message {
25              role: Role::User,
26              content: Content::Text("Hello! Please introduce yourself briefly.".to_string()),
27              function_call: None,
28          }],
29      )
30      .with_temperature(0.7)
31      .with_max_tokens(100);
32
33      println!("📤 Sending request to model: {}", request.model);
34
35      // Send request
36      let response = client.chat_completion(request).await?;
37
38      println!("📥 Received response:");
39      println!("   ID: {}", response.id);
40      println!("   Model: {}", response.model);
41      println!(
42          "   Content: {}",
43          response.choices[0].message.content.as_text()
44      );
45      println!("   Usage: {} tokens", response.usage.total_tokens);
46
47      Ok(())
48  }
examples/ascii_horse.rs (line 87)
24  async fn main() -> Result<(), Box<dyn std::error::Error>> {
25      println!("Example: Function Calling with a local tool 'ascii_horse'");
26
27      // Define the tool
28      let tool = Tool {
29          name: "ascii_horse".to_string(),
30          description: Some("Return an ASCII art horse. Accepts size: 'small'|'large'.".to_string()),
31          parameters: Some(json!({
32              "type": "object",
33              "properties": {
34                  "size": { "type": "string", "enum": ["small", "large"] }
35              },
36              "required": ["size"]
37          })),
38      };
39
40      // Build a request that offers the tool to the model
41      let mut request = ChatCompletionRequest::new(
42          "example-model".to_string(),
43          vec![Message {
44              role: Role::User,
45              content: Content::new_text("Please draw an ASCII horse for me."),
46              function_call: None,
47          }],
48      );
49      request.functions = Some(vec![tool.clone()]);
50      request.function_call = Some(FunctionCallPolicy::Auto("ascii_horse".to_string()));
51
52      println!(
53          "Prepared request with functions: {}",
54          serde_json::to_string_pretty(&request.functions).unwrap()
55      );
56
57      // Simulate model returning a function call (in a real run this would come from the provider)
58      let simulated_call = FunctionCall {
59          name: "ascii_horse".to_string(),
60          arguments: Some(json!({ "size": "small" })),
61      };
62      println!(
63          "Model requested function call: {}",
64          serde_json::to_string_pretty(&simulated_call).unwrap()
65      );
66
67      // Execute the local tool
68      let size_arg = simulated_call
69          .arguments
70          .as_ref()
71          .and_then(|v| v.get("size"))
72          .and_then(|s| s.as_str())
73          .unwrap_or("small");
74
75      let tool_output = ascii_horse(size_arg);
76
77      // Convert tool output to a Message and append to conversation
78      let tool_message = Message {
79          role: Role::Assistant,
80          content: Content::new_text(tool_output.clone()),
81          function_call: None,
82      };
83
84      // In a normal flow you'd send the updated messages back to the model to continue the conversation.
85      println!(
86          "Tool output (appended as assistant message):\n\n{}\n",
87          tool_message.content.as_text()
88      );
89
90      Ok(())
91  }
examples/proxy_example.rs (line 41)
 6  async fn main() -> Result<(), Box<dyn std::error::Error>> {
 7      println!("🌐 AI-lib Proxy Server Support Example");
 8      println!("=====================================");
 9
10      // Check proxy configuration
11      match std::env::var("AI_PROXY_URL") {
12          Ok(proxy_url) => {
13              println!("✅ Proxy configuration detected: {}", proxy_url);
14              println!("   All HTTP requests will go through this proxy server");
15          }
16          Err(_) => {
17              println!("ℹ️  AI_PROXY_URL environment variable not set");
18              println!("   To use proxy, set: export AI_PROXY_URL=http://proxy.example.com:8080");
19          }
20      }
21
22      println!("\n🚀 Creating AI client...");
23      let client = AiClient::new(Provider::Groq)?;
24      println!(
25          "✅ Client created successfully, provider: {:?}",
26          client.current_provider()
27      );
28
29      // Create test request
30      let request = ChatCompletionRequest::new(
31          "llama3-8b-8192".to_string(),
32          vec![Message {
33              role: Role::User,
34              content: Content::Text("Hello! This request may go through a proxy.".to_string()),
35              function_call: None,
36          }],
37      );
38
39      println!("\n📤 Preparing to send request...");
40      println!("   Model: {}", request.model);
41      println!("   Message: {}", request.messages[0].content.as_text());
42
43      // Get model list (this request will also go through proxy)
44      match client.list_models().await {
45          Ok(models) => {
46              println!("\n📋 Model list obtained through proxy:");
47              for model in models {
48                  println!("   • {}", model);
49              }
50          }
51          Err(e) => {
52              println!("\n⚠️  Failed to get model list: {}", e);
53              println!("   This may be due to:");
54              println!("   • GROQ_API_KEY environment variable not set");
55              println!("   • Proxy server configuration error");
56              println!("   • Network connection issue");
57          }
58      }
59
60      println!("\n💡 Proxy Configuration Instructions:");
61      println!("   • Set environment variable: AI_PROXY_URL=http://your-proxy:port");
62      println!("   • Supports HTTP and HTTPS proxies");
63      println!("   • Supports authenticated proxies: http://user:pass@proxy:port");
64      println!("   • All AI providers will automatically use this proxy configuration");
65
66      Ok(())
67  }
examples/function_call_openai.rs (line 71)
 7  async fn main() -> Result<(), Box<dyn std::error::Error>> {
 8      println!("🔧 OpenAI Function Calling example (ai-lib)");
 9
10      // Ensure OPENAI_API_KEY is set in env before running
11      let client = AiClient::new(Provider::OpenAI)?;
12
13      // Build a simple user message
14      let user_msg = Message {
15          role: Role::User,
16          content: Content::Text("Please call the ascii_horse tool with size=3".to_string()),
17          function_call: None,
18      };
19
20      // Define a Tool (JSON Schema for parameters)
21      let ascii_horse_tool = Tool {
22          name: "ascii_horse".to_string(),
23          description: Some("Draws an ASCII horse of given size".to_string()),
24          parameters: Some(json!({
25              "type": "object",
26              "properties": {
27                  "size": { "type": "integer", "description": "Size of the horse" }
28              },
29              "required": ["size"]
30          })),
31      };
32
33      let mut req = ChatCompletionRequest::new("gpt-4o-mini".to_string(), vec![user_msg]);
34      req.functions = Some(vec![ascii_horse_tool]);
35      req.function_call = Some(FunctionCallPolicy::Auto("auto".to_string()));
36      req = req.with_max_tokens(200).with_temperature(0.0);
37
38      println!("📤 Sending request to OpenAI (model={})", req.model);
39
40      let resp = client.chat_completion(req).await?;
41
42      // Handle a possible function call from the model: execute locally and send the result back
43      for choice in resp.choices {
44          let msg = choice.message;
45          if let Some(fc) = msg.function_call {
46              println!("🛠️  Model invoked function: {}", fc.name);
47              let args = fc.arguments.unwrap_or(serde_json::json!(null));
48              println!("   arguments: {}", args);
49
50              // Simple local tool: ascii_horse
51              if fc.name == "ascii_horse" {
52                  // Parse size param
53                  let size = args.get("size").and_then(|v| v.as_i64()).unwrap_or(3) as usize;
54                  let horse = generate_ascii_horse(size);
55                  println!("⚙️ Executed ascii_horse locally, output:\n{}", horse);
56
57                  // Send follow-up message with tool result as assistant message
58                  let tool_msg = Message {
59                      role: Role::Assistant,
60                      content: Content::Text(horse.clone()),
61                      function_call: None,
62                  };
63
64                  let mut followup =
65                      ChatCompletionRequest::new("gpt-4o-mini".to_string(), vec![tool_msg]);
66                  followup = followup.with_max_tokens(200).with_temperature(0.0);
67                  let follow_resp = client.chat_completion(followup).await?;
68                  for fc_choice in follow_resp.choices {
69                      println!(
70                          "🗨️ Final model response: {}",
71                          fc_choice.message.content.as_text()
72                      );
73                  }
74              }
75          } else {
76              println!("💬 Model message: {}", msg.content.as_text());
77          }
78      }
79
80      Ok(())
81  }
examples/model_override_demo.rs (line 43)
 7  async fn main() -> Result<(), Box<dyn std::error::Error>> {
 8      // Check environment variables
 9      if std::env::var("GROQ_API_KEY").is_err() {
10          println!("❌ Please set GROQ_API_KEY environment variable");
11          println!("   Example: export GROQ_API_KEY=your_api_key_here");
12          return Ok(());
13      }
14
15      println!("🚀 Model Override Feature Demo");
16      println!("==============================");
17      println!();
18
19      // 1. Basic usage - maintain original simplicity
20      println!("📋 1. Basic Usage - Using Default Model");
21      let reply = AiClient::quick_chat_text(Provider::Groq, "Hello!").await?;
22      println!("   ✅ Response: {}", reply);
23      println!();
24
25      // 2. Explicitly specify model
26      println!("📋 2. Explicitly Specify Model");
27      let reply = AiClient::quick_chat_text_with_model(
28          Provider::Groq,
29          "Hello!",
30          "llama-3.1-8b-instant"
31      ).await?;
32      println!("   ✅ Response: {}", reply);
33      println!();
34
35      // 3. Using ModelOptions
36      println!("📋 3. Using ModelOptions");
37      let client = AiClient::new(Provider::Groq)?;
38      let mut request = client.build_simple_request("Hello!");
39      request.model = "llama-3.1-70b-versatile".to_string();
40
41      let response = client.chat_completion(request).await?;
42
43      let reply = response.choices[0].message.content.as_text();
44      println!("   ✅ Response: {}", reply);
45      println!();
46
47      // 4. AiClientBuilder custom default model
48      println!("📋 4. AiClientBuilder Custom Default Model");
49      let client = AiClient::builder(Provider::Groq)
50          .with_default_chat_model("llama-3.1-8b-instant")
51          .build()?;
52
53      let request = client.build_simple_request("Hello!");
54      println!("   Using model: {}", request.model);
55
56      let response = client.chat_completion(request).await?;
57      match &response.choices[0].message.content {
58          Content::Text(text) => {
59              println!("   ✅ Response: {}", text);
60          }
61          _ => println!("   ✅ Response: {:?}", response.choices[0].message.content),
62      }
63      println!();
64
65      // 5. Explicitly specify model in build_simple_request
66      println!("📋 5. Explicitly Specify Model in build_simple_request");
67      let client = AiClient::new(Provider::Groq)?;
68      let request = client.build_simple_request_with_model("Hello!", "llama-3.1-70b-versatile");
69
70      println!("   Using model: {}", request.model);
71
72      let response = client.chat_completion(request).await?;
73      match &response.choices[0].message.content {
74          Content::Text(text) => {
75              println!("   ✅ Response: {}", text);
76          }
77          _ => println!("   ✅ Response: {:?}", response.choices[0].message.content),
78      }
79      println!();
80
81      println!("🎉 Demo completed!");
82      println!("==================");
83      println!("✅ All model override features are working correctly");
84      println!("✅ Backward compatibility is guaranteed");
85      println!("✅ Flexible model specification methods are provided");
86
87      Ok(())
88  }

pub fn new_text<S: Into<String>>(s: S) -> Self

Convenience constructor for text content
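A minimal sketch: the S: Into<String> bound accepts both &str and String, and the result is equivalent to constructing the Text variant directly:

let a = Content::new_text("hi");                // from &str
let b = Content::new_text(String::from("hi"));  // from String
let c = Content::Text("hi".to_string());        // equivalent direct construction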

Examples found in repository

examples/ascii_horse.rs (line 45): the scraped snippet is identical to the ascii_horse.rs listing shown under as_text above.

pub fn new_json(v: JsonValue) -> Self

Convenience constructor for JSON content
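No repository example is scraped for this constructor. A minimal sketch, assuming serde_json's json! macro is in scope, building a structured payload such as function call arguments:

use serde_json::json;

// Structured payload, e.g. arguments for a function call
let args = Content::new_json(json!({ "size": "small" }));
// Equivalent to constructing the variant directly
let direct = Content::Json(json!({ "size": "small" }));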


pub fn new_image(url: Option<String>, mime: Option<String>, name: Option<String>) -> Self

Convenience constructor for image content

Examples found in repository

examples/multimodal_example.rs (lines 14-18): the scraped snippet is identical to the multimodal_example.rs listing shown under as_text above.

pub fn new_audio(url: Option<String>, mime: Option<String>) -> Self

Convenience constructor for audio content
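No repository example is scraped for this constructor. A minimal sketch mirroring the new_image example above; the URL and MIME type are placeholder values:

let msg = Message {
    role: Role::User,
    content: Content::new_audio(
        Some("https://example.com/clip.wav".into()), // url (placeholder)
        Some("audio/wav".into()),                    // mime
    ),
    function_call: None,
};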

Trait Implementations

impl Clone for Content

fn clone(&self) -> Content

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for Content

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

impl<'de> Deserialize<'de> for Content

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer.

impl Serialize for Content

fn serialize<__S>(&self, __serializer: __S) -> Result<__S::Ok, __S::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer.
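Because Content implements both Serialize and Deserialize, it round-trips through serde_json. A minimal sketch; the wire shape in the comment assumes serde's default externally tagged enum representation, which the crate may override with serde attributes:

let original = Content::new_text("Hello!");
let encoded = serde_json::to_string(&original).unwrap();
// With serde's default enum representation this prints {"Text":"Hello!"}
// (an assumption; check the crate's serde attributes for the actual shape).
println!("{}", encoded);
let decoded: Content = serde_json::from_str(&encoded).unwrap();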

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬 This is a nightly-only experimental API (clone_to_uninit). Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper.

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self). That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> PolicyExt for T
where T: ?Sized,

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow.

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow.

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

fn vzip(self) -> V

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper.

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper.

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,

impl<T> ErasedDestructor for T
where T: 'static,