/// A message in a chat conversation, with one variant per author role
/// (developer, system, user, assistant, tool).
pub enum ChatMessage {
/// A developer-role message.
Developer {
content: Content,
// Optional participant name — presumably the same semantics as the
// documented `name` on `Assistant`; confirm against upstream API docs.
name: Option<String>,
},
/// A system-role message.
System {
content: Content,
// Optional participant name — presumably the same semantics as the
// documented `name` on `Assistant`; confirm against upstream API docs.
name: Option<String>,
},
/// A user-role message.
User {
/// The contents of the user message.
content: UserContent,
// Optional participant name — presumably the same semantics as the
// documented `name` on `Assistant`; confirm against upstream API docs.
name: Option<String>,
},
/// An assistant-role message (a model response, or a prefill of one).
Assistant {
/// The contents of the assistant message. Required unless `tool_calls`
/// or `function_call` is specified.
content: Option<AssistantContent>,
// NOTE(review): refusal text from the model, if any — not documented on
// this page; confirm against the upstream API reference.
refusal: Option<String>,
/// An optional name for the participant. Provides the model information
/// to differentiate between participants of the same role.
name: Option<String>,
/// Data about a previous audio response from the model.
audio: Option<AssistantAudio>,
/// The tool calls generated by the model, such as function calls.
tool_calls: Option<Vec<AssistantToolCall>>,
/// Deprecated and replaced by `tool_calls`. The name and arguments of a
/// function that should be called, as generated by the model.
function_call: Option<AssistantFunctionCall>,
},
/// A tool-role message carrying a tool invocation's result.
Tool {
content: Content,
// Presumably the id of the tool call this message responds to — confirm
// against the upstream API reference.
tool_call_id: String,
},
}
## Variants

### Developer

### System

### User

Fields:

content: UserContent — The contents of the user message.
### Assistant

Fields:

content: Option<AssistantContent> — The contents of the assistant message. Required unless tool_calls or function_call is specified.

name: Option<String> — An optional name for the participant. Provides the model information to differentiate between participants of the same role.

audio: Option<AssistantAudio> — Data about a previous audio response from the model.

tool_calls: Option<Vec<AssistantToolCall>> — The tool calls generated by the model, such as function calls.

function_call: Option<AssistantFunctionCall> — Deprecated: replaced by tool_calls. The name and arguments of a function that should be called, as generated by the model.

### Tool
## Implementations

impl ChatMessage

pub fn system(content: impl Into<Content>) -> Self

Examples found in repository: examples/gemini.rs (line 15); the listing below spans lines 12–204 of that file.
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples: examples/openai.rs (line 15); the listing below spans lines 12–204 of that file.
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/ollama.rs (line 16); the listing below spans lines 12–208 of that file.
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
],
)
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 17); the listing below spans lines 13–217 of that file.
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/mistral-7b-instruct:free",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Note: SambaNova Provider returns error: `image_url` must start with 'data:image/<jpeg|jpg|png|webp>;base64,'\"
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-11b-vision-instruct",
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
pub fn developer(content: impl Into<Content>) -> Self

pub fn user(content: impl Into<String>) -> Self

Examples found in repository: examples/ollama.rs (line 17); the listing below spans lines 12–190 of that file.
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
],
)
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples: examples/gemini.rs (line 57); the listing below spans lines 52–186 of that file.
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openai.rs (line 57)
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 60)
55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/mistral-7b-instruct:free",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Note: SambaNova Provider returns error: `image_url` must start with 'data:image/<jpeg|jpg|png|webp>;base64,'\"
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/generate.rs (line 136)
118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193
async fn generate<H: HttpClient>(
client: &Option<Client<RawProvider, H>>,
provider_name: impl Into<String>,
model_name: impl Into<String>,
test_name: impl Into<String>,
prompt: impl Into<String>,
) -> Result<(), Error> {
let test_name: String = test_name.into();
let provider_name: String = provider_name.into();
let model_name: String = model_name.into();
match client {
None => tracing::debug!(
"Skip {}/{}/{} because client is None",
provider_name,
model_name,
test_name
),
Some(client) => {
let request = ChatRequest::new(&model_name, vec![ChatMessage::user(prompt)]);
let request = serde_json::to_value(request)?;
tracing::debug!("Sending request: {:?}", request);
let response = client.chat().create(request.clone()).await?;
let provider_model_name =
format!("{}_{}", provider_name, sanitize_folder_name(&model_name));
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("data");
d.push(&test_name);
d.push(&provider_model_name);
let output_path = d
.to_str()
.map(ToString::to_string)
.ok_or(Error::InvalidArgument(format!(
"Failed to get path for data/{}/{}",
test_name, provider_model_name
)))?;
match fs::create_dir_all(&output_path) {
Err(e) => tracing::error!("Failed to create folder: {:?}", e),
Ok(_) => {
tracing::info!("Successfully created folder: {:?}", output_path);
// info.json
let mut info_path = PathBuf::from_str(&output_path).unwrap();
info_path.push("info.json");
let info = json!({
"provider_name": provider_name,
"test_name": test_name,
"model_name": model_name
});
match save_json_to_file(&info, &info_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", info_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
// request.json
let mut request_path = PathBuf::from_str(&output_path).unwrap();
request_path.push("request.json");
match save_json_to_file(&request, &request_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", request_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
// response.json
let mut response_path = PathBuf::from_str(&output_path).unwrap();
response_path.push("response.json");
match save_json_to_file(&response, &response_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", response_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
}
}
}
}
Ok(())
}
Sourcepub fn user_image(image_url: impl Into<ImageUrl>) -> Self
pub fn user_image(image_url: impl Into<ImageUrl>) -> Self
Examples found in repository?
examples/gemini.rs (line 175)
170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/ollama.rs (line 179)
174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openai.rs (line 175)
170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 186)
180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
Sourcepub fn user_image_with_text(
text: impl Into<String>,
image_url: impl Into<ImageUrl>,
) -> Self
pub fn user_image_with_text( text: impl Into<String>, image_url: impl Into<ImageUrl>, ) -> Self
Examples found in repository?
examples/gemini.rs (line 194)
189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/ollama.rs (line 198)
193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openai.rs (line 194)
189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 207)
200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-11b-vision-instruct",
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
pub fn user_parts(parts: Vec<UserContentPart>) -> Self
Sourcepub fn assistant(content: impl Into<AssistantContent>) -> Self
pub fn assistant(content: impl Into<AssistantContent>) -> Self
Examples found in repository?
examples/gemini.rs (line 58)
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/ollama.rs (line 62)
56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"llama3.2:3b",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openai.rs (line 58)
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 61)
55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/mistral-7b-instruct:free",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
pub fn tool( content: impl Into<Content>, tool_call_id: impl Into<String>, ) -> Self
Trait Implementations§
Source§impl Clone for ChatMessage
impl Clone for ChatMessage
Source§fn clone(&self) -> ChatMessage
fn clone(&self) -> ChatMessage
Returns a copy of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from
source
. Read moreSource§impl Debug for ChatMessage
impl Debug for ChatMessage
Source§impl<'de> Deserialize<'de> for ChatMessage
impl<'de> Deserialize<'de> for ChatMessage
Source§fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>where
__D: Deserializer<'de>,
fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>where
__D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Source§impl PartialEq for ChatMessage
impl PartialEq for ChatMessage
Source§impl Serialize for ChatMessage
impl Serialize for ChatMessage
impl StructuralPartialEq for ChatMessage
Auto Trait Implementations§
impl Freeze for ChatMessage
impl RefUnwindSafe for ChatMessage
impl Send for ChatMessage
impl Sync for ChatMessage
impl Unpin for ChatMessage
impl UnwindSafe for ChatMessage
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more