pub struct ChatRequest { /* private fields */ }
Expand description
https://platform.openai.com/docs/api-reference/chat/create
Implementations§
Source§impl ChatRequest
impl ChatRequest
Sourcepub fn new(model: impl Into<String>, messages: Vec<ChatMessage>) -> Self
pub fn new(model: impl Into<String>, messages: Vec<ChatMessage>) -> Self
Examples found in repository?
examples/gemini.rs (lines 13-16)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/openai.rs (lines 13-16)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (lines 14-18)
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/mistral-7b-instruct:free",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Note: SambaNova Provider returns error: `image_url` must start with 'data:image/<jpeg|jpg|png|webp>;base64,'\"
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-11b-vision-instruct",
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/generate.rs (line 136)
118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193
async fn generate<H: HttpClient>(
client: &Option<Client<RawProvider, H>>,
provider_name: impl Into<String>,
model_name: impl Into<String>,
test_name: impl Into<String>,
prompt: impl Into<String>,
) -> Result<(), Error> {
let test_name: String = test_name.into();
let provider_name: String = provider_name.into();
let model_name: String = model_name.into();
match client {
None => tracing::debug!(
"Skip {}/{}/{} because client is None",
provider_name,
model_name,
test_name
),
Some(client) => {
let request = ChatRequest::new(&model_name, vec![ChatMessage::user(prompt)]);
let request = serde_json::to_value(request)?;
tracing::debug!("Sending request: {:?}", request);
let response = client.chat().create(request.clone()).await?;
let provider_model_name =
format!("{}_{}", provider_name, sanitize_folder_name(&model_name));
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("data");
d.push(&test_name);
d.push(&provider_model_name);
let output_path = d
.to_str()
.map(ToString::to_string)
.ok_or(Error::InvalidArgument(format!(
"Failed to get path for data/{}/{}",
test_name, provider_model_name
)))?;
match fs::create_dir_all(&output_path) {
Err(e) => tracing::error!("Failed to create folder: {:?}", e),
Ok(_) => {
tracing::info!("Successfully created folder: {:?}", output_path);
// info.json
let mut info_path = PathBuf::from_str(&output_path).unwrap();
info_path.push("info.json");
let info = json!({
"provider_name": provider_name,
"test_name": test_name,
"model_name": model_name
});
match save_json_to_file(&info, &info_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", info_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
// request.json
let mut request_path = PathBuf::from_str(&output_path).unwrap();
request_path.push("request.json");
match save_json_to_file(&request, &request_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", request_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
// response.json
let mut response_path = PathBuf::from_str(&output_path).unwrap();
response_path.push("response.json");
match save_json_to_file(&response, &response_path) {
Ok(_) => tracing::info!("Succesfully created file: {:?}", response_path),
Err(e) => tracing::error!("Failed to create file: {:?}", e),
}
}
}
}
}
Ok(())
}
pub fn from_system(message: impl Into<Content>) -> Self
pub fn from_model(model: impl Into<String>) -> Self
pub fn iter_messages(&self) -> impl Iterator<Item = &ChatMessage>
Sourcepub async fn send(self) -> Result<ChatResponse, Error>
pub async fn send(self) -> Result<ChatResponse, Error>
Examples found in repository?
examples/gemini.rs (line 20)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/openai.rs (line 20)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 22)
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/mistral-7b-instruct:free",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Note: SambaNova Provider returns error: `image_url` must start with 'data:image/<jpeg|jpg|png|webp>;base64,'\"
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-11b-vision-instruct",
"meta-llama/llama-3.2-11b-vision-instruct:free",
// "openai/gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
Sourcepub async fn send_stream(
self,
) -> Result<Pin<Box<dyn Stream<Item = Result<ChatResponseStream, Error>> + Send>>, Error>
pub async fn send_stream( self, ) -> Result<Pin<Box<dyn Stream<Item = Result<ChatResponseStream, Error>> + Send>>, Error>
Examples found in repository?
examples/gemini.rs (line 36)
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
More examples
examples/openai.rs (line 36)
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
examples/openrouter.rs (line 39)
29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
Source§impl ChatRequest
Chainable setters
impl ChatRequest
Chainable setters
pub fn system(self, message: impl Into<Content>) -> Self
Sourcepub fn user(self, message: impl Into<String>) -> Self
pub fn user(self, message: impl Into<String>) -> Self
Examples found in repository?
examples/gemini.rs (line 17)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
More examples
examples/openai.rs (line 17)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
examples/openrouter.rs (line 19)
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
pub fn developer(self, message: impl Into<Content>) -> Self
pub fn assistant(self, message: impl Into<AssistantContent>) -> Self
pub fn tool( self, message: impl Into<Content>, tool_call_id: impl Into<String>, ) -> Self
pub fn model(self, model: impl Into<String>) -> Self
Sourcepub fn stream(self) -> Self
pub fn stream(self) -> Self
Examples found in repository?
examples/gemini.rs (line 33)
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
More examples
examples/openai.rs (line 33)
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
examples/openrouter.rs (line 36)
29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
Sourcepub fn tools(self, tools: Vec<impl Into<ChatTool>>) -> Self
pub fn tools(self, tools: Vec<impl Into<ChatTool>>) -> Self
Examples found in repository?
examples/gemini.rs (lines 138-160)
130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/openai.rs (lines 138-160)
130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (lines 147-169)
137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
Sourcepub fn response_format(
self,
response_format: impl Into<ChatResponseFormat>,
) -> Self
pub fn response_format( self, response_format: impl Into<ChatResponseFormat>, ) -> Self
Examples found in repository?
examples/gemini.rs (line 84)
70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/openai.rs (line 84)
70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
examples/openrouter.rs (line 89)
73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
Source§impl ChatRequest
impl ChatRequest
Sourcepub fn to_string_pretty(&self) -> Result<String, Error>
pub fn to_string_pretty(&self) -> Result<String, Error>
Examples found in repository?
examples/gemini.rs (line 18)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_url() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"),
ChatMessage::user("What's in this image?"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_image_base64() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
More examples
examples/openai.rs (line 18)
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204
async fn example_basic() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ");
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
async fn example_assistant_prefill() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user("Who are you?"),
ChatMessage::assistant("I'm not sure, but my best guess is"),
],
);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.response_format(JsonSchema::new("weather").strict(true).schema(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City or location name"
},
"temperature": {
"type": "number",
"description": "Temperature in Celsius"
},
"conditions": {
"type": "string",
"description": "Weather conditions description"
}
},
"required": ["location", "temperature", "conditions"],
"additionalProperties": false
})));
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
async fn example_tool_calls() -> Result<(), Error> {
let request = ChatRequest::new(
"gpt-4o-mini",
vec![
ChatMessage::system("You are a helpful assistant"),
ChatMessage::user(r#"What's the weather like in Vietnam?"#),
],
)
.tools(vec![ChatToolFunction::new("get_current_weather")
.strict(true)
.description("Get the current weather in a given location")
.parameters(json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": [
"celsius",
"fahrenheit"
]
}
},
"required": [
"location"
],
"additionalProperties": false
}))]);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Sends a vision request that references an image by public URL.
async fn example_image_url() -> Result<(), Error> {
    const IMAGE_URL: &str = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg";

    let messages = vec![
        ChatMessage::system("You are a helpful assistant"),
        ChatMessage::user_image(IMAGE_URL),
        ChatMessage::user("What's in this image?"),
    ];
    let request = ChatRequest::new("gpt-4o-mini", messages);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
/// Sends a vision request with an inline base64-encoded example image.
async fn example_image_base64() -> Result<(), Error> {
    let messages = vec![
        ChatMessage::system("You are a helpful assistant"),
        ChatMessage::user_image_with_text("What's in this image?", utils::BASE64_EXAMPLE_IMAGE),
    ];
    let request = ChatRequest::new("gpt-4o-mini", messages);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
examples/openrouter.rs (line 20)
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217
/// Minimal request/response round trip against a free OpenRouter model.
async fn example_basic() -> Result<(), Error> {
    // Alternative free model: "meta-llama/llama-3.2-3b-instruct:free"
    let model = "mistralai/mistral-7b-instruct:free";
    let request =
        ChatRequest::new(model, vec![ChatMessage::system("You are a helpful assistant")])
            .user("1 + 1 = ");

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
async fn example_basic_stream() -> Result<(), Error> {
let request = ChatRequest::new(
// "meta-llama/llama-3.2-3b-instruct:free",
"mistralai/mistral-7b-instruct:free",
vec![ChatMessage::system("You are a helpful assistant")],
)
.user("1 + 1 = ")
.stream();
tracing::info!("request: \n{}", request.to_string_pretty()?);
let mut response = request.send_stream().await?;
while let Some(result) = response.next().await {
match result {
Ok(response) => {
tracing::info!("response: \n{}", response.to_string_pretty()?);
}
Err(e) => {
tracing::error!("error = \n {e}");
}
}
}
Ok(())
}
#[allow(unused)]
/// Seeds the conversation with a partial assistant turn for the model to continue.
async fn example_assistant_prefill() -> Result<(), Error> {
    let messages = vec![
        ChatMessage::system("You are a helpful assistant"),
        ChatMessage::user("Who are you?"),
        // Prefilled assistant text the model should pick up from.
        ChatMessage::assistant("I'm not sure, but my best guess is"),
    ];
    let request = ChatRequest::new("mistralai/mistral-7b-instruct:free", messages);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
/// Requests a free-form JSON reply by setting `response_format` to
/// `JsonObject`; the desired shape is only described in the prompt text,
/// not enforced by a schema.
async fn example_structured_outputs_json_object() -> Result<(), Error> {
let request = ChatRequest::new(
"mistralai/ministral-8b",
// "openai/gpt-4o-mini",
// "google/gemini-flash-1.5-8b", // error
vec![
ChatMessage::system("You are a helpful assistant"),
// The expected JSON shape is conveyed in-prompt; JsonObject mode alone
// does not constrain keys or types.
ChatMessage::user(
r#"What's the weather like in Vietnam? Reply in json as following:
{
"temperature": "Temperature in Celsius",
"location": "City or location name"
}"#,
),
],
)
.response_format(ChatResponseFormat::JsonObject);
tracing::info!("request: \n{}", request.to_string_pretty()?);
let response = request.send().await?;
tracing::info!("response: \n{}", response.to_string_pretty()?);
Ok(())
}
#[allow(unused)]
/// Constrains the reply to a strict JSON-Schema via `response_format`.
async fn example_structured_outputs_json_schema() -> Result<(), Error> {
    // Other models tried: "openai/gpt-4o-mini"; "google/gemini-flash-1.5-8b" (errors).
    let schema = json!({
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "City or location name"
            },
            "temperature": {
                "type": "number",
                "description": "Temperature in Celsius"
            },
            "conditions": {
                "type": "string",
                "description": "Weather conditions description"
            }
        },
        "required": ["location", "temperature", "conditions"],
        "additionalProperties": false
    });

    let request = ChatRequest::new(
        "mistralai/ministral-8b",
        vec![
            ChatMessage::system("You are a helpful assistant"),
            ChatMessage::user("What's the weather like in Vietnam?"),
        ],
    )
    .response_format(JsonSchema::new("weather").strict(true).schema(schema));

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
/// Declares a weather tool the model may call instead of answering directly.
async fn example_tool_calls() -> Result<(), Error> {
    // Other models tried: "openai/gpt-4o-mini"; "google/gemini-flash-1.5-8b" (errors).
    let weather_tool = ChatToolFunction::new("get_current_weather")
        .strict(true)
        .description("Get the current weather in a given location")
        .parameters(json!({
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA"
                },
                "unit": {
                    "type": "string",
                    "enum": [
                        "celsius",
                        "fahrenheit"
                    ]
                }
            },
            "required": [
                "location"
            ],
            "additionalProperties": false
        }));

    let request = ChatRequest::new(
        "mistralai/ministral-8b",
        vec![
            ChatMessage::system("You are a helpful assistant"),
            ChatMessage::user("What's the weather like in Vietnam?"),
        ],
    )
    .tools(vec![weather_tool]);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
/// Note: the SambaNova provider rejects plain URLs here — `image_url` must
/// start with `data:image/<jpeg|jpg|png|webp>;base64,`.
async fn example_image_url() -> Result<(), Error> {
    // Alternative model: "openai/gpt-4o-mini"
    const IMAGE_URL: &str = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg";

    let messages = vec![
        ChatMessage::system("You are a helpful assistant"),
        ChatMessage::user_image(IMAGE_URL),
        ChatMessage::user("What's in this image?"),
    ];
    let request = ChatRequest::new("meta-llama/llama-3.2-11b-vision-instruct:free", messages);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
#[allow(unused)]
/// Embeds the example image inline as base64 alongside a text prompt.
async fn example_image_base64() -> Result<(), Error> {
    // Other models tried: "meta-llama/llama-3.2-11b-vision-instruct", "openai/gpt-4o-mini"
    let messages = vec![
        ChatMessage::system("You are a helpful assistant"),
        ChatMessage::user_image_with_text("What's in this image?", BASE64_EXAMPLE_IMAGE),
    ];
    let request = ChatRequest::new("meta-llama/llama-3.2-11b-vision-instruct:free", messages);

    tracing::info!("request: \n{}", request.to_string_pretty()?);
    let response = request.send().await?;
    tracing::info!("response: \n{}", response.to_string_pretty()?);
    Ok(())
}
Trait Implementations§
Source§impl Clone for ChatRequest
impl Clone for ChatRequest
Source§fn clone(&self) -> ChatRequest
fn clone(&self) -> ChatRequest
Returns a copy of the value. Read more
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from
source
. Read more
Source§impl Debug for ChatRequest
impl Debug for ChatRequest
Source§impl Default for ChatRequest
impl Default for ChatRequest
Source§fn default() -> ChatRequest
fn default() -> ChatRequest
Returns the “default value” for a type. Read more
Source§impl<'de> Deserialize<'de> for ChatRequest
impl<'de> Deserialize<'de> for ChatRequest
Source§fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error>
where
    __D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer. Read more
Source§impl PartialEq for ChatRequest
impl PartialEq for ChatRequest
Source§impl Printable for ChatRequest
impl Printable for ChatRequest
Source§impl Requestable for ChatRequest
impl Requestable for ChatRequest
Source§impl Serialize for ChatRequest
impl Serialize for ChatRequest
impl StructuralPartialEq for ChatRequest
Auto Trait Implementations§
impl Freeze for ChatRequest
impl RefUnwindSafe for ChatRequest
impl Send for ChatRequest
impl Sync for ChatRequest
impl Unpin for ChatRequest
impl UnwindSafe for ChatRequest
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more