async_llm::utils

Function init_tracing

Source
pub fn init_tracing()
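The subscriber configuration behind init_tracing is not documented here. As a rough sketch only, a helper with this signature commonly installs a tracing_subscriber formatter filtered by the RUST_LOG environment variable; the filter default and output format below are assumptions, not the crate's confirmed implementation.

// Hypothetical sketch of a typical `init_tracing` helper; the actual
// async_llm implementation may differ.
use tracing_subscriber::EnvFilter;

pub fn init_tracing() {
    tracing_subscriber::fmt()
        // Read the log filter from RUST_LOG, falling back to `info` (assumed default).
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
        )
        .init();
}

Call it once near the start of main(), as the repository examples below do, before any spans or events are emitted.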
Examples found in repository
examples/openai.rs (line 209)
async fn main() -> Result<(), Error> {
    dotenvy::dotenv().ok();
    init_tracing();

    example_basic().await?;
    // example_basic_stream().await?;

    // Assistant Prefill
    // example_assistant_prefill().await?;

    // Images & Multimodal: image_url
    // example_image_url().await?;
    // Images & Multimodal: base64 image
    // example_image_base64().await?;

    // Tool Calls
    // example_tool_calls().await?;

    // Structured outputs
    // example_structured_outputs_json_object().await?;
    // example_structured_outputs_json_schema().await?;

    Ok(())
}
More examples
examples/gemini.rs (line 211)
async fn main() -> Result<(), Error> {
    dotenvy::dotenv().ok();
    std::env::set_var("OPENAI_API_KEY", std::env::var("GEMINI_API_KEY").unwrap());
    std::env::set_var("OPENAI_BASE_URL", std::env::var("GEMINI_BASE_URL").unwrap());
    init_tracing();

    example_basic().await?;
    // example_basic_stream().await?;

    // Assistant Prefill
    // example_assistant_prefill().await?;

    // Images & Multimodal: image_url
    // example_image_url().await?;
    // Images & Multimodal: base64 image
    // example_image_base64().await?;

    // Tool Calls
    // example_tool_calls().await?;

    // Structured outputs
    // example_structured_outputs_json_object().await?;
    // example_structured_outputs_json_schema().await?;

    Ok(())
}
examples/ollama.rs (line 215)
async fn main() -> Result<(), Error> {
    dotenvy::dotenv().ok();
    std::env::set_var("OPENAI_API_KEY", std::env::var("OLLAMA_API_KEY").unwrap());
    std::env::set_var("OPENAI_BASE_URL", std::env::var("OLLAMA_BASE_URL").unwrap());
    init_tracing();

    example_basic().await?;
    // example_basic_stream().await?;

    // Assistant Prefill
    // example_assistant_prefill().await?;

    // Images & Multimodal: image_url
    // example_image_url().await?;
    // Images & Multimodal: base64 image
    // example_image_base64().await?;

    // Tool Calls
    // example_tool_calls().await?;

    // Structured outputs
    // example_structured_outputs_json_object().await?;
    // example_structured_outputs_json_schema().await?;

    Ok(())
}
examples/openrouter.rs (line 230)
async fn main() -> Result<(), Error> {
    dotenvy::dotenv().ok();
    std::env::set_var(
        "OPENAI_API_KEY",
        std::env::var("OPENROUTER_API_KEY").unwrap(),
    );
    std::env::set_var(
        "OPENAI_BASE_URL",
        std::env::var("OPENROUTER_BASE_URL").unwrap(),
    );
    init_tracing();

    example_basic().await?;
    // example_basic_stream().await?;

    // Assistant Prefill
    // example_assistant_prefill().await?;

    // Images & Multimodal: image_url
    // example_image_url().await?;
    // Images & Multimodal: base64 image
    // example_image_base64().await?;

    // Tool Calls
    // example_tool_calls().await?;

    // Structured outputs
    // example_structured_outputs_json_object().await?;
    // example_structured_outputs_json_schema().await?;

    // TODO: Prompt Caching with `cache_control` for Anthropic
    // TODO: Transforms: https://openrouter.ai/docs/transforms

    Ok(())
}
examples/generate.rs (line 22)
async fn main() -> Result<(), Error> {
    dotenvy::dotenv().ok();
    init_tracing();
    tracing::info!("This program will loop through each available provider and save the outputs into JSON files for testing purposes");

    let openai_client = create_client("OPENAI_BASE_URL", "OPENAI_API_KEY");
    let gemini_client = create_client("GEMINI_BASE_URL", "GEMINI_API_KEY");
    let together_client = create_client("TOGETHER_BASE_URL", "TOGETHER_API_KEY");
    let openrouter_client = create_client("OPENROUTER_BASE_URL", "OPENROUTER_API_KEY");

    // generate(
    //     &gemini_client,
    //     "gemini",
    //     "gemini-2.0-flash-exp",
    //     "who_are_you",
    //     "who are you?",
    // )
    // .await?;

    generate(
        &together_client,
        "together",
        "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
        "who_are_you",
        "who are you?",
    )
    .await?;

    generate(
        &openai_client,
        "openai",
        "gpt-4o-mini",
        "who_are_you",
        "who are you?",
    )
    .await?;

    generate(
        &openrouter_client,
        "openrouter",
        "mistralai/mistral-7b-instruct:free",
        "who_are_you",
        "who are you?",
    )
    .await?;

    Ok(())
}