pub struct LM {
pub base_url: Option<String>,
pub api_key: Option<String>,
pub model: String,
pub temperature: f32,
pub max_tokens: u32,
pub max_tool_iterations: u32,
pub cache: bool,
pub cache_handler: Option<Arc<Mutex<ResponseCache>>>,
/* private fields */
}

Fields§
§base_url: Option<String>
§api_key: Option<String>
§model: String
§temperature: f32
§max_tokens: u32
§max_tool_iterations: u32
§cache: bool
§cache_handler: Option<Arc<Mutex<ResponseCache>>>

Implementations§
Source§impl LM
impl LM
Source§pub fn builder() -> LMBuilder
pub fn builder() -> LMBuilder
Create an instance of LM using the builder syntax
Examples found in repository?
examples/01-simple.rs (line 73)
71async fn main() -> Result<()> {
72 configure(
73 LM::builder()
74 .model("openai:gpt-4o-mini".to_string())
75 .build()
76 .await
77 .unwrap(),
78 ChatAdapter,
79 );
80
81 let example = example! {
82 "question": "input" => "What is the capital of France?",
83 };
84
85 let qa_rater = QARater::builder().build();
86 let prediction = qa_rater.forward(example).await.unwrap();
87 println!("{prediction:?}");
88
89 Ok(())
90}

More examples
examples/05-heterogenous-examples.rs (line 15)
13async fn main() -> anyhow::Result<()> {
14 configure(
15 LM::builder()
16 .model("openai:gpt-4o-mini".to_string())
17 .build()
18 .await
19 .unwrap(),
20 ChatAdapter {},
21 );
22
23 let exp = example! {
24 "number": "input" => 10,
25 };
26 let predict = Predict::new(sign! {
27 (number: i32) -> number_squared: i32, number_cubed: i32
28 });
29
30 let prediction = predict.forward(exp).await?;
31 println!("{prediction:?}");
32
33 Ok(())
34}

examples/07-inspect-history.rs (line 31)
30async fn main() {
31 let lm = LM::builder()
32 .model("openai:gpt-4o-mini".to_string())
33 .build()
34 .await
35 .unwrap();
36 configure(lm, ChatAdapter);
37
38 let example = example! {
39 "question": "input" => "What is the capital of France?",
40 };
41
42 let qa_rater = QARater::builder().build();
43 let prediction = qa_rater.forward(example.clone()).await.unwrap();
44 println!("Prediction: {prediction:?}");
45
46 let history = get_lm().inspect_history(1).await;
47 println!("History: {history:?}");
48}

examples/03-evaluate-hotpotqa.rs (line 66)
64async fn main() -> anyhow::Result<()> {
65 configure(
66 LM::builder()
67 .model("openai:gpt-4o-mini".to_string())
68 .build()
69 .await?,
70 ChatAdapter {},
71 );
72
73 let examples = DataLoader::load_hf(
74 "hotpotqa/hotpot_qa",
75 vec!["question".to_string()],
76 vec!["answer".to_string()],
77 "fullwiki",
78 "validation",
79 true,
80 )?[..128]
81 .to_vec();
82
83 let evaluator = QARater::builder().build();
84 let metric = evaluator.evaluate(examples).await;
85
86 println!("Metric: {metric}");
87 Ok(())
88}

examples/04-optimize-hotpotqa.rs (line 62)
60async fn main() -> anyhow::Result<()> {
61 configure(
62 LM::builder()
63 .model("openai:gpt-4o-mini".to_string())
64 .build()
65 .await
66 .unwrap(),
67 ChatAdapter {},
68 );
69
70 let examples = DataLoader::load_hf(
71 "hotpotqa/hotpot_qa",
72 vec!["question".to_string()],
73 vec!["answer".to_string()],
74 "fullwiki",
75 "validation",
76 true,
77 )?[..10]
78 .to_vec();
79
80 let mut rater = QARater::builder().build();
81 let optimizer = COPRO::builder().breadth(10).depth(1).build();
82
83 println!("Rater: {:?}", rater.answerer.get_signature().instruction());
84
85 optimizer.compile(&mut rater, examples.clone()).await?;
86
87 println!("Rater: {:?}", rater.answerer.get_signature().instruction());
88
89 Ok(())
90}

examples/06-other-providers-batch.rs (line 80)
77async fn main() {
78 // Anthropic
79 configure(
80 LM::builder()
81 .model("anthropic:claude-sonnet-4-5-20250929".to_string())
82 .build()
83 .await
84 .unwrap(),
85 ChatAdapter,
86 );
87
88 let example = vec![
89 example! {
90 "question": "input" => "What is the capital of France?",
91 },
92 example! {
93 "question": "input" => "What is the capital of Germany?",
94 },
95 example! {
96 "question": "input" => "What is the capital of Italy?",
97 },
98 ];
99
100 let qa_rater = QARater::builder().build();
101 let prediction = qa_rater.batch(example.clone(), 2, true).await.unwrap();
102 println!("Anthropic: {prediction:?}");
103
104 // Gemini
105 configure(
106 LM::builder()
107 .model("gemini:gemini-2.0-flash".to_string())
108 .build()
109 .await
110 .unwrap(),
111 ChatAdapter,
112 );
113
114 let prediction = qa_rater.batch(example, 2, true).await.unwrap();
115 println!("Gemini: {prediction:?}");
116}

Additional examples can be found in:
Source§impl LM
impl LM
Source§pub async fn with_client(self, client: LMClient) -> Result<Self>
pub async fn with_client(self, client: LMClient) -> Result<Self>
Examples found in repository?
examples/11-custom-client.rs (line 44)
29async fn main() -> Result<()> {
30 // Create a custom OpenAI completion model directly
31 let api_key = env::var("OPENAI_API_KEY").unwrap_or_else(|_| "dummy-key".to_string());
32
33 let openai_client: openai::Client<reqwest::Client> =
34 openai::ClientBuilder::new(&api_key).build();
35 let openai_model = openai::completion::CompletionModel::new(openai_client, "gpt-4o-mini");
36
37 // Convert to LMClient using Into trait (enum_dispatch generates From implementations)
38 let custom_lm_client: LMClient = openai_model.into();
39
40 // Create LM with the custom client
41 let lm = LM::builder()
42 .build()
43 .await?
44 .with_client(custom_lm_client)
45 .await?;
46
47 // Configure the global settings with our custom LM
48 configure(lm, ChatAdapter);
49
50 let example = example! {
51 "question": "input" => "What is the capital of France?",
52 };
53
54 let qa_predictor = Predict::new(QASignature::new());
55 let prediction = qa_predictor.forward(example).await?;
56 println!("{prediction:?}");
57
58 Ok(())
59}

Source§impl LM
impl LM
pub async fn call( &self, messages: Chat, tools: Vec<Arc<dyn ToolDyn>>, ) -> Result<LMResponse>
Source§pub async fn inspect_history(&self, n: usize) -> Vec<CallResult>
pub async fn inspect_history(&self, n: usize) -> Vec<CallResult>
Returns the n most recent cached calls.
Panics if caching is disabled for this LM.
Examples found in repository?
examples/07-inspect-history.rs (line 46)
30async fn main() {
31 let lm = LM::builder()
32 .model("openai:gpt-4o-mini".to_string())
33 .build()
34 .await
35 .unwrap();
36 configure(lm, ChatAdapter);
37
38 let example = example! {
39 "question": "input" => "What is the capital of France?",
40 };
41
42 let qa_rater = QARater::builder().build();
43 let prediction = qa_rater.forward(example.clone()).await.unwrap();
44 println!("Prediction: {prediction:?}");
45
46 let history = get_lm().inspect_history(1).await;
47 println!("History: {history:?}");
48}

Trait Implementations§
Auto Trait Implementations§
impl Freeze for LM
impl !RefUnwindSafe for LM
impl Send for LM
impl Sync for LM
impl Unpin for LM
impl !UnwindSafe for LM
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> CloneToUninit for T
where
    T: Clone,
impl<T> CloneToUninit for T
where
    T: Clone,
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more

Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more