pub struct Task<M: CreateChatSession, Constraints = NoConstraints> { /* private fields */ }
Expand description
A task session lets you efficiently run a task with a model. The task session will reuse the model’s cache to avoid re-feeding the task prompt repeatedly.
§Example
use kalosm::language::*;
#[tokio::main]
async fn main() {
let mut llm = Llama::new_chat().await.unwrap();
let mut task = llm.task("You are a math assistant who helps students with their homework. You solve equations and answer questions. When solving problems, you will always solve problems step by step.");
println!("question 1");
// The first time we use the task, it will load the model and prompt.
task.run("What is 2 + 2?")
.to_std_out()
.await
.unwrap();
println!("question 2");
// After the first time, the model and prompt are cached.
task.run("What is 4 + 4?")
.to_std_out()
.await
.unwrap();
}
Implementations§
Source§impl<M: CreateChatSession> Task<M>
impl<M: CreateChatSession> Task<M>
Source§impl<M: CreateChatSession, Constraints> Task<M, Constraints>
impl<M: CreateChatSession, Constraints> Task<M, Constraints>
Source
pub fn with_example(self, input: impl ToString, output: impl ToString) -> Self
pub fn with_example(self, input: impl ToString, output: impl ToString) -> Self
Add an example to the task. Examples help the model perform better by allowing it to mimic the format of the examples.
§Example
use kalosm::language::*;
#[tokio::main]
async fn main() {
let model = Llama::new_chat().await.unwrap();
let task = model.task("You are a math assistant who helps students with their homework. You solve equations and answer questions. When solving problems, you will always solve problems step by step.")
.with_example("What is 1 + 2?", "Step 1: 1 + 2 = 3\nOutput: 3");
let mut stream = task("What is 2 + 2?");
stream.to_std_out().await.unwrap();
}
Source
pub fn with_examples(
self,
examples: impl IntoIterator<Item = (impl ToString, impl ToString)>,
) -> Self
pub fn with_examples( self, examples: impl IntoIterator<Item = (impl ToString, impl ToString)>, ) -> Self
Add multiple examples to the task. Examples help the model perform better by allowing it to mimic the format of the examples.
§Example
use kalosm::language::*;
#[tokio::main]
async fn main() {
let model = Llama::new_chat().await.unwrap();
let task = model.task("You are a math assistant who helps students with their homework. You solve equations and answer questions. When solving problems, you will always solve problems step by step.")
.with_examples([
("What is 1 + 2?", "Step 1: 1 + 2 = 3\nOutput: 3"),
("What is 3 + 4?", "Step 1: 3 + 4 = 7\nOutput: 7"),
("What is (4 + 8) / 3?", "Step 1: 4 + 8 = 12\nStep 2: 12 / 3 = 4\nOutput: 4"),
]);
let mut stream = task("What is 3 + 4?");
stream.to_std_out().await.unwrap();
}
Source
pub fn with_constraints<NewConstraints>(
self,
constraints: NewConstraints,
) -> Task<M, NewConstraints>
pub fn with_constraints<NewConstraints>( self, constraints: NewConstraints, ) -> Task<M, NewConstraints>
Set the constraints for the task. The constraints force the format of all outputs of the task to fit
the constraints. This can be used to make the model return a specific type. This method does the same thing
as ChatResponseBuilder::with_constraints
except it is called once on the task instead of every time you
run the task.
§Example
use kalosm::language::*;
use std::sync::Arc;
#[tokio::main]
async fn main() {
let model = Llama::new_chat().await.unwrap();
let task = model
.task("You are a math assistant. Respond with just the number answer and nothing else.")
.with_constraints(Arc::new(i32::new_parser()));
let mut stream = task("What is 2 + 2?");
stream.to_std_out().await.unwrap();
let result: i32 = stream.await.unwrap();
println!("{result}");
}
Source
pub fn typed<T>(
    self,
) -> Task<M, <M as CreateDefaultChatConstraintsForType<T>>::DefaultConstraints>
where
    M: CreateDefaultChatConstraintsForType<T>,
pub fn typed<T>(
    self,
) -> Task<M, <M as CreateDefaultChatConstraintsForType<T>>::DefaultConstraints>
where
    M: CreateDefaultChatConstraintsForType<T>,
Create a task with the default constraints for the given type. This is the same as calling Task::with_constraints
with the default constraints for the given type.
§Example
use kalosm::language::*;
#[tokio::main]
async fn main() {
let model = Llama::new_chat().await.unwrap();
let task = model
.task("You are a math assistant. Respond with just the number answer and nothing else.")
.typed();
let mut stream = task("What is 2 + 2?");
stream.to_std_out().await.unwrap();
let result: i32 = stream.await.unwrap();
println!("{result}");
}
Source§impl<M: CreateChatSession, Constraints: Clone> Task<M, Constraints>
impl<M: CreateChatSession, Constraints: Clone> Task<M, Constraints>
Source
pub fn run(
&self,
message: impl ToString,
) -> ChatResponseBuilder<'static, M, Constraints>
pub fn run( &self, message: impl ToString, ) -> ChatResponseBuilder<'static, M, Constraints>
Run the task with a message.
§Example
use kalosm::language::*;
#[tokio::main]
async fn main() {
let mut llm = Llama::new_chat().await.unwrap();
let task = llm.task("You are a math assistant who helps students with their homework. You solve equations and answer questions. When solving problems, you will always solve problems step by step.");
let result = task("What is 2 + 2?").await.unwrap();
println!("{result}");
}