use tracing_subscriber::EnvFilter;
// Model identifier passed to the genai client for both chat rounds.
const MODEL: &str = "deepseek-chat";
// The "needle": an arbitrary value buried mid-haystack that the model must recover.
const MAGIC_NUMBER: u32 = 31597;
// Haystack size; 1M lines yields a context far too large to fit in a prompt.
const NUM_LINES: usize = 1_000_000;
// System prompt instructing the model to locate information via the Lua tool
// (searching the `context` global) instead of expecting it in the prompt.
const SYSTEM_PROMPT: &str = r#"You are an assistant with access to a Lua runtime. You can use Lua to process information and compute answers.
A variable called 'context' is available in the Lua environment containing a large text document (string). When answering questions, use Lua string functions to search through and extract information from the context.
Use the Lua tool to write code that processes the context variable to find the information needed."#;
/// Builds a synthetic document of `num_lines` lines of filler text, with the
/// middle line (index `num_lines / 2`) replaced by the "needle" sentence
/// embedding `magic_number`.
///
/// Lines are separated by `\n` with no trailing newline; an empty string is
/// returned when `num_lines` is 0. Output is byte-identical to joining the
/// individual lines with `"\n"`.
fn generate_context(num_lines: usize, magic_number: u32) -> String {
    use std::fmt::Write;

    // Build directly into one pre-sized String instead of allocating one
    // heap String per line and joining: for 1M lines that avoids ~1M small
    // allocations and a second full-size copy during `join`.
    // ~48 bytes/line is a generous estimate of the filler-line length.
    let mut context = String::with_capacity(num_lines.saturating_mul(48));
    for i in 0..num_lines {
        if i > 0 {
            context.push('\n');
        }
        if i == num_lines / 2 {
            // The needle, placed at the midpoint of the haystack.
            write!(context, "The magic number is {magic_number}")
                .expect("write! to a String is infallible");
        } else {
            write!(context, "Line {i} contains random filler text data")
                .expect("write! to a String is infallible");
        }
    }
    context
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Route `tracing` output through the standard env filter (e.g. RUST_LOG);
    // with no env var set this stays quiet by default.
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .init();
    println!("\n=== Needle in Haystack Example ===\n");
    println!("Generating context with {} lines...", NUM_LINES);
    // Build the haystack: filler lines with the magic number buried in the middle.
    let context = generate_context(NUM_LINES, MAGIC_NUMBER);
    println!("Context generated: ~{} MB\n", context.len() / (1024 * 1024));
    // Expose the haystack to the Lua runtime as a global string named
    // `context`, so the model's tool calls can search it without the text
    // ever entering the prompt.
    let repl = onetool::Repl::new()?;
    repl.with_runtime(|lua| {
        lua.globals().set("context", context.as_str())?;
        Ok(())
    })?;
    let genai_client = genai::Client::default();
    let lua_repl = onetool::genai::LuaRepl::new(&repl);
    let question = "I'm looking for a magic number. What is it?";
    // First round: system prompt + user question, advertising the Lua REPL
    // as an available tool.
    let chat_req = genai::chat::ChatRequest::new(vec![
        genai::chat::ChatMessage::system(SYSTEM_PROMPT),
        genai::chat::ChatMessage::user(question),
    ])
    .with_tools(vec![lua_repl.definition()]);
    println!("Asking: \"{}\"\n", question);
    // Clone the request: it is extended with the tool exchange and re-sent below.
    let chat_res = genai_client
        .exec_chat(MODEL, chat_req.clone(), None)
        .await?;
    // The model is expected to respond with a tool call rather than a direct answer.
    let tool_calls = chat_res.into_tool_calls();
    if tool_calls.is_empty() {
        return Err("Expected tool calls in the response".into());
    }
    // NOTE(review): only the first tool call is executed, while the full
    // `tool_calls` list is appended to the transcript below — presumably the
    // model emits a single call per turn here; confirm, since extra calls
    // would go unanswered in the second round.
    let tool_response = lua_repl.call(&tool_calls[0]);
    // Second round: append the tool call(s) and the tool's result so the
    // model can compose a final natural-language answer.
    let chat_req = chat_req
        .append_message(tool_calls)
        .append_message(tool_response);
    let chat_res = genai_client.exec_chat(MODEL, chat_req, None).await?;
    // Fall back to an empty answer (which fails verification) if the
    // response carries no text part.
    let answer = chat_res.first_text().unwrap_or("");
    println!("=== LLM Response ===");
    println!("{}\n", answer);
    // Verify the needle made it into the final answer.
    println!("=== Verification ===");
    println!("Expected magic number: {}", MAGIC_NUMBER);
    if answer.contains(&MAGIC_NUMBER.to_string()) {
        println!("✓ SUCCESS: Answer contains the expected magic number!");
    } else {
        println!("✗ FAILURE: Answer does not contain the expected magic number");
    }
    Ok(())
}