use oris_runtime::{
chain::{Chain, ConversationalRetrieverChainBuilder},
llm::{OpenAI, OpenAIModel},
memory::SimpleMemory,
prompt_args,
retrievers::WikipediaRetriever,
schemas::Retriever,
};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Retriever that pulls up to three English Wikipedia articles per query.
    let retriever: Box<dyn Retriever> = Box::new(
        WikipediaRetriever::new()
            .with_language("en")
            .with_max_docs(3),
    );

    // GPT-3.5 backend used for both question rephrasing and answering.
    let model = OpenAI::default().with_model(OpenAIModel::Gpt35.to_string());

    // Conversational chain: follow-up questions are rephrased into standalone
    // ones using the chat history held in SimpleMemory, then answered from the
    // documents the retriever returns.
    let qa_chain = ConversationalRetrieverChainBuilder::new()
        .llm(model)
        .retriever(retriever)
        .memory(SimpleMemory::new().into())
        .rephrase_question(true)
        .build()?;

    let answer = qa_chain
        .invoke(prompt_args! {
            "question" => "What is Rust programming language?",
        })
        .await?;

    println!("Answer: {}", answer);
    Ok(())
}