#[cfg(feature = "qdrant")]
use oris_runtime::{
embedding::openai::openai_embedder::OpenAiEmbedder,
schemas::Document,
vectorstore::qdrant::{Qdrant, StoreBuilder},
vectorstore::VectorStore,
};
#[cfg(feature = "qdrant")]
use std::io::Write;
#[cfg(feature = "qdrant")]
#[tokio::main]
async fn main() {
    use oris_runtime::vectorstore::VecStoreOptions;

    // Embedder used to turn both the documents and the user query into vectors.
    // Requires OPENAI_API_KEY to be set in the environment.
    let embedder = OpenAiEmbedder::default();

    // Connect to Qdrant over gRPC (6334 is Qdrant's default gRPC port).
    let client = Qdrant::from_url("http://localhost:6334")
        .build()
        .expect("failed to build Qdrant client for http://localhost:6334");

    // Build the vector store backed by the "oris" collection.
    let store = StoreBuilder::new()
        .embedder(embedder)
        .client(client)
        .collection_name("oris")
        .build()
        .await
        .expect("failed to build vector store");

    let doc1 = Document::new("oris is a programmable AI execution runtime in Rust.");
    let doc2 = Document::new(
        "langchaingo is a port of the langchain python library to go language and was written in 2023."
    );
    let doc3 = Document::new(
        "Capital of United States of America (USA) is Washington D.C. and the capital of France is Paris."
    );
    let doc4 = Document::new("Capital of France is Paris.");

    // Pass a slice directly; no need to allocate a temporary Vec (clippy: useless_vec).
    store
        .add_documents(&[doc1, doc2, doc3, doc4], &VecStoreOptions::default())
        .await
        .expect("failed to add documents to the vector store");

    // Prompt without a newline, so flush explicitly before blocking on stdin.
    print!("Query> ");
    std::io::stdout().flush().expect("failed to flush stdout");
    let mut query = String::new();
    std::io::stdin()
        .read_line(&mut query)
        .expect("failed to read query from stdin");
    // `read_line` keeps the trailing newline; strip it so the embedded query
    // matches what the user actually typed.
    let query = query.trim();

    // Retrieve the 2 nearest documents to the query.
    let results = store
        .similarity_search(query, 2, &VecStoreOptions::default())
        .await
        .expect("similarity search failed");

    if results.is_empty() {
        println!("No results found.");
    } else {
        for r in &results {
            println!("Document: {}", r.page_content);
        }
    }
}
#[cfg(not(feature = "qdrant"))]
fn main() {
    // Fallback entry point when the example is built without the `qdrant`
    // feature: explain how to enable it instead of failing to compile.
    let usage_lines = [
        "This example requires the 'qdrant' feature to be enabled.",
        "Please run the command as follows:",
        "cargo run --example vector_store_qdrant --features qdrant",
    ];
    for line in usage_lines {
        println!("{line}");
    }
}