use futures::StreamExt;
use oramacore_client::collection::{CollectionManager, CollectionManagerConfig, NlpSearchParams};
use oramacore_client::error::Result;
use oramacore_client::stream_manager::{AnswerConfig, CreateAiSessionConfig};
use oramacore_client::types::{LlmConfig, LlmProvider, Message, Role};
use serde::{Deserialize, Serialize};
/// Shape of the documents stored in the demo collection.
///
/// Used as the deserialization target for typed NLP search results
/// (`client.ai.nlp_search::<Document>(..)`).
#[derive(Debug, Serialize, Deserialize)]
struct Document {
    // Unique document identifier within the collection.
    id: String,
    // Human-readable title.
    title: String,
    // Full body text that search and answers draw from.
    content: String,
}
/// End-to-end tour of the OramaCore AI features: NLP search, AI session
/// creation, one-shot and streaming answers, a multi-turn conversation,
/// response regeneration, history/state inspection, and session cleanup.
///
/// Every step talks to the remote service, so all client errors are
/// propagated with `?`. Replace the placeholder collection id / API key
/// before running.
#[tokio::main]
async fn main() -> Result<()> {
    use std::io::Write; // for flushing stdout during streaming output

    let config = CollectionManagerConfig::new("your-collection-id", "your-api-key");
    let client = CollectionManager::new(config).await?;

    // --- NLP search: the service derives search queries from the
    // natural-language question and runs them against the collection. ---
    println!("=== NLP Search ===");
    let nlp_params = NlpSearchParams {
        query: "What are the benefits of machine learning?".to_string(),
        llm_config: Some(LlmConfig {
            provider: LlmProvider::OpenAI,
            model: "gpt-4".to_string(),
        }),
        user_id: Some("user-123".to_string()),
    };
    let nlp_results = client.ai.nlp_search::<Document>(nlp_params).await?;
    println!("NLP search found {} results", nlp_results.len());
    for (i, result) in nlp_results.iter().enumerate() {
        println!("{}. Generated query: {:?}", i + 1, result.generated_query);
        println!(" Found {} results", result.results.len());
    }

    // --- AI session setup. ---
    println!("\n=== Creating AI Session ===");
    let initial_messages = vec![Message {
        role: Role::System,
        content: "You are a helpful AI assistant specializing in technology and science."
            .to_string(),
    }];
    // NOTE(review): this config is built but never passed to
    // `create_ai_session()`, so the system prompt and LLM choice above are
    // currently unused. Confirm whether the client exposes a variant that
    // accepts `CreateAiSessionConfig` and wire it through.
    let _session_config = CreateAiSessionConfig::new()
        .with_llm_config(LlmConfig {
            provider: LlmProvider::OpenAI,
            model: "gpt-4".to_string(),
        })
        .with_initial_messages(initial_messages);
    let ai_session = client.ai.create_ai_session().await?;
    println!("AI session created with ID: {}", ai_session.session_id());

    // --- One-shot (non-streaming) answer. ---
    println!("\n=== Simple AI Answer ===");
    let answer_config = AnswerConfig::new("Explain quantum computing in simple terms")
        .with_max_documents(5)
        .with_min_similarity(0.8);
    let answer = ai_session.answer(answer_config).await?;
    println!("AI Response: {answer}");

    // --- Streaming answer: print chunks as they arrive. ---
    println!("\n=== Streaming AI Answer ===");
    let stream_config =
        AnswerConfig::new("What are the latest developments in artificial intelligence?")
            .with_visitor_id("demo-user".to_string())
            .with_max_documents(3);
    let mut answer_stream = ai_session.answer_stream(stream_config).await?;
    println!("Streaming response:");
    print!("AI: ");
    while let Some(chunk_result) = answer_stream.next().await {
        match chunk_result {
            Ok(chunk) => {
                match chunk {
                    oramacore_client::stream_manager::StreamChunk::Content(content) => {
                        print!("{content}");
                        // Flush so partial tokens appear immediately; a
                        // failed flush of stdout is not worth panicking over.
                        let _ = std::io::stdout().flush();
                    }
                    oramacore_client::stream_manager::StreamChunk::StatusUpdate(status) => {
                        println!("\n[Status: {status}]");
                    }
                    oramacore_client::stream_manager::StreamChunk::Done => {
                        println!("\n[Stream completed]");
                        break;
                    }
                    // External enum: ignore chunk kinds this demo doesn't handle.
                    _ => {}
                }
            }
            Err(e) => {
                eprintln!("\nStream error: {e}");
                break;
            }
        }
    }
    println!();

    // --- Multi-turn conversation on the same session. ---
    println!("\n=== Multi-turn Conversation ===");
    let questions = [
        "What is machine learning?",
        "How does it differ from traditional programming?",
        "Can you give me a practical example?",
    ];
    for (i, question) in questions.iter().enumerate() {
        println!("\n--- Turn {} ---", i + 1);
        println!("User: {question}");
        let turn_config = AnswerConfig::new(*question).with_visitor_id("demo-user".to_string());
        let response = ai_session.answer(turn_config).await?;
        println!("AI: {response}");
    }

    // --- Regenerate the most recent assistant response. ---
    println!("\n=== Regenerating Last Response ===");
    let regenerated = ai_session.regenerate_last(false).await?;
    println!("Regenerated response: {regenerated}");

    // --- Inspect the accumulated conversation history. ---
    println!("\n=== Conversation History ===");
    let messages = ai_session.get_messages().await;
    println!("Conversation has {} messages:", messages.len());
    for (i, message) in messages.iter().enumerate() {
        let role = match message.role {
            Role::System => "System",
            Role::User => "User",
            Role::Assistant => "Assistant",
        };
        // Truncate long messages on a char boundary. Byte slicing
        // (`&content[..100]`) would panic whenever byte 100 falls inside a
        // multi-byte UTF-8 character — common in LLM output (dashes,
        // accents, emoji).
        let preview = if message.content.chars().count() > 100 {
            let head: String = message.content.chars().take(100).collect();
            format!("{head}...")
        } else {
            message.content.clone()
        };
        println!("{}. {}: {}", i + 1, role, preview);
    }

    // --- Per-interaction session state (query, response, flags). ---
    println!("\n=== Session State ===");
    let state = ai_session.get_state().await;
    println!("Session has {} interactions:", state.len());
    for (i, interaction) in state.iter().enumerate() {
        println!("{}. Query: {}", i + 1, interaction.query);
        println!(" Response length: {} chars", interaction.response.len());
        println!(
            " Loading: {}, Error: {}",
            interaction.loading, interaction.error
        );
    }

    // --- Reset the session and show that the history is gone. ---
    println!("\n=== Clearing Session ===");
    ai_session.clear_session().await;
    println!("Session cleared");
    let final_messages = ai_session.get_messages().await;
    println!("Messages after clearing: {}", final_messages.len());

    Ok(())
}