use floxide::{node, workflow, FloxideError, Node, Transition, Workflow, WorkflowCtx};
use rllm::{
    builder::{LLMBackend, LLMBuilder},
    chat::{ChatMessage, ChatRole, MessageType},
    LLMProvider,
};
use std::sync::Arc;
use std::{
    env,
    fmt::{self, Debug},
};
use tracing::Level;
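
// Wrapper around a shared `rllm` provider: the generated nodes need `Clone`
// and `Debug`, neither of which `Box<dyn LLMProvider>` provides directly, so
// we hold it behind an `Arc` and implement both by hand.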
#[derive(Clone)]
pub struct LLMProviderWrapper {
    inner: Arc<Box<dyn LLMProvider>>,
}

impl LLMProviderWrapper {
    pub fn new(inner: Arc<Box<dyn LLMProvider>>) -> Self {
        Self { inner }
    }
}

impl Debug for LLMProviderWrapper {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "LLMProviderWrapper")
    }
}
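
// Step 1: turn the input topic into an article outline.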
node! {
    pub struct OutlineNode {
        llm: LLMProviderWrapper,
    };
    context = ();
    input = String;
    output = String;
    |_ctx, topic| {
        println!("OutlineNode: Generating outline for topic: '{}'", topic);
        let prompt = format!("Generate an outline for an article about: {}", topic);
        let messages = vec![ChatMessage { role: ChatRole::User, content: prompt, message_type: MessageType::Text }];
        let outline: String = self.llm.inner.chat(&messages).await
            .map_err(|e| FloxideError::Generic(e.to_string()))?
            .text()
            .unwrap_or_default();
        Ok(Transition::Next(outline))
    }
}
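
// Step 2: expand the outline into a full draft.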
node! {
    pub struct DraftNode {
        llm: LLMProviderWrapper,
    };
    context = ();
    input = String;
    output = String;
    |_ctx, outline| {
        println!("DraftNode: Drafting article based on outline...");
        let prompt = format!("Write a detailed draft article based on the following outline:\n{}", outline);
        let messages = vec![ChatMessage { role: ChatRole::User, content: prompt, message_type: MessageType::Text }];
        let draft: String = self.llm.inner.chat(&messages).await
            .map_err(|e| FloxideError::Generic(e.to_string()))?
            .text()
            .unwrap_or_default();
        Ok(Transition::Next(draft))
    }
}
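
// Step 3: review the draft and emit the final polished article.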
node! {
    pub struct ReviewNode {
        llm: LLMProviderWrapper,
    };
    context = ();
    input = String;
    output = String;
    |_ctx, draft| {
        println!("ReviewNode: Reviewing and finalizing draft...");
        let prompt = format!("Review and finalize the following article draft. Provide the final polished version without any additional text:\n{}", draft);
        let messages = vec![ChatMessage { role: ChatRole::User, content: prompt, message_type: MessageType::Text }];
        let final_article: String = self.llm.inner.chat(&messages).await
            .map_err(|e| FloxideError::Generic(e.to_string()))?
            .text()
            .unwrap_or_default();
        Ok(Transition::Next(final_article))
    }
}
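
// Linear pipeline: outline -> draft -> review. Each node's output feeds the
// next; `review` has no outgoing edges, so its output is the workflow result.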
workflow! {
    pub struct ArticleWriterWorkflow {
        outline: OutlineNode,
        draft: DraftNode,
        review: ReviewNode,
    }
    start = outline;
    context = ();
    edges {
        outline => { [draft] };
        draft => { [review] };
        review => {};
    }
}
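
// Requires OPENAI_API_KEY in the environment, e.g.
//   OPENAI_API_KEY=sk-... cargo run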
#[tokio::main]
async fn main() -> Result<(), FloxideError> {
    tracing_subscriber::fmt()
        .with_max_level(Level::DEBUG)
        .init();

    // Build a single OpenAI-backed provider and share it across all three nodes.
    let llm = LLMBuilder::new()
        .backend(LLMBackend::OpenAI)
        .api_key(env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set"))
        .model("gpt-4o")
        .temperature(0.7)
        .build()
        .expect("Failed to build LLM");
    let llm = LLMProviderWrapper::new(Arc::new(llm));

    let workflow = ArticleWriterWorkflow {
        outline: OutlineNode { llm: llm.clone() },
        draft: DraftNode { llm: llm.clone() },
        review: ReviewNode { llm: llm.clone() },
    };

    // Run the workflow end to end, starting from the topic string.
    let ctx = WorkflowCtx::new(());
    let result = workflow
        .run(&ctx, "Rust Programming Language".to_string())
        .await?;
    println!("Generated article: {}", result);
    Ok(())
}