Module context

Context and state management for workflows.

This module provides thread-safe state management across workflow tasks, including regular data storage and specialized chat history management.

§Examples

§Basic Context Usage

use graph_flow::Context;

let context = Context::new();

// Store different types of data
context.set("user_id", 12345).await;
context.set("name", "Alice".to_string()).await;
context.set("active", true).await;

// Retrieve data with type safety
let user_id: Option<i32> = context.get("user_id").await;
let name: Option<String> = context.get("name").await;
let active: Option<bool> = context.get("active").await;

// Synchronous access (useful in edge conditions)
let name_sync: Option<String> = context.get_sync("name");
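
The synchronous getter is handy where awaiting is not possible, for example inside a plain (non-async) closure used as an edge condition. A minimal sketch, assuming the condition receives a &Context reference (how such a closure is wired into the graph depends on the graph-building API):

// Hypothetical predicate reading a flag without awaiting
let should_branch = |ctx: &Context| ctx.get_sync::<bool>("active").unwrap_or(false);
assert!(should_branch(&context));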

§Chat History Management

use graph_flow::Context;

let context = Context::new();

// Add messages to chat history
context.add_user_message("Hello, assistant!".to_string()).await;
context.add_assistant_message("Hello! How can I help you?".to_string()).await;
context.add_system_message("User session started".to_string()).await;

// Get chat history
let history = context.get_chat_history().await;
let all_messages = context.get_all_messages().await;
let last_5 = context.get_last_messages(5).await;

// Check history status
let count = context.chat_history_len().await;
let is_empty = context.is_chat_history_empty().await;
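
Each history entry is a SerializableMessage tagged with a MessageRole. A minimal sketch of inspecting the history, assuming the message exposes role and content fields, that MessageRole has User, Assistant, and System variants (names inferred from the add_*_message methods), and that both types are re-exported alongside Context:

use graph_flow::MessageRole;

for msg in context.get_all_messages().await {
    match msg.role {
        MessageRole::User => println!("user: {}", msg.content),
        MessageRole::Assistant => println!("assistant: {}", msg.content),
        MessageRole::System => println!("system: {}", msg.content),
        // Catch-all in case the enum carries additional roles
        _ => {}
    }
}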

§Context with Message Limits

use graph_flow::Context;

// Create context with maximum 100 messages
let context = Context::with_max_chat_messages(100);

// Messages will be automatically pruned when limit is exceeded
for i in 0..150 {
    context.add_user_message(format!("Message {}", i)).await;
}

// Only the last 100 messages are kept
assert_eq!(context.chat_history_len().await, 100);

§LLM Integration (with rig feature)

use graph_flow::Context;

let context = Context::new();

context.add_user_message("What is the capital of France?".to_string()).await;
context.add_assistant_message("The capital of France is Paris.".to_string()).await;

// Get messages in rig format for LLM calls
let rig_messages = context.get_rig_messages().await;
let recent_messages = context.get_last_rig_messages(10).await;

// Use with rig's completion API
// let response = agent.completion(&rig_messages).await?;
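
A fuller sketch of one conversational turn against rig, assuming the rig feature is enabled, an OpenAI key is available, and that get_rig_messages returns Vec<rig::completion::Message>; the agent setup and the chat call follow rig's builder and Chat trait and may need adjusting for your rig version:

use graph_flow::Context;
use rig::{completion::Chat, providers::openai};

// Hypothetical agent setup; reads OPENAI_API_KEY from the environment
let client = openai::Client::from_env();
let agent = client.agent("gpt-4o").build();

let context = Context::new();
context.add_user_message("What is the capital of France?".to_string()).await;

// Feed the accumulated history as chat context for the next turn
let history = context.get_rig_messages().await;
let reply = agent.chat("And what about Germany?", history).await?;
context.add_assistant_message(reply).await;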

Structs§

ChatHistory: Container for managing chat history with serialization support.
Context: Context for sharing data between tasks in a graph execution.
SerializableMessage: A serializable message that can be converted to/from rig::completion::Message.

Enums§

MessageRole: Represents the role of a message in a conversation.