use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Instant;
use parking_lot::RwLock; use tokio::sync::broadcast;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// One contributor to an assembled model context: the workflow node that
/// supplied the content and how many tokens it accounted for.
/// Appears in [`EventKind::ContextAssembled`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ContextSource {
    /// Id of the workflow node that produced this context fragment.
    pub node: String,
    /// Token count attributed to this fragment.
    pub tokens: u32,
}
/// A node whose content was dropped during context assembly, together with a
/// human-readable reason (e.g. a budget overrun).
/// Appears in [`EventKind::ContextAssembled`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ExcludedItem {
    /// Id of the workflow node that was excluded.
    pub node: String,
    /// Why the node's content did not make it into the context.
    pub reason: String,
}
/// Per-turn details recorded for an agent loop: the provider's response text,
/// optional extended-thinking output, token usage, and why the turn stopped.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AgentTurnMetadata {
    /// Extended-thinking text, when the provider emitted any.
    /// Omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thinking: Option<String>,
    /// The assistant-visible response text for this turn.
    pub response_text: String,
    /// Prompt tokens consumed by this turn.
    pub input_tokens: u32,
    /// Completion tokens produced by this turn.
    pub output_tokens: u32,
    /// Tokens served from prompt cache; defaults to 0 when absent in JSON.
    #[serde(default)]
    pub cache_read_tokens: u32,
    /// Provider-reported stop reason (e.g. "end_turn", "tool_use").
    pub stop_reason: String,
}
impl AgentTurnMetadata {
    /// Builds metadata for a turn with no usage information: all token counts
    /// are zero and no thinking text is attached.
    pub fn text_only(response: impl Into<String>, stop_reason: impl Into<String>) -> Self {
        // Delegate so the field defaults live in exactly one place.
        Self::with_usage(response, 0, 0, stop_reason)
    }

    /// Builds metadata carrying token usage. `cache_read_tokens` starts at 0
    /// and can be set on the struct directly when known; `thinking` is `None`.
    pub fn with_usage(
        response: impl Into<String>,
        input_tokens: u32,
        output_tokens: u32,
        stop_reason: impl Into<String>,
    ) -> Self {
        Self {
            thinking: None,
            response_text: response.into(),
            input_tokens,
            output_tokens,
            cache_read_tokens: 0,
            stop_reason: stop_reason.into(),
        }
    }

    /// Combined input + output tokens for this turn.
    ///
    /// Saturates at `u32::MAX` instead of overflowing — a plain `+` would
    /// panic in debug builds if a provider ever reported absurd counts.
    pub fn total_tokens(&self) -> u32 {
        self.input_tokens.saturating_add(self.output_tokens)
    }

    /// Whether the provider emitted extended-thinking text for this turn.
    pub fn has_thinking(&self) -> bool {
        self.thinking.is_some()
    }
}
/// A single log entry: a monotonically increasing id, a timestamp relative to
/// the owning [`EventLog`]'s creation, and the event payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Event {
    /// Sequence number assigned by the emitting `EventLog`, starting at 0.
    pub id: u64,
    /// Milliseconds elapsed since the `EventLog` was created (not wall-clock).
    pub timestamp_ms: u64,
    /// What actually happened.
    pub kind: EventKind,
}
/// Every event the runtime can record.
///
/// Serialized as an internally tagged JSON object:
/// `{"type": "<snake_case variant name>", ...fields}`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum EventKind {
    // --- Workflow lifecycle ---
    /// Execution of a workflow began.
    WorkflowStarted {
        task_count: usize,
        generation_id: String,
        workflow_hash: String,
        nika_version: String,
    },
    /// The workflow finished successfully.
    WorkflowCompleted {
        final_output: Arc<Value>,
        total_duration_ms: u64,
    },
    /// The workflow terminated with an error.
    WorkflowFailed {
        error: String,
        // Task blamed for the failure, when attributable.
        failed_task: Option<Arc<str>>,
    },
    /// The workflow was cancelled while tasks were still running.
    WorkflowAborted {
        reason: String,
        duration_ms: u64,
        running_tasks: Vec<Arc<str>>,
    },
    /// Execution paused (no payload).
    WorkflowPaused,
    /// Execution resumed (no payload).
    WorkflowResumed,
    // --- Task lifecycle ---
    /// A task entered the scheduler queue with its dependency list.
    TaskScheduled {
        task_id: Arc<str>,
        dependencies: Vec<Arc<str>>,
    },
    /// A task began executing; `inputs` captures its resolved inputs.
    TaskStarted {
        task_id: Arc<str>,
        verb: Arc<str>,
        inputs: Value,
    },
    /// A task finished successfully with its output value.
    TaskCompleted {
        task_id: Arc<str>,
        output: Arc<Value>,
        duration_ms: u64,
    },
    /// A task terminated with an error.
    TaskFailed {
        task_id: Arc<str>,
        error: String,
        duration_ms: u64,
    },
    /// A template string was expanded for a task.
    TemplateResolved {
        task_id: Arc<str>,
        template: String,
        result: String,
    },
    // --- Model provider ---
    /// A request was issued to a model provider.
    ProviderCalled {
        task_id: Arc<str>,
        provider: String,
        model: String,
        prompt_len: usize,
    },
    /// A model provider responded, with token/cost accounting.
    ProviderResponded {
        task_id: Arc<str>,
        request_id: Option<String>,
        input_tokens: u32,
        output_tokens: u32,
        cache_read_tokens: u32,
        // Time to first token, when the provider streamed.
        ttft_ms: Option<u64>,
        finish_reason: String,
        cost_usd: f64,
    },
    /// A prompt context was assembled from multiple sources.
    ContextAssembled {
        task_id: Arc<str>,
        sources: Vec<ContextSource>,
        excluded: Vec<ExcludedItem>,
        total_tokens: u32,
        budget_used_pct: f32,
        truncated: bool,
    },
    // --- MCP (tool/resource servers) ---
    /// A tool or resource call was dispatched to an MCP server.
    McpInvoke {
        task_id: Arc<str>,
        // Correlates with the matching `McpResponse`.
        call_id: String,
        mcp_server: String,
        tool: Option<String>,
        resource: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        params: Option<Value>,
    },
    /// An MCP server answered a previously issued call.
    McpResponse {
        task_id: Arc<str>,
        call_id: String,
        output_len: usize,
        duration_ms: u64,
        cached: bool,
        is_error: bool,
        #[serde(skip_serializing_if = "Option::is_none")]
        response: Option<Value>,
    },
    /// Connection to an MCP server was established.
    McpConnected {
        server_name: String,
    },
    /// An MCP server reported or caused an error.
    McpError {
        server_name: String,
        error: String,
    },
    /// An MCP operation is being retried after a failure.
    McpRetry {
        task_id: Arc<str>,
        server_name: String,
        operation: String,
        attempt: u32,
        max_attempts: u32,
        error: String,
    },
    // --- Agent loop ---
    /// An agent loop started for a task.
    AgentStart {
        task_id: Arc<str>,
        max_turns: u32,
        mcp_servers: Vec<String>,
    },
    /// One turn of an agent loop; `metadata` is present for completed turns.
    AgentTurn {
        task_id: Arc<str>,
        turn_index: u32,
        kind: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        metadata: Option<AgentTurnMetadata>,
    },
    /// An agent loop finished.
    AgentComplete {
        task_id: Arc<str>,
        turns: u32,
        stop_reason: String,
    },
    /// An agent spawned a child task; `depth` tracks nesting.
    AgentSpawned {
        parent_task_id: Arc<str>,
        child_task_id: Arc<str>,
        depth: u32,
    },
    // --- Miscellaneous ---
    /// Free-form log line, optionally attributed to a task.
    Log {
        level: String,
        message: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        task_id: Option<Arc<str>>,
    },
    /// Extension point for caller-defined events.
    Custom {
        name: String,
        payload: Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        task_id: Option<Arc<str>>,
    },
    /// A task wrote an artifact to disk.
    ArtifactWritten {
        task_id: Arc<str>,
        path: String,
        size: u64,
        format: String,
    },
    /// A task failed to write an artifact.
    ArtifactFailed {
        task_id: Arc<str>,
        path: String,
        reason: String,
    },
}
impl EventKind {
#[allow(dead_code)] pub fn task_id(&self) -> Option<&str> {
match self {
Self::TaskScheduled { task_id, .. }
| Self::TaskStarted { task_id, .. }
| Self::TaskCompleted { task_id, .. }
| Self::TaskFailed { task_id, .. }
| Self::TemplateResolved { task_id, .. }
| Self::ProviderCalled { task_id, .. }
| Self::ProviderResponded { task_id, .. }
| Self::ContextAssembled { task_id, .. }
| Self::McpInvoke { task_id, .. }
| Self::McpResponse { task_id, .. }
| Self::McpRetry { task_id, .. } | Self::AgentStart { task_id, .. }
| Self::AgentTurn { task_id, .. }
| Self::AgentComplete { task_id, .. }
| Self::ArtifactWritten { task_id, .. }
| Self::ArtifactFailed { task_id, .. } => Some(task_id),
Self::AgentSpawned { parent_task_id, .. } => Some(parent_task_id),
Self::Log { task_id, .. } | Self::Custom { task_id, .. } => {
task_id.as_ref().map(|s| s.as_ref())
}
Self::WorkflowStarted { .. }
| Self::WorkflowCompleted { .. }
| Self::WorkflowFailed { .. }
| Self::WorkflowAborted { .. }
| Self::WorkflowPaused
| Self::WorkflowResumed
| Self::McpConnected { .. }
| Self::McpError { .. } => None,
}
}
#[allow(dead_code)] pub fn is_workflow_event(&self) -> bool {
matches!(
self,
Self::WorkflowStarted { .. }
| Self::WorkflowCompleted { .. }
| Self::WorkflowFailed { .. }
| Self::WorkflowAborted { .. }
| Self::WorkflowPaused
| Self::WorkflowResumed
)
}
}
/// Append-only, thread-safe event log.
///
/// `Clone` is shallow: clones share the same underlying event storage and id
/// counter (everything is behind `Arc`), so emitting on one handle is visible
/// from all of them.
#[derive(Clone)]
pub struct EventLog {
    // Stored events; guarded for concurrent emit/read.
    events: Arc<RwLock<Vec<Event>>>,
    // Origin for the relative `timestamp_ms` on every event.
    start_time: Instant,
    // Next id to hand out; shared across clones.
    next_id: Arc<AtomicU64>,
    // Optional live fan-out of emitted events; `None` for plain `new()` logs.
    broadcast_tx: Option<broadcast::Sender<Event>>,
}
impl EventLog {
    /// Creates an empty log with no broadcast channel attached.
    pub fn new() -> Self {
        Self {
            events: Arc::new(RwLock::new(Vec::new())),
            start_time: Instant::now(),
            next_id: Arc::new(AtomicU64::new(0)),
            broadcast_tx: None,
        }
    }

    /// Creates a log that additionally broadcasts every emitted event, and
    /// returns an initial receiver. Further receivers via [`Self::subscribe`].
    pub fn new_with_broadcast() -> (Self, broadcast::Receiver<Event>) {
        let (tx, rx) = broadcast::channel(512);
        let mut event_log = Self::new();
        event_log.broadcast_tx = Some(tx);
        (event_log, rx)
    }

    /// New broadcast receiver, or `None` if this log was built without one.
    #[allow(dead_code)]
    pub fn subscribe(&self) -> Option<broadcast::Receiver<Event>> {
        self.broadcast_tx.as_ref().map(|tx| tx.subscribe())
    }

    /// Records an event, broadcasting it to any subscribers, and returns its id.
    pub fn emit(&self, kind: EventKind) -> u64 {
        // Take the write lock BEFORE allocating the id. The previous order
        // (fetch_add, then lock) let two racing emitters store/broadcast
        // events out of id order; under the lock, stored order == id order.
        let mut events = self.events.write();
        let id = self.next_id.fetch_add(1, Ordering::SeqCst);
        let event = Event {
            id,
            timestamp_ms: self.start_time.elapsed().as_millis() as u64,
            kind,
        };
        if let Some(ref tx) = self.broadcast_tx {
            // A send error only means there are currently no receivers.
            let _ = tx.send(event.clone());
        }
        // Clone avoided entirely when no broadcaster is attached.
        events.push(event);
        id
    }

    /// Snapshot of all events (cloned out of the lock).
    #[allow(dead_code)]
    pub fn events(&self) -> Vec<Event> {
        self.events.read().clone()
    }

    /// Runs `f` against the event slice under the read lock, without cloning.
    #[allow(dead_code)]
    pub fn with_events<T>(&self, f: impl FnOnce(&[Event]) -> T) -> T {
        f(&self.events.read())
    }

    /// All events attributed to `task_id` (see [`EventKind::task_id`]).
    #[allow(dead_code)]
    pub fn filter_task(&self, task_id: &str) -> Vec<Event> {
        self.with_events(|events| {
            events
                .iter()
                .filter(|e| e.kind.task_id() == Some(task_id))
                .cloned()
                .collect()
        })
    }

    /// All workflow-level events (see [`EventKind::is_workflow_event`]).
    #[allow(dead_code)]
    pub fn workflow_events(&self) -> Vec<Event> {
        self.with_events(|events| {
            events
                .iter()
                .filter(|e| e.kind.is_workflow_event())
                .cloned()
                .collect()
        })
    }

    /// Number of events attributed to `task_id`, without cloning any.
    #[allow(dead_code)]
    pub fn count_task(&self, task_id: &str) -> usize {
        self.with_events(|events| {
            events
                .iter()
                .filter(|e| e.kind.task_id() == Some(task_id))
                .count()
        })
    }

    /// Serializes the full log as a JSON array; `Null` on serialization failure.
    #[allow(dead_code)]
    pub fn to_json(&self) -> Value {
        self.with_events(|events| serde_json::to_value(events).unwrap_or(Value::Null))
    }

    /// Number of recorded events.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.events.read().len()
    }

    /// Whether no events have been recorded yet.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
impl Default for EventLog {
fn default() -> Self {
Self::new()
}
}
impl std::fmt::Debug for EventLog {
    /// Compact debug form: reports only the event count rather than dumping
    /// what may be thousands of stored events.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut builder = f.debug_struct("EventLog");
        builder.field("len", &self.len());
        builder.finish()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Version baked in at compile time; WorkflowStarted events embed it so a
    // log can be traced back to the binary that produced it.
    const TEST_VERSION: &str = env!("CARGO_PKG_VERSION");

    // Helper: a WorkflowStarted event with fixed generation id and hash.
    fn workflow_started(task_count: usize) -> EventKind {
        EventKind::WorkflowStarted {
            task_count,
            generation_id: "test-gen-123".to_string(),
            workflow_hash: "abc123".to_string(),
            nika_version: TEST_VERSION.to_string(),
        }
    }

    // Helper: a ProviderResponded event with fixed request id / ttft / cost.
    fn provider_responded(task_id: &str, input_tokens: u32, output_tokens: u32) -> EventKind {
        EventKind::ProviderResponded {
            task_id: Arc::from(task_id),
            request_id: Some("req-456".to_string()),
            input_tokens,
            output_tokens,
            cache_read_tokens: 0,
            ttft_ms: Some(150),
            finish_reason: "stop".to_string(),
            cost_usd: 0.001,
        }
    }

    // --- EventKind accessors ---

    #[test]
    fn eventkind_task_id_extraction() {
        let started = EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "task1".into(),
            inputs: json!({}),
        };
        assert_eq!(started.task_id(), Some("task1"));
        // Workflow-level events have no task attribution.
        let workflow = workflow_started(5);
        assert_eq!(workflow.task_id(), None);
    }

    #[test]
    fn eventkind_is_workflow_event() {
        assert!(workflow_started(3).is_workflow_event());
        assert!(EventKind::WorkflowCompleted {
            final_output: Arc::new(json!("done")),
            total_duration_ms: 1000,
        }
        .is_workflow_event());
        assert!(!EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        }
        .is_workflow_event());
    }

    // --- serde round-tripping of the internally tagged enum ---

    #[test]
    fn eventkind_serializes_with_type_tag() {
        let kind = EventKind::TaskCompleted {
            task_id: "greet".into(),
            output: Arc::new(json!({"message": "Hello"})),
            duration_ms: 150,
        };
        let json = serde_json::to_value(&kind).unwrap();
        // Variant name becomes the snake_case "type" tag; fields are inlined.
        assert_eq!(json["type"], "task_completed");
        assert_eq!(json["task_id"], "greet");
        assert_eq!(json["output"]["message"], "Hello");
    }

    #[test]
    fn eventkind_deserializes_from_tagged_json() {
        let json = json!({
            "type": "task_started",
            "task_id": "analyze",
            "verb": "infer",
            "inputs": {"weather": "sunny"}
        });
        let kind: EventKind = serde_json::from_value(json).unwrap();
        assert_eq!(
            kind,
            EventKind::TaskStarted {
                task_id: "analyze".into(),
                verb: "infer".into(),
                inputs: json!({"weather": "sunny"}),
            }
        );
    }

    // --- EventLog basics ---

    #[test]
    fn eventlog_new_starts_empty() {
        let log = EventLog::new();
        assert!(log.is_empty());
        assert_eq!(log.len(), 0);
    }

    #[test]
    fn eventlog_emit_returns_monotonic_ids() {
        let log = EventLog::new();
        let id1 = log.emit(workflow_started(3));
        let id2 = log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        });
        let id3 = log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t2".into(),
            inputs: json!({}),
        });
        // Ids start at 0 and increase by 1 per emit.
        assert_eq!(id1, 0);
        assert_eq!(id2, 1);
        assert_eq!(id3, 2);
        assert_eq!(log.len(), 3);
    }

    #[test]
    fn eventlog_events_returns_all() {
        let log = EventLog::new();
        log.emit(workflow_started(2));
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        });
        let events = log.events();
        assert_eq!(events.len(), 2);
        assert_eq!(events[0].id, 0);
        assert_eq!(events[1].id, 1);
    }

    #[test]
    fn eventlog_filter_task_returns_only_matching() {
        let log = EventLog::new();
        log.emit(workflow_started(2));
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "alpha".into(),
            inputs: json!({}),
        });
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "beta".into(),
            inputs: json!({}),
        });
        log.emit(EventKind::TaskCompleted {
            task_id: "alpha".into(),
            output: Arc::new(json!("result")),
            duration_ms: 100,
        });
        // "alpha" has a start + completion; the workflow event matches neither.
        let alpha_events = log.filter_task("alpha");
        assert_eq!(alpha_events.len(), 2);
        assert!(alpha_events
            .iter()
            .all(|e| e.kind.task_id() == Some("alpha")));
        let beta_events = log.filter_task("beta");
        assert_eq!(beta_events.len(), 1);
    }

    #[test]
    fn eventlog_workflow_events_returns_only_workflow() {
        let log = EventLog::new();
        log.emit(workflow_started(1));
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        });
        log.emit(EventKind::WorkflowCompleted {
            final_output: Arc::new(json!("done")),
            total_duration_ms: 500,
        });
        let wf_events = log.workflow_events();
        assert_eq!(wf_events.len(), 2);
        assert!(wf_events.iter().all(|e| e.kind.is_workflow_event()));
    }

    #[test]
    fn eventlog_to_json() {
        let log = EventLog::new();
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "task1".into(),
            inputs: json!({}),
        });
        let json = log.to_json();
        assert!(json.is_array());
        assert_eq!(json.as_array().unwrap().len(), 1);
        // Event wraps the tagged EventKind under its "kind" field.
        assert_eq!(json[0]["kind"]["type"], "task_started");
    }

    #[test]
    fn eventlog_is_clone() {
        let log = EventLog::new();
        log.emit(workflow_started(1));
        let cloned = log.clone();
        assert_eq!(cloned.len(), 1);
        // Clones share storage: an emit on the original is visible via the clone.
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        });
        assert_eq!(cloned.len(), 2);
    }

    #[test]
    fn eventlog_thread_safe_concurrent_emits() {
        use std::thread;
        let log = EventLog::new();
        let handles: Vec<_> = (0..10)
            .map(|i| {
                let log = log.clone();
                thread::spawn(move || {
                    log.emit(EventKind::TaskStarted {
                        verb: "infer".into(),
                        task_id: Arc::from(format!("task{}", i)),
                        inputs: json!({}),
                    })
                })
            })
            .collect();
        for h in handles {
            h.join().unwrap();
        }
        assert_eq!(log.len(), 10);
        // Every emit must have received a distinct id.
        let events = log.events();
        let mut ids: Vec<u64> = events.iter().map(|e| e.id).collect();
        ids.sort();
        ids.dedup();
        assert_eq!(ids.len(), 10);
    }

    #[test]
    fn event_timestamp_is_relative() {
        let log = EventLog::new();
        log.emit(workflow_started(1));
        std::thread::sleep(std::time::Duration::from_millis(10));
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "t1".into(),
            inputs: json!({}),
        });
        let events = log.events();
        // Timestamps are measured from log creation, so later >= earlier.
        assert!(events[1].timestamp_ms >= events[0].timestamp_ms);
    }

    // --- Payload capture fidelity ---

    #[test]
    fn task_started_captures_full_context() {
        let log = EventLog::new();
        let inputs = json!({
            "weather": "sunny",
            "temperature": 25,
            "nested": {"key": "value"}
        });
        log.emit(EventKind::TaskStarted {
            verb: "infer".into(),
            task_id: "analyze".into(),
            inputs: inputs.clone(),
        });
        let events = log.filter_task("analyze");
        assert_eq!(events.len(), 1);
        if let EventKind::TaskStarted {
            inputs: captured, ..
        } = &events[0].kind
        {
            assert_eq!(captured, &inputs);
            assert_eq!(captured["weather"], "sunny");
            assert_eq!(captured["nested"]["key"], "value");
        } else {
            panic!("Expected TaskStarted event");
        }
    }

    #[test]
    fn workflow_started_includes_generation_id() {
        let log = EventLog::new();
        log.emit(EventKind::WorkflowStarted {
            task_count: 3,
            generation_id: "gen-abc-123".to_string(),
            workflow_hash: "sha256:deadbeef".to_string(),
            nika_version: TEST_VERSION.to_string(),
        });
        let events = log.events();
        if let EventKind::WorkflowStarted {
            generation_id,
            workflow_hash,
            nika_version,
            ..
        } = &events[0].kind
        {
            assert_eq!(generation_id, "gen-abc-123");
            assert_eq!(workflow_hash, "sha256:deadbeef");
            assert_eq!(nika_version, TEST_VERSION);
        } else {
            panic!("Expected WorkflowStarted event");
        }
    }

    #[test]
    fn provider_responded_tracks_detailed_tokens() {
        let log = EventLog::new();
        log.emit(EventKind::ProviderResponded {
            task_id: "infer_task".into(),
            request_id: Some("req-xyz-789".to_string()),
            input_tokens: 500,
            output_tokens: 150,
            cache_read_tokens: 200,
            ttft_ms: Some(85),
            finish_reason: "stop".to_string(),
            cost_usd: 0.0025,
        });
        let events = log.filter_task("infer_task");
        assert_eq!(events.len(), 1);
        if let EventKind::ProviderResponded {
            request_id,
            input_tokens,
            output_tokens,
            cache_read_tokens,
            ttft_ms,
            finish_reason,
            cost_usd,
            ..
        } = &events[0].kind
        {
            assert_eq!(request_id, &Some("req-xyz-789".to_string()));
            assert_eq!(*input_tokens, 500);
            assert_eq!(*output_tokens, 150);
            assert_eq!(*cache_read_tokens, 200);
            assert_eq!(*ttft_ms, Some(85));
            assert_eq!(finish_reason, "stop");
            // Float compared with an epsilon rather than exact equality.
            assert!((*cost_usd - 0.0025).abs() < f64::EPSILON);
        } else {
            panic!("Expected ProviderResponded event");
        }
    }

    #[test]
    fn context_assembled_tracks_sources() {
        let log = EventLog::new();
        let sources = vec![
            ContextSource {
                node: "system_prompt".to_string(),
                tokens: 200,
            },
            ContextSource {
                node: "user_input".to_string(),
                tokens: 50,
            },
            ContextSource {
                node: "examples".to_string(),
                tokens: 300,
            },
        ];
        let excluded = vec![ExcludedItem {
            node: "large_history".to_string(),
            reason: "exceeded budget".to_string(),
        }];
        log.emit(EventKind::ContextAssembled {
            task_id: "assemble_task".into(),
            sources: sources.clone(),
            excluded: excluded.clone(),
            total_tokens: 550,
            budget_used_pct: 55.0,
            truncated: false,
        });
        let events = log.filter_task("assemble_task");
        assert_eq!(events.len(), 1);
        if let EventKind::ContextAssembled {
            sources: s,
            excluded: e,
            total_tokens,
            budget_used_pct,
            truncated,
            ..
        } = &events[0].kind
        {
            assert_eq!(s.len(), 3);
            assert_eq!(s[0].node, "system_prompt");
            assert_eq!(s[0].tokens, 200);
            assert_eq!(e.len(), 1);
            assert_eq!(e[0].reason, "exceeded budget");
            assert_eq!(*total_tokens, 550);
            assert!((*budget_used_pct - 55.0).abs() < f32::EPSILON);
            assert!(!*truncated);
        } else {
            panic!("Expected ContextAssembled event");
        }
    }

    #[test]
    fn context_source_and_excluded_item_serialize() {
        let source = ContextSource {
            node: "test_node".to_string(),
            tokens: 100,
        };
        let json = serde_json::to_value(&source).unwrap();
        assert_eq!(json["node"], "test_node");
        assert_eq!(json["tokens"], 100);
        let excluded = ExcludedItem {
            node: "big_file".to_string(),
            reason: "too large".to_string(),
        };
        let json = serde_json::to_value(&excluded).unwrap();
        assert_eq!(json["node"], "big_file");
        assert_eq!(json["reason"], "too large");
    }

    #[test]
    fn provider_responded_helper_creates_valid_event() {
        let event = provider_responded("test_task", 100, 50);
        assert_eq!(event.task_id(), Some("test_task"));
        if let EventKind::ProviderResponded {
            input_tokens,
            output_tokens,
            ..
        } = event
        {
            assert_eq!(input_tokens, 100);
            assert_eq!(output_tokens, 50);
        } else {
            panic!("Expected ProviderResponded event");
        }
    }

    // --- AgentTurnMetadata constructors and serialization ---

    #[test]
    fn agent_turn_metadata_text_only() {
        let metadata = AgentTurnMetadata::text_only("Hello world", "end_turn");
        assert_eq!(metadata.response_text, "Hello world");
        assert_eq!(metadata.stop_reason, "end_turn");
        assert_eq!(metadata.input_tokens, 0);
        assert_eq!(metadata.output_tokens, 0);
        assert_eq!(metadata.cache_read_tokens, 0);
        assert!(!metadata.has_thinking());
        assert_eq!(metadata.total_tokens(), 0);
    }

    #[test]
    fn agent_turn_metadata_with_usage() {
        let metadata = AgentTurnMetadata::with_usage("Response", 100, 50, "tool_use");
        assert_eq!(metadata.response_text, "Response");
        assert_eq!(metadata.stop_reason, "tool_use");
        assert_eq!(metadata.input_tokens, 100);
        assert_eq!(metadata.output_tokens, 50);
        assert_eq!(metadata.total_tokens(), 150);
        assert!(!metadata.has_thinking());
    }

    #[test]
    fn agent_turn_metadata_with_thinking() {
        let metadata = AgentTurnMetadata {
            thinking: Some("Let me think about this...".to_string()),
            response_text: "Here's my answer".to_string(),
            input_tokens: 200,
            output_tokens: 100,
            cache_read_tokens: 50,
            stop_reason: "end_turn".to_string(),
        };
        assert!(metadata.has_thinking());
        assert_eq!(
            metadata.thinking.as_ref().unwrap(),
            "Let me think about this..."
        );
        // cache_read_tokens is not part of the input+output total.
        assert_eq!(metadata.total_tokens(), 300);
    }

    #[test]
    fn agent_turn_metadata_serializes() {
        let metadata = AgentTurnMetadata::with_usage("Test response", 100, 50, "end_turn");
        let json = serde_json::to_value(&metadata).unwrap();
        assert_eq!(json["response_text"], "Test response");
        assert_eq!(json["input_tokens"], 100);
        assert_eq!(json["output_tokens"], 50);
        assert_eq!(json["stop_reason"], "end_turn");
        // `thinking: None` must be omitted entirely (skip_serializing_if).
        assert!(json.get("thinking").is_none());
    }

    #[test]
    fn agent_turn_metadata_with_thinking_serializes() {
        let metadata = AgentTurnMetadata {
            thinking: Some("My thoughts".to_string()),
            response_text: "My response".to_string(),
            input_tokens: 50,
            output_tokens: 25,
            cache_read_tokens: 0,
            stop_reason: "end_turn".to_string(),
        };
        let json = serde_json::to_value(&metadata).unwrap();
        assert_eq!(json["thinking"], "My thoughts");
        assert_eq!(json["response_text"], "My response");
    }

    #[test]
    fn agent_turn_with_metadata_serializes() {
        let log = EventLog::new();
        let metadata = AgentTurnMetadata::with_usage("Agent response", 100, 50, "end_turn");
        log.emit(EventKind::AgentTurn {
            task_id: "agent_task".into(),
            turn_index: 1,
            kind: "end_turn".to_string(),
            metadata: Some(metadata),
        });
        let events = log.filter_task("agent_task");
        assert_eq!(events.len(), 1);
        if let EventKind::AgentTurn {
            metadata: Some(m), ..
        } = &events[0].kind
        {
            assert_eq!(m.response_text, "Agent response");
            assert_eq!(m.total_tokens(), 150);
        } else {
            panic!("Expected AgentTurn with metadata");
        }
    }

    #[test]
    fn agent_turn_without_metadata_serializes() {
        let log = EventLog::new();
        log.emit(EventKind::AgentTurn {
            task_id: "agent_task".into(),
            turn_index: 1,
            kind: "started".to_string(),
            metadata: None,
        });
        let events = log.filter_task("agent_task");
        assert_eq!(events.len(), 1);
        if let EventKind::AgentTurn { metadata, kind, .. } = &events[0].kind {
            assert!(metadata.is_none());
            assert_eq!(kind, "started");
        } else {
            panic!("Expected AgentTurn without metadata");
        }
    }
}