// stygian_graph/ports/agent_source.rs
//! Agent source port trait for LLM-as-data-source.
//!
//! Defines the interface for using an AI agent (LLM) as a data source
//! within the pipeline.  Unlike [`AIProvider`](crate::ports::AIProvider),
//! which extracts structured data from existing content, an agent source
//! *generates* content by executing a prompt — making it suitable for
//! summarisation, enrichment, or synthetic data generation steps in a DAG.

use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::Value;

use crate::domain::error::Result;

15/// Configuration for an agent source invocation.
16///
17/// # Example
18///
19/// ```
20/// use stygian_graph::ports::agent_source::AgentRequest;
21/// use serde_json::json;
22///
23/// let req = AgentRequest {
24///     prompt: "Summarise this article".into(),
25///     context: Some("The article text goes here...".into()),
26///     parameters: json!({"temperature": 0.3}),
27/// };
28/// ```
29#[derive(Debug, Clone, Serialize, Deserialize)]
30pub struct AgentRequest {
31    /// The prompt / instruction for the agent.
32    pub prompt: String,
33    /// Optional context to feed alongside the prompt (e.g. scraped content
34    /// from an upstream pipeline node).
35    pub context: Option<String>,
36    /// Provider-specific parameters (temperature, max_tokens, etc.).
37    pub parameters: Value,
38}
39
40/// Response from an agent source invocation.
41///
42/// # Example
43///
44/// ```
45/// use stygian_graph::ports::agent_source::AgentResponse;
46/// use serde_json::json;
47///
48/// let resp = AgentResponse {
49///     content: "Here is a concise summary…".into(),
50///     metadata: json!({"tokens_used": 142}),
51/// };
52/// ```
53#[derive(Debug, Clone, Serialize, Deserialize)]
54pub struct AgentResponse {
55    /// Generated content from the agent.
56    pub content: String,
57    /// Provider-specific metadata (token counts, model info, etc.).
58    pub metadata: Value,
59}
60
/// Port trait for LLM agent data sources.
///
/// Implementations wrap an LLM provider and expose it as a pipeline-compatible
/// data source.
///
/// The `Send + Sync` bounds allow implementations to be shared across async
/// tasks (e.g. behind an `Arc` in a pipeline executor).
///
/// # Example
///
/// ```no_run
/// use stygian_graph::ports::agent_source::{AgentSourcePort, AgentRequest};
/// use serde_json::json;
///
/// # async fn example(agent: impl AgentSourcePort) {
/// let req = AgentRequest {
///     prompt: "List the key takeaways".into(),
///     context: Some("...article text...".into()),
///     parameters: json!({}),
/// };
/// let resp = agent.invoke(req).await.unwrap();
/// println!("{}", resp.content);
/// # }
/// ```
#[async_trait]
pub trait AgentSourcePort: Send + Sync {
    /// Invoke the agent with the given request.
    ///
    /// # Arguments
    ///
    /// * `request` - Prompt, optional context, and parameters.
    ///
    /// # Returns
    ///
    /// * `Ok(AgentResponse)` - Generated content and metadata.
    /// * `Err(StygianError)` - Provider error, rate limit, etc.
    ///
    /// # Errors
    ///
    /// Propagates provider-side failures (rate limits, network errors, etc.)
    /// as the crate's [`Result`] error type.
    async fn invoke(&self, request: AgentRequest) -> Result<AgentResponse>;

    /// Name of this agent source for logging and identification.
    ///
    /// NOTE(review): presumably stable for the lifetime of the implementation
    /// (used as a log key) — confirm with implementors.
    fn source_name(&self) -> &str;
}