use agentic_tools_core::fmt::TextFormat;
use agentic_tools_core::fmt::TextOptions;
use schemars::JsonSchema;
use schemars::Schema;
use serde::Deserialize;
use serde::Serialize;
/// Kind of sub-agent a query can be dispatched to.
///
/// Serialized in snake_case on the wire (e.g. `"locator"`). The concrete
/// behavior of each variant is defined by the agent runtime, not visible here.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AgentType {
    /// Default agent kind when none is specified.
    #[default]
    Locator,
    Analyzer,
}
/// Corpus an agent query is scoped to.
///
/// Serialized in snake_case on the wire (e.g. `"codebase"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AgentLocation {
    /// Default search location when none is specified.
    #[default]
    Codebase,
    Thoughts,
    References,
    Web,
}
/// Result payload of an agent invocation: a single free-form text block.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentOutput {
    // Raw response text; rendered verbatim by the `TextFormat` impl below.
    pub text: String,
}
impl AgentOutput {
    /// Creates an output wrapping the given text.
    ///
    /// Generalized to accept anything convertible into a `String` (`&str`,
    /// `String`, `Cow<str>`, …); existing callers passing `String` are
    /// unaffected since `String: Into<String>`.
    pub fn new(text: impl Into<String>) -> Self {
        Self { text: text.into() }
    }
}
impl TextFormat for AgentOutput {
    /// Plain-text rendering is just the stored text; the formatting options
    /// are not consulted for this type.
    fn fmt_text(&self, _opts: &TextOptions) -> String {
        self.text.to_owned()
    }
}
/// Directory-traversal depth, constrained to `0..=Depth::MAX` (10).
///
/// The only constructor (`Depth::new`) validates the bound, and the custom
/// `Deserialize` impl routes through it, so the in-range invariant holds
/// for every instance.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Default)]
pub struct Depth(u8);
impl Depth {
    /// Largest depth accepted by [`Depth::new`].
    pub const MAX: u8 = 10;

    /// Validating constructor: accepts values in `0..=MAX`, rejecting
    /// anything larger with a descriptive error message.
    pub fn new(v: u8) -> Result<Self, String> {
        if v > Self::MAX {
            return Err(format!("Depth {} exceeds max {}", v, Self::MAX));
        }
        Ok(Self(v))
    }

    /// Returns the raw depth value.
    pub fn as_u8(self) -> u8 {
        self.0
    }
}
impl<'de> Deserialize<'de> for Depth {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let v = u8::deserialize(deserializer)?;
Self::new(v).map_err(serde::de::Error::custom)
}
}
impl JsonSchema for Depth {
    /// Stable name under which this schema is registered.
    fn schema_name() -> std::borrow::Cow<'static, str> {
        std::borrow::Cow::Borrowed("Depth0to10")
    }
    #[expect(
        clippy::expect_used,
        reason = "Schema is a known-valid literal; failure indicates a bug in schemars."
    )]
    fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> Schema {
        // Derive the upper bound from `Depth::MAX` so the advertised schema
        // can never drift from the value `Depth::new` actually enforces.
        // (The description and schema name still spell out "10" — update
        // those strings if MAX ever changes.)
        Schema::try_from(serde_json::json!({
            "type": "integer",
            "description": "Depth of directory traversal (0-10)",
            "minimum": 0,
            "maximum": Self::MAX
        }))
        .expect("valid schema")
    }
}
/// Which entry kinds an `ls`-style listing should include.
///
/// Serialized in lowercase (e.g. `"all"`); also parseable from strings via
/// the `FromStr` impl below.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "lowercase")]
pub enum Show {
    /// Default: show both files and directories.
    #[default]
    All,
    Files,
    Dirs,
}
impl std::str::FromStr for Show {
    type Err = String;

    /// Parses a filter keyword case-insensitively; `"directories"` is
    /// accepted as an alias for `"dirs"`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        if lowered == "all" {
            Ok(Self::All)
        } else if lowered == "files" {
            Ok(Self::Files)
        } else if lowered == "dirs" || lowered == "directories" {
            Ok(Self::Dirs)
        } else {
            Err(format!("invalid show: {s}"))
        }
    }
}
/// One row of a directory listing: a path plus what kind of entry it is.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct LsEntry {
    pub path: String,
    pub kind: EntryKind,
}
/// Filesystem entry classification; serialized in lowercase (e.g. `"file"`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum EntryKind {
    File,
    Dir,
    Symlink,
}
/// Structured result of an `ls`-style tool call.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct LsOutput {
    // Root directory the listing was taken from.
    pub root: String,
    pub entries: Vec<LsEntry>,
    // True when results were truncated and another page is available.
    pub has_more: bool,
    // Free-form warnings; may include a sentinel-encoded pagination record
    // (see `encode_truncation_info`) that the text formatter consumes.
    pub warnings: Vec<String>,
}
/// Marker prefix identifying a machine-readable pagination record smuggled
/// through `LsOutput::warnings`.
pub const TRUNCATION_SENTINEL: &str = "<<<mcp:ls:page_info>>>";

/// Builds the pagination record embedded in listing warnings.
pub fn encode_truncation_info(shown: usize, total: usize, page_size: usize) -> String {
    format!("{TRUNCATION_SENTINEL} shown={shown} total={total} page_size={page_size}")
}

/// Parses a warning produced by [`encode_truncation_info`], yielding
/// `(shown, total, page_size)`. Returns `None` when the sentinel prefix is
/// absent or any field is missing/unparseable. If a key occurs more than
/// once, the last occurrence wins.
fn decode_truncation_info(s: &str) -> Option<(usize, usize, usize)> {
    if !s.starts_with(TRUNCATION_SENTINEL) {
        return None;
    }
    // For a given key, take the last matching token and parse it — this
    // mirrors a simple overwrite-as-you-scan loop.
    let field = |key: &str| {
        s.split_whitespace()
            .filter_map(|tok| tok.strip_prefix(key))
            .last()
            .and_then(|raw| raw.parse::<usize>().ok())
    };
    Some((field("shown=")?, field("total=")?, field("page_size=")?))
}
impl TextFormat for LsOutput {
    /// Renders the listing as plain text: the root on its own line, one entry
    /// per line (directories get a trailing `/`), then an optional truncation
    /// notice and any non-pagination warnings.
    fn fmt_text(&self, _opts: &TextOptions) -> String {
        use std::fmt::Write;
        let mut out = String::new();
        let _ = writeln!(out, "{}/", self.root.trim_end_matches('/'));
        for entry in &self.entries {
            let _ = write!(out, " {}", entry.path);
            // Mark directories visually even if the producer did not append
            // the slash itself.
            if matches!(entry.kind, EntryKind::Dir) && !entry.path.ends_with('/') {
                out.push('/');
            }
            out.push('\n');
        }
        // Split warnings: the sentinel-encoded pagination record (if any) is
        // consumed here; everything else is echoed verbatim below.
        let mut trunc_info: Option<(usize, usize, usize)> = None;
        let mut normal_warnings: Vec<&str> = Vec::new();
        for w in &self.warnings {
            if let Some(info) = decode_truncation_info(w) {
                trunc_info = Some(info);
            } else {
                normal_warnings.push(w);
            }
        }
        if self.has_more {
            // Fix: a record with page_size == 0 previously panicked in
            // `div_ceil`; treat it as undecodable and fall back to the
            // generic truncation message instead.
            if let Some((shown, total, page_size)) =
                trunc_info.filter(|&(_, _, page_size)| page_size > 0)
            {
                let remaining = total.saturating_sub(shown);
                let pages_remaining = remaining.div_ceil(page_size);
                let _ = writeln!(
                    out,
                    "(truncated — showing {} of {} entries; {} page{} remaining; call again with same params for next page{})",
                    shown,
                    total,
                    pages_remaining,
                    if pages_remaining == 1 { "" } else { "s" },
                    // Only nudge toward narrowing filters when more than one
                    // page is left.
                    if pages_remaining > 1 {
                        "\nREMINDER: You can also narrow your search with additional param filters if desired"
                    } else {
                        ""
                    }
                );
            } else {
                let _ = writeln!(
                    out,
                    "(truncated — call again with same params for next page)"
                );
            }
        }
        for warning in normal_warnings {
            let _ = writeln!(out, "Note: {warning}");
        }
        out.trim_end().to_string()
    }
}
/// What a grep-style search emits; serialized in lowercase (e.g. `"files"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum OutputMode {
    /// Default: list matching file paths only.
    #[default]
    Files,
    Content,
    Count,
}
/// Ordering applied to search results; serialized in lowercase.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum SortOrder {
    /// Default: sort by name.
    #[default]
    Name,
    Mtime,
}
/// Appended to grep/glob text output (unless suppressed via `TextOptions`)
/// to steer callers toward the locator agent instead of repeated searches.
pub const SEARCH_REMINDER: &str = "REMINDER: You should rarely need to call this repeatedly. If you didn't find \
what you need, use ask_agent(agent_type='locator', location='codebase', \
query='describe what you're looking for') instead of issuing more searches.";
/// Structured result of a grep-style search.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct GrepOutput {
    // Root directory the search ran in.
    pub root: String,
    // Which output shape `lines` holds (files, content, or counts).
    pub mode: OutputMode,
    pub lines: Vec<String>,
    // True when results were truncated and another page is available.
    pub has_more: bool,
    pub warnings: Vec<String>,
    // Optional one-line summary appended after the result lines.
    pub summary: Option<String>,
}
impl TextFormat for GrepOutput {
    /// Plain-text rendering: a header naming the mode and root, the matched
    /// lines, the optional summary, a truncation notice, warnings, and —
    /// unless suppressed via options — the standing search reminder.
    fn fmt_text(&self, opts: &TextOptions) -> String {
        use std::fmt::Write;
        let mode_label = match self.mode {
            OutputMode::Files => "files",
            OutputMode::Content => "content",
            OutputMode::Count => "count",
        };
        let mut buf = String::new();
        let _ = writeln!(
            buf,
            "grep results ({}) in {}/",
            mode_label,
            self.root.trim_end_matches('/')
        );
        self.lines.iter().for_each(|line| {
            let _ = writeln!(buf, " {line}");
        });
        if let Some(summary) = self.summary.as_deref() {
            let _ = writeln!(buf, "{summary}");
        }
        if self.has_more {
            let _ = writeln!(
                buf,
                "(truncated — pass explicit head_limit and offset for next page)"
            );
        }
        self.warnings.iter().for_each(|w| {
            let _ = writeln!(buf, "Note: {w}");
        });
        if !opts.suppress_search_reminder {
            let _ = writeln!(buf, "{SEARCH_REMINDER}");
        }
        buf.trim_end().to_string()
    }
}
/// Structured result of a glob-pattern file search.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct GlobOutput {
    // Root directory the glob ran in.
    pub root: String,
    pub entries: Vec<String>,
    // True when results were truncated and another page is available.
    pub has_more: bool,
    pub warnings: Vec<String>,
}
impl TextFormat for GlobOutput {
    /// Plain-text rendering: header, matched entries, a truncation notice,
    /// warnings, and — unless suppressed via options — the search reminder.
    fn fmt_text(&self, opts: &TextOptions) -> String {
        use std::fmt::Write;
        let mut buf = String::new();
        let _ = writeln!(buf, "glob results in {}/", self.root.trim_end_matches('/'));
        self.entries.iter().for_each(|entry| {
            let _ = writeln!(buf, " {entry}");
        });
        if self.has_more {
            let _ = writeln!(
                buf,
                "(truncated — pass explicit head_limit and offset for next page)"
            );
        }
        self.warnings.iter().for_each(|w| {
            let _ = writeln!(buf, "Note: {w}");
        });
        if !opts.suppress_search_reminder {
            let _ = writeln!(buf, "{SEARCH_REMINDER}");
        }
        buf.trim_end().to_string()
    }
}
#[cfg(test)]
#[expect(clippy::unwrap_used)]
mod tests {
    use super::*;

    // Defaults are part of the wire contract: zero-config callers get
    // locator/codebase.
    #[test]
    fn test_agent_type_default() {
        let default = AgentType::default();
        assert_eq!(default, AgentType::Locator);
    }
    #[test]
    fn test_agent_location_default() {
        let default = AgentLocation::default();
        assert_eq!(default, AgentLocation::Codebase);
    }

    // Serde round-trips must preserve every variant.
    #[test]
    fn test_agent_type_serde_roundtrip() {
        for agent_type in [AgentType::Locator, AgentType::Analyzer] {
            let json = serde_json::to_string(&agent_type).unwrap();
            let deserialized: AgentType = serde_json::from_str(&json).unwrap();
            assert_eq!(agent_type, deserialized);
        }
    }
    #[test]
    fn test_agent_location_serde_roundtrip() {
        for location in [
            AgentLocation::Codebase,
            AgentLocation::Thoughts,
            AgentLocation::References,
            AgentLocation::Web,
        ] {
            let json = serde_json::to_string(&location).unwrap();
            let deserialized: AgentLocation = serde_json::from_str(&json).unwrap();
            assert_eq!(location, deserialized);
        }
    }

    // Pin the exact snake_case wire strings so a renamed variant can't
    // silently change the protocol.
    #[test]
    fn test_agent_type_snake_case_serialization() {
        assert_eq!(
            serde_json::to_string(&AgentType::Locator).unwrap(),
            "\"locator\""
        );
        assert_eq!(
            serde_json::to_string(&AgentType::Analyzer).unwrap(),
            "\"analyzer\""
        );
    }
    #[test]
    fn test_agent_location_snake_case_serialization() {
        assert_eq!(
            serde_json::to_string(&AgentLocation::Codebase).unwrap(),
            "\"codebase\""
        );
        assert_eq!(
            serde_json::to_string(&AgentLocation::Thoughts).unwrap(),
            "\"thoughts\""
        );
        assert_eq!(
            serde_json::to_string(&AgentLocation::References).unwrap(),
            "\"references\""
        );
        assert_eq!(
            serde_json::to_string(&AgentLocation::Web).unwrap(),
            "\"web\""
        );
    }

    // The search reminder is on by default and removable only via the
    // explicit TextOptions flag — for both grep and glob output.
    #[test]
    fn grep_fmt_text_includes_search_reminder_by_default() {
        let output = GrepOutput {
            root: "/tmp/repo".into(),
            mode: OutputMode::Files,
            lines: vec!["src/lib.rs".into()],
            has_more: false,
            warnings: vec![],
            summary: None,
        };
        let text = output.fmt_text(&TextOptions::default());
        assert!(text.contains(SEARCH_REMINDER));
    }
    #[test]
    fn grep_fmt_text_suppresses_search_reminder_when_opt_set() {
        let output = GrepOutput {
            root: "/tmp/repo".into(),
            mode: OutputMode::Files,
            lines: vec!["src/lib.rs".into()],
            has_more: false,
            warnings: vec![],
            summary: None,
        };
        let text = output.fmt_text(&TextOptions::new().with_suppress_search_reminder(true));
        assert!(!text.contains(SEARCH_REMINDER));
    }
    #[test]
    fn glob_fmt_text_includes_search_reminder_by_default() {
        let output = GlobOutput {
            root: "/tmp/repo".into(),
            entries: vec!["src/lib.rs".into()],
            has_more: false,
            warnings: vec![],
        };
        let text = output.fmt_text(&TextOptions::default());
        assert!(text.contains(SEARCH_REMINDER));
    }
    #[test]
    fn glob_fmt_text_suppresses_search_reminder_when_opt_set() {
        let output = GlobOutput {
            root: "/tmp/repo".into(),
            entries: vec!["src/lib.rs".into()],
            has_more: false,
            warnings: vec![],
        };
        let text = output.fmt_text(&TextOptions::new().with_suppress_search_reminder(true));
        assert!(!text.contains(SEARCH_REMINDER));
    }
}