use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::IntoResponse;
use axum::Json;
use serde::{Deserialize, Serialize};
use oxios_kernel::memory::{MemoryEntry, MemoryType};
use crate::error::AppError;
use crate::routes::{paginate, PageParams};
use crate::server::AppState;
/// Query-string parameters accepted by `handle_workspace_tree`.
#[derive(Debug, Deserialize)]
pub(crate) struct TreeQuery {
    // Optional workspace-relative directory to list; absent means the root.
    #[serde(default)]
    pub dir: Option<String>,
}
/// One directory entry returned by the workspace tree endpoint.
#[derive(Debug, Serialize, Clone)]
pub(crate) struct TreeEntry {
    // File or directory name (lossy UTF-8 conversion of the OS name).
    name: String,
    // True when the entry is a directory.
    is_dir: bool,
    // Size in bytes as reported by the filesystem metadata.
    size: u64,
}
/// GET handler: list the entries of a directory inside the workspace.
///
/// `query.dir`, when present, is resolved relative to the workspace root
/// and canonicalized; a resolved path escaping the root is rejected with
/// 403, an unresolvable one with 404. Entries whose metadata cannot be
/// read are skipped. Directories sort before files, then by name.
pub(crate) async fn handle_workspace_tree(
    state: State<Arc<AppState>>,
    Query(query): Query<TreeQuery>,
) -> Result<Json<Vec<TreeEntry>>, AppError> {
    let base = state.kernel.state.workspace_path();
    // Fall back to the raw path if the workspace root itself cannot be resolved.
    let canonical_base = base.canonicalize().unwrap_or_else(|_| base.to_path_buf());
    let dir = if let Some(sub) = query.dir.as_deref() {
        let resolved = base
            .join(sub)
            .canonicalize()
            .map_err(|_| AppError::NotFound("directory not found".into()))?;
        // Canonicalization collapses `..` and symlinks, so a prefix check suffices.
        if !resolved.starts_with(&canonical_base) {
            return Err(AppError::Forbidden("path traversal denied".into()));
        }
        resolved
    } else {
        canonical_base
    };
    let mut listing = Vec::new();
    if let Ok(mut reader) = tokio::fs::read_dir(&dir).await {
        while let Ok(Some(item)) = reader.next_entry().await {
            // Unreadable metadata -> silently skip the entry.
            if let Ok(meta) = item.metadata().await {
                listing.push(TreeEntry {
                    name: item.file_name().to_string_lossy().into_owned(),
                    is_dir: meta.is_dir(),
                    size: meta.len(),
                });
            }
        }
    }
    // Directories first (true > false when reversed), then name order.
    listing.sort_by(|a, b| b.is_dir.cmp(&a.is_dir).then(a.name.cmp(&b.name)));
    Ok(Json(listing))
}
/// GET handler: return the contents of a single workspace file as text.
///
/// The request path is joined onto the workspace root and canonicalized;
/// a result outside the workspace yields 403, an unresolvable or
/// unreadable file 404. Content-Type is guessed from the requested path.
pub(crate) async fn handle_workspace_file_get(
    state: State<Arc<AppState>>,
    Path(path): Path<String>,
) -> Result<impl IntoResponse, AppError> {
    let base = state.kernel.state.workspace_path();
    let canonical_base = base.canonicalize().unwrap_or_else(|_| base.to_path_buf());
    let canonical_file = base
        .join(&path)
        .canonicalize()
        .map_err(|_| AppError::NotFound("file not found".into()))?;
    if !canonical_file.starts_with(&canonical_base) {
        return Err(AppError::Forbidden("path traversal denied".into()));
    }
    // read_to_string also fails for non-UTF-8 content; both map to 404 here.
    let content = tokio::fs::read_to_string(&canonical_file)
        .await
        .map_err(|_| AppError::NotFound("file not found".into()))?;
    Ok((
        StatusCode::OK,
        [(axum::http::header::CONTENT_TYPE, guess_mime(&path))],
        content,
    ))
}
/// PUT handler: write (create or overwrite) a file inside the workspace.
///
/// Rejects bodies over 1 MiB with 413 and any path containing `..`,
/// a root, or a drive prefix with 403 — BEFORE touching the filesystem.
/// The original ordering called `create_dir_all` first and only then
/// canonicalized the parent, so a path like `../evil/x` could create
/// directories outside the workspace before being rejected.
pub(crate) async fn handle_workspace_file_put(
    state: State<Arc<AppState>>,
    Path(path): Path<String>,
    body: String,
) -> Result<(), AppError> {
    use std::path::Component;
    const MAX_FILE_SIZE: usize = 1024 * 1024;
    if body.len() > MAX_FILE_SIZE {
        return Err(AppError::PayloadTooLarge {
            size: body.len(),
            limit: MAX_FILE_SIZE,
        });
    }
    // Lexical traversal check before any filesystem mutation: only plain
    // (`Normal`) and `.` components are acceptable in a workspace-relative path.
    if std::path::Path::new(&path)
        .components()
        .any(|c| !matches!(c, Component::Normal(_) | Component::CurDir))
    {
        return Err(AppError::Forbidden("path traversal denied".into()));
    }
    let base = state.kernel.state.workspace_path();
    let full_path = base.join(&path);
    let canonical_base = base.canonicalize().unwrap_or_else(|_| base.to_path_buf());
    if let Some(parent) = full_path.parent() {
        if !parent.exists() {
            tokio::fs::create_dir_all(parent)
                .await
                .map_err(|e| AppError::Internal(format!("failed to create directory: {e}")))?;
        }
        // Defense in depth: re-verify the resolved parent (symlinks etc.)
        // still lives under the workspace root.
        let canonical_parent = parent
            .canonicalize()
            .map_err(|e| AppError::Internal(format!("failed to resolve path: {e}")))?;
        if !canonical_parent.starts_with(&canonical_base) {
            return Err(AppError::Forbidden("path traversal denied".into()));
        }
    }
    match tokio::fs::write(&full_path, &body).await {
        Ok(_) => {
            tracing::info!(path = %path, "File written");
            Ok(())
        }
        Err(e) => {
            tracing::error!(path = %path, error = %e, "Failed to write file");
            Err(AppError::Internal("failed to write file".into()))
        }
    }
}
/// Guess an HTTP Content-Type from a file path's extension.
///
/// Matching is case-insensitive ("README.MD" == "README.md") and based on
/// `Path::extension`, so names without a real extension (e.g. "Makefile",
/// or a file literally named "md") fall back to plain text. The original
/// `rsplit('.')` approach was case-sensitive and treated an extension-less
/// name as its own extension.
fn guess_mime(path: &str) -> String {
    let ext = std::path::Path::new(path)
        .extension()
        .map(|e| e.to_string_lossy().to_ascii_lowercase());
    match ext.as_deref() {
        Some("md") => "text/markdown; charset=utf-8".into(),
        Some("json") => "application/json".into(),
        Some("toml") => "application/toml".into(),
        Some("yaml" | "yml") => "application/yaml".into(),
        Some("txt") => "text/plain; charset=utf-8".into(),
        Some("html") => "text/html".into(),
        Some("css") => "text/css".into(),
        Some("js") => "application/javascript".into(),
        // Unknown or missing extension: serve as plain text.
        _ => "text/plain; charset=utf-8".into(),
    }
}
/// Compact seed representation returned by the seeds list endpoint.
#[derive(Debug, Serialize, Clone)]
pub(crate) struct SeedSummary {
    // Seed id (or the file name when the stored document failed to parse).
    id: String,
    // Goal text (or the first line of the raw document as a fallback).
    goal: String,
    // Number of constraints on the seed; 0 when unparseable.
    constraints_count: usize,
    // RFC 3339 creation timestamp; empty when unparseable.
    created_at: String,
}
/// GET handler: paginated list of stored seeds.
///
/// Each document in the "seeds" category is parsed as a JSON `Seed`.
/// Documents that fail to parse still appear, using the file name as id
/// and the first line of the raw content as the goal, so corrupt seeds
/// remain visible rather than silently vanishing.
///
/// Fix: `&params` had been corrupted to the mojibake `¶ms`, which does
/// not compile; restored the intended reference.
pub(crate) async fn handle_seeds_list(
    state: State<Arc<AppState>>,
    Query(params): Query<PageParams>,
) -> Json<serde_json::Value> {
    let mut summaries = Vec::new();
    if let Ok(names) = state.kernel.state.list_category("seeds").await {
        for name in names {
            if let Ok(Some(content)) = state.kernel.state.load_markdown("seeds", &name).await {
                if let Ok(seed) = serde_json::from_str::<oxios_ouroboros::Seed>(&content) {
                    summaries.push(SeedSummary {
                        id: seed.id.to_string(),
                        goal: seed.goal,
                        constraints_count: seed.constraints.len(),
                        created_at: seed.created_at.to_rfc3339(),
                    });
                } else {
                    // Unparseable document: degrade gracefully instead of hiding it.
                    summaries.push(SeedSummary {
                        id: name.clone(),
                        goal: content.lines().next().unwrap_or(&name).into(),
                        constraints_count: 0,
                        created_at: String::new(),
                    });
                }
            }
        }
    }
    Json(paginate(&summaries, &params))
}
/// GET handler: fetch a single seed by id.
///
/// Returns the parsed `Seed` serialized as JSON when the stored document
/// is valid; otherwise returns `{ "id", "content" }` with the raw text.
/// Missing (or unloadable) seeds yield 404.
pub(crate) async fn handle_seed_get(
    state: State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, AppError> {
    let content = match state.kernel.state.load_markdown("seeds", &id).await {
        Ok(Some(c)) => c,
        // Both a load error and an absent document map to 404.
        _ => return Err(AppError::NotFound("seed not found".into())),
    };
    let value = match serde_json::from_str::<oxios_ouroboros::Seed>(&content) {
        Ok(seed) => serde_json::to_value(&seed).unwrap_or_default(),
        Err(_) => serde_json::json!({
            "id": id,
            "content": content,
        }),
    };
    Ok(Json(value))
}
/// One node in a seed's evolution lineage as returned by
/// `handle_seed_evolution`.
#[derive(Debug, Serialize, Clone)]
pub(crate) struct EvolutionEntry {
    // Seed id.
    id: String,
    // Generation counter carried by the seed.
    generation: u32,
    // Goal text of this generation.
    goal: String,
    // Parent seed id, omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    parent_id: Option<String>,
    // Evaluation score, present only when a matching eval document exists.
    #[serde(skip_serializing_if = "Option::is_none")]
    score: Option<f64>,
    // Whether all evaluation checks passed; same availability as `score`.
    #[serde(skip_serializing_if = "Option::is_none")]
    passed: Option<bool>,
}
/// GET handler: walk a seed's ancestry and return it oldest-first.
///
/// Starting from `id`, follows `parent_seed_id` links, attaching the
/// score/passed flags from a sibling "<id>-eval" document when present.
/// 404 when no lineage entry could be built.
///
/// Fixes: `&current_id` had been corrupted to the mojibake `¤t_id`
/// (compile error); a cyclic `parent_seed_id` chain would previously loop
/// forever — a visited set now breaks cycles; the `Pin<Box<dyn Future>>`
/// wrapper was unnecessary for an iterative loop and has been inlined.
pub(crate) async fn handle_seed_evolution(
    state: State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<Vec<EvolutionEntry>>, AppError> {
    use std::collections::HashSet;
    use oxios_ouroboros::Seed;
    let kernel = state.kernel.clone();
    let mut lineage = Vec::new();
    let mut stack = vec![id];
    // Guards against cyclic parent chains in stored data.
    let mut visited: HashSet<String> = HashSet::new();
    while let Some(current_id) = stack.pop() {
        if !visited.insert(current_id.clone()) {
            tracing::warn!(seed = %current_id, "Cycle detected in seed lineage; stopping");
            continue;
        }
        let content = match kernel.state.load_markdown("seeds", &current_id).await {
            Ok(Some(c)) => c,
            // Missing or unloadable ancestor: skip it, keep what we have.
            _ => continue,
        };
        let seed: Seed = match serde_json::from_str(&content) {
            Ok(s) => s,
            Err(e) => {
                tracing::warn!(error = %e, "Skipping invalid seed");
                continue;
            }
        };
        if let Some(ref parent_id) = seed.parent_seed_id {
            stack.push(parent_id.to_string());
        }
        // Optional evaluation results stored under "<id>-eval" in "evals".
        let eval_name = format!("{}-eval", current_id);
        let (score, passed) = match kernel.state.load_markdown("evals", &eval_name).await {
            Ok(Some(eval_content)) => {
                match serde_json::from_str::<oxios_ouroboros::EvaluationResult>(&eval_content) {
                    Ok(eval) => (Some(eval.score), Some(eval.all_passed())),
                    Err(_) => (None, None),
                }
            }
            _ => (None, None),
        };
        lineage.push(EvolutionEntry {
            id: seed.id.to_string(),
            generation: seed.generation,
            goal: seed.goal,
            parent_id: seed.parent_seed_id.map(|p| p.to_string()),
            score,
            passed,
        });
    }
    // Entries were collected newest-first; present the lineage root-first.
    lineage.reverse();
    if lineage.is_empty() {
        Err(AppError::NotFound("seed evolution not found".into()))
    } else {
        Ok(Json(lineage))
    }
}
/// Compact skill representation returned by the skills list endpoint.
#[derive(Debug, Serialize, Clone)]
pub(crate) struct SkillSummary {
    // Skill name.
    name: String,
    // Human-readable description of the skill.
    description: String,
}
/// GET handler: paginated list of available skills.
///
/// A listing failure is logged and degrades to an empty page rather than
/// a 5xx response.
///
/// Fix: `&params` had been corrupted to the mojibake `¶ms` in both
/// `paginate` calls, which does not compile; restored the references.
pub(crate) async fn handle_skills_list(
    state: State<Arc<AppState>>,
    Query(params): Query<PageParams>,
) -> Json<serde_json::Value> {
    match state.kernel.extensions.list_skills().await {
        Ok(skills) => {
            let summaries: Vec<SkillSummary> = skills
                .into_iter()
                .map(|s| SkillSummary {
                    name: s.name,
                    description: s.description,
                })
                .collect();
            Json(paginate(&summaries, &params))
        }
        Err(e) => {
            tracing::error!(error = %e, "Failed to list skills");
            Json(paginate(&Vec::<SkillSummary>::new(), &params))
        }
    }
}
/// GET handler: fetch one skill by name.
///
/// Load failures map to 500 (logged), an absent skill to 404; a found
/// skill is returned as `{ name, description, content, path }`.
pub(crate) async fn handle_skill_get(
    state: State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<serde_json::Value>, AppError> {
    let loaded = state
        .kernel
        .extensions
        .load_skill(&name)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to load skill");
            AppError::Internal("failed to load skill".into())
        })?;
    let skill = loaded.ok_or_else(|| AppError::NotFound("skill not found".into()))?;
    Ok(Json(serde_json::json!({
        "name": skill.meta.name,
        "description": skill.meta.description,
        "content": skill.content,
        "path": skill.path.to_string_lossy(),
    })))
}
/// JSON body accepted by the skill creation endpoint.
#[derive(Debug, Deserialize)]
pub(crate) struct SkillCreateRequest {
    // Name of the new skill.
    name: String,
    // Human-readable description.
    description: String,
    // Skill body; optional in the request (defaults to empty).
    #[serde(default)]
    content: String,
}
/// POST handler: create a new skill from a JSON body.
///
/// Rejects content larger than 64 KiB with 413; creation errors from the
/// extensions subsystem are logged and surfaced as 400.
pub(crate) async fn handle_skill_create(
    state: State<Arc<AppState>>,
    Json(body): Json<SkillCreateRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    const MAX_SKILL_CONTENT: usize = 64 * 1024;
    let size = body.content.len();
    if size > MAX_SKILL_CONTENT {
        return Err(AppError::PayloadTooLarge {
            size,
            limit: MAX_SKILL_CONTENT,
        });
    }
    let outcome = state
        .kernel
        .extensions
        .create_skill(&body.name, &body.description, &body.content)
        .await;
    if let Err(e) = outcome {
        tracing::error!(error = %e, skill = %body.name, "Failed to create skill");
        return Err(AppError::BadRequest(e.to_string()));
    }
    tracing::info!(skill = %body.name, "Skill created via API");
    Ok(Json(serde_json::json!({
        "status": "created",
        "name": body.name,
    })))
}
/// DELETE handler: remove a skill by name.
///
/// Deletion errors from the extensions subsystem are logged and surfaced
/// as 400 responses.
pub(crate) async fn handle_skill_delete(
    state: State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<Json<serde_json::Value>, AppError> {
    if let Err(e) = state.kernel.extensions.delete_skill(&name).await {
        tracing::error!(error = %e, skill = %name, "Failed to delete skill");
        return Err(AppError::BadRequest(e.to_string()));
    }
    tracing::info!(skill = %name, "Skill deleted via API");
    Ok(Json(serde_json::json!({
        "status": "deleted",
        "name": name,
    })))
}
/// Compact memory-document reference returned by the memory list endpoint.
#[derive(Debug, Serialize, Clone)]
pub(crate) struct MemorySummary {
    // Document name within its storage category.
    name: String,
    // Which store it came from: "daily" or "knowledge".
    category: String,
}
/// GET handler: paginated list of memory documents.
///
/// Combines the "memory" category (labelled "daily") with
/// "memory/knowledge" (labelled "knowledge"); listing failures for
/// either category are silently skipped.
///
/// Fix: `&params` had been corrupted to the mojibake `¶ms`, which does
/// not compile; restored the intended reference.
pub(crate) async fn handle_memory_list(
    state: State<Arc<AppState>>,
    Query(params): Query<PageParams>,
) -> Json<serde_json::Value> {
    let mut entries = Vec::new();
    if let Ok(names) = state.kernel.state.list_category("memory").await {
        for name in names {
            entries.push(MemorySummary {
                name,
                category: "daily".into(),
            });
        }
    }
    if let Ok(names) = state.kernel.state.list_category("memory/knowledge").await {
        for name in names {
            entries.push(MemorySummary {
                name,
                category: "knowledge".into(),
            });
        }
    }
    Json(paginate(&entries, &params))
}
/// GET handler: fetch one memory document by name.
///
/// Checks the "memory" category (labelled "daily") first, then
/// "memory/knowledge" (labelled "knowledge"); 404 when neither has the
/// document.
pub(crate) async fn handle_memory_get(
    state: State<Arc<AppState>>,
    Path(name): Path<String>,
) -> Result<impl IntoResponse, AppError> {
    // Probe the two storage categories in priority order.
    for (category, label) in [("memory", "daily"), ("memory/knowledge", "knowledge")] {
        if let Ok(Some(content)) = state.kernel.state.load_markdown(category, &name).await {
            return Ok(Json(serde_json::json!({
                "name": name,
                "category": label,
                "content": content,
            }))
            .into_response());
        }
    }
    Err(AppError::NotFound("memory entry not found".into()))
}
/// JSON body accepted by the memory creation endpoint.
#[derive(Debug, Deserialize)]
pub(crate) struct MemoryCreateRequest {
    // Memory text to store (capped at 32 KiB by the handler).
    content: String,
    // One of "fact", "episode", "knowledge"; defaults to "fact".
    #[serde(default = "default_memory_type")]
    memory_type: String,
    // Free-form tags; defaults to empty.
    #[serde(default)]
    tags: Vec<String>,
    // Importance weight; defaults to 0.5.
    #[serde(default = "default_importance")]
    importance: f32,
}
/// Serde default for `MemoryCreateRequest::memory_type`.
fn default_memory_type() -> String {
    String::from("fact")
}
/// Serde default for `MemoryCreateRequest::importance`.
fn default_importance() -> f32 {
    0.5
}
/// POST handler: store a new memory entry.
///
/// Rejects content over 32 KiB with 413 and restricts creatable types to
/// fact/episode/knowledge (400 otherwise). Returns the id assigned by
/// the agents subsystem.
///
/// Fix: the original called `Utc::now()` separately for `created_at`
/// and `accessed_at`, so the two could differ at creation; a single
/// timestamp is captured once and used for both.
pub(crate) async fn handle_memory_create(
    state: State<Arc<AppState>>,
    Json(body): Json<MemoryCreateRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    const MAX_MEMORY_ENTRY: usize = 32 * 1024;
    if body.content.len() > MAX_MEMORY_ENTRY {
        return Err(AppError::PayloadTooLarge {
            size: body.content.len(),
            limit: MAX_MEMORY_ENTRY,
        });
    }
    // Only a subset of memory types may be created through the API.
    let memory_type = match body.memory_type.as_str() {
        "fact" => MemoryType::Fact,
        "episode" => MemoryType::Episode,
        "knowledge" => MemoryType::Knowledge,
        _ => {
            return Err(AppError::BadRequest(
                "memory_type must be fact, episode, or knowledge".into(),
            ))
        }
    };
    // One timestamp for both fields so created_at == accessed_at at creation.
    let now = chrono::Utc::now();
    let entry = MemoryEntry {
        id: uuid::Uuid::new_v4().to_string(),
        memory_type,
        content: body.content,
        source: "api".to_string(),
        session_id: None,
        tags: body.tags,
        importance: body.importance,
        created_at: now,
        accessed_at: now,
        access_count: 0,
    };
    let id = state
        .kernel
        .agents
        .remember(entry)
        .await
        .map_err(|e| AppError::Internal(e.to_string()))?;
    Ok(Json(serde_json::json!({ "id": id, "status": "created" })))
}
/// JSON body accepted by the keyword memory-search endpoint.
#[derive(Debug, Deserialize)]
pub(crate) struct MemorySearchRequest {
    // Search query string.
    query: String,
    // Optional type filter; unrecognized values mean "no filter".
    memory_type: Option<String>,
    // Maximum number of results; the handler defaults this to 10.
    limit: Option<usize>,
}
/// POST handler: keyword search over stored memory entries.
///
/// An unrecognized `memory_type` disables filtering rather than erroring;
/// `limit` defaults to 10. Each hit is flattened to a small JSON object.
pub(crate) async fn handle_memory_search(
    state: State<Arc<AppState>>,
    Json(body): Json<MemorySearchRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    // Unknown or absent type strings both fall through to None (no filter).
    let type_filter = match body.memory_type.as_deref() {
        Some("conversation") => Some(MemoryType::Conversation),
        Some("session") => Some(MemoryType::Session),
        Some("fact") => Some(MemoryType::Fact),
        Some("episode") => Some(MemoryType::Episode),
        Some("knowledge") => Some(MemoryType::Knowledge),
        _ => None,
    };
    let entries = state
        .kernel
        .agents
        .search_memory(&body.query, type_filter, body.limit.unwrap_or(10))
        .await
        .map_err(|e| AppError::Internal(e.to_string()))?;
    let mut results = Vec::with_capacity(entries.len());
    for e in &entries {
        results.push(serde_json::json!({
            "id": e.id,
            "type": e.memory_type.label(),
            "content": e.content,
            "tags": e.tags,
            "importance": e.importance,
            "created_at": e.created_at.to_rfc3339(),
        }));
    }
    Ok(Json(
        serde_json::json!({ "count": results.len(), "entries": results }),
    ))
}
/// JSON body accepted by the semantic (vector) memory-search endpoint.
#[derive(Debug, Deserialize)]
pub(crate) struct SemanticSearchRequest {
    // Search query string.
    query: String,
    // Optional type filter; unrecognized values mean "no filter".
    memory_type: Option<String>,
    // Maximum number of results; the handler defaults this to 10.
    limit: Option<usize>,
}
/// POST handler: semantic (vector) search over stored memory entries.
///
/// Mirrors `handle_memory_search` but calls `semantic_search_memory`
/// and additionally reports per-hit similarity/distance plus the engine
/// name ("hnsw") in the response.
pub(crate) async fn handle_memory_semantic_search(
    state: State<Arc<AppState>>,
    Json(body): Json<SemanticSearchRequest>,
) -> Result<Json<serde_json::Value>, AppError> {
    // Unknown or absent type strings both fall through to None (no filter).
    let type_filter = match body.memory_type.as_deref() {
        Some("conversation") => Some(MemoryType::Conversation),
        Some("session") => Some(MemoryType::Session),
        Some("fact") => Some(MemoryType::Fact),
        Some("episode") => Some(MemoryType::Episode),
        Some("knowledge") => Some(MemoryType::Knowledge),
        _ => None,
    };
    let hits = state
        .kernel
        .agents
        .semantic_search_memory(&body.query, type_filter, body.limit.unwrap_or(10))
        .await
        .map_err(|e| AppError::Internal(e.to_string()))?;
    let mut results = Vec::with_capacity(hits.len());
    for hit in &hits {
        results.push(serde_json::json!({
            "id": hit.entry.id,
            "type": hit.entry.memory_type.label(),
            "content": hit.entry.content,
            "tags": hit.entry.tags,
            "importance": hit.entry.importance,
            "similarity": hit.similarity,
            "distance": hit.distance,
            "created_at": hit.entry.created_at.to_rfc3339(),
        }));
    }
    Ok(Json(serde_json::json!({
        "count": results.len(),
        "entries": results,
        "engine": "hnsw",
    })))
}
#[cfg(test)]
mod tests {
    use super::*;

    /// TreeEntry must serialize with its literal field names and values.
    #[test]
    fn test_tree_entry_serialization() {
        let entry = TreeEntry {
            name: "hello.md".into(),
            is_dir: false,
            size: 1024,
        };
        let json = serde_json::to_value(&entry).unwrap();
        assert_eq!(json["name"], "hello.md");
        assert_eq!(json["is_dir"], false);
        assert_eq!(json["size"], 1024);
        let dir_entry = TreeEntry {
            name: "src".into(),
            is_dir: true,
            size: 0,
        };
        let json = serde_json::to_value(&dir_entry).unwrap();
        assert_eq!(json["is_dir"], true);
        assert_eq!(json["size"], 0);
    }

    /// paginate: normal page, last partial page, page 0 clamped to 1,
    /// and oversized limit clamped to 500.
    #[test]
    fn test_pagination_bounds() {
        let items: Vec<i32> = (1..=10).collect();
        let p1 = PageParams { page: 1, limit: 3 };
        let result = paginate(&items, &p1);
        assert_eq!(result["total"], 10);
        assert_eq!(result["page"], 1);
        assert_eq!(result["limit"], 3);
        let returned: Vec<i32> = serde_json::from_value(result["items"].clone()).unwrap();
        assert_eq!(returned, vec![1, 2, 3]);
        // Last page holds only the remainder.
        let p4 = PageParams { page: 4, limit: 3 };
        let result = paginate(&items, &p4);
        let returned: Vec<i32> = serde_json::from_value(result["items"].clone()).unwrap();
        assert_eq!(returned, vec![10]);
        // Page 0 behaves like page 1.
        let p0 = PageParams { page: 0, limit: 3 };
        let result = paginate(&items, &p0);
        let returned: Vec<i32> = serde_json::from_value(result["items"].clone()).unwrap();
        assert_eq!(returned, vec![1, 2, 3]);
        // Oversized limits are clamped to 500.
        let big = PageParams {
            page: 1,
            limit: 9999,
        };
        let result = paginate(&items, &big);
        assert_eq!(result["limit"], 500);
    }

    #[test]
    fn test_guess_mime_common_types() {
        assert_eq!(guess_mime("main.rs"), "text/plain; charset=utf-8");
        assert_eq!(guess_mime("Cargo.toml"), "application/toml");
        assert_eq!(guess_mime("README.md"), "text/markdown; charset=utf-8");
        assert_eq!(guess_mime("data.json"), "application/json");
        assert_eq!(guess_mime("app.js"), "application/javascript");
        assert_eq!(guess_mime("index.html"), "text/html");
        assert_eq!(guess_mime("unknown.bin"), "text/plain; charset=utf-8");
    }

    /// Mirrors the match in handle_memory_create: only the three lowercase
    /// names are valid; anything else (including different casing) is rejected.
    #[test]
    fn test_memory_type_validation() {
        let valid = vec!["fact", "episode", "knowledge"];
        for t in valid {
            let mt = match t {
                "fact" => Some(MemoryType::Fact),
                "episode" => Some(MemoryType::Episode),
                "knowledge" => Some(MemoryType::Knowledge),
                _ => None,
            };
            assert!(mt.is_some(), "expected '{}' to be a valid memory type", t);
        }
        let invalid = vec!["invalid", "", "FACT", "EpIsOdE"];
        for t in invalid {
            let mt: Option<MemoryType> = match t {
                "fact" => Some(MemoryType::Fact),
                "episode" => Some(MemoryType::Episode),
                "knowledge" => Some(MemoryType::Knowledge),
                _ => None,
            };
            assert!(mt.is_none(), "expected '{}' to be rejected", t);
        }
    }

    /// Size limits used by the handlers: at-limit passes, one byte over fails.
    /// (Duplicate assertions from the original were removed.)
    #[test]
    fn test_file_size_limit_enforcement() {
        const MAX_FILE_SIZE: usize = 1024 * 1024;
        let body_at_limit = "x".repeat(MAX_FILE_SIZE);
        assert!(body_at_limit.len() <= MAX_FILE_SIZE);
        let body_over_limit = "x".repeat(MAX_FILE_SIZE + 1);
        assert!(body_over_limit.len() > MAX_FILE_SIZE);
        const MAX_SKILL_CONTENT: usize = 64 * 1024;
        let big_skill = "a".repeat(MAX_SKILL_CONTENT + 1);
        assert!(big_skill.len() > MAX_SKILL_CONTENT);
        const MAX_MEMORY_ENTRY: usize = 32 * 1024;
        let big_memory = "m".repeat(MAX_MEMORY_ENTRY + 1);
        assert!(big_memory.len() > MAX_MEMORY_ENTRY);
    }
}