use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use serde_json::{Map, to_string, to_value};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use tera::Context as TeraContext;
use crate::core::ResourceType;
use crate::lockfile::{LockFile, ResourceId};
use super::cache::RenderCache;
use super::content::ContentExtractor;
use super::dependencies::DependencyExtractor;
use super::utils::{deep_merge_json, to_native_path_display};
/// Upper bound on the transitive rendering depth; guards `build_context_with_visited`
/// against cyclic or pathologically deep dependency chains.
const MAX_RECURSION_DEPTH: usize = 50;
/// Metadata for a single locked resource, exposed to templates as `agpm.resource`.
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct ResourceMetadata {
    /// Resource kind (serialized as `type`, e.g. "agent", "snippet").
    #[serde(rename = "type")]
    pub resource_type: String,
    /// Resource name as recorded in the lockfile.
    pub name: String,
    /// Installed location, rendered with native path separators.
    pub install_path: String,
    /// Source name, when the resource comes from a configured source.
    pub source: Option<String>,
    /// Requested version, if any.
    pub version: Option<String>,
    /// Git commit the version resolved to, if any.
    pub resolved_commit: Option<String>,
    /// Content checksum from the lockfile.
    pub checksum: String,
    /// Path of the resource within its source (or the project).
    pub path: String,
}
/// Metadata plus rendered content for a dependency, exposed to templates
/// under `agpm.deps`. Mirrors `ResourceMetadata` with an extra `content`
/// field; `Debug` is hand-written below to elide that content.
#[derive(Clone, Serialize, Deserialize)]
pub struct DependencyData {
    /// Resource kind (serialized as `type`).
    #[serde(rename = "type")]
    pub resource_type: String,
    /// Dependency name as recorded in the lockfile.
    pub name: String,
    /// Installed location, rendered with native path separators.
    pub install_path: String,
    /// Source name, when the dependency comes from a configured source.
    pub source: Option<String>,
    /// Requested version, if any.
    pub version: Option<String>,
    /// Git commit the version resolved to, if any.
    pub resolved_commit: Option<String>,
    /// Content checksum from the lockfile.
    pub checksum: String,
    /// Path of the dependency within its source (or the project).
    pub path: String,
    /// Full (potentially large) file content of the dependency.
    pub content: String,
}
impl std::fmt::Debug for DependencyData {
    /// Custom `Debug` that prints every metadata field verbatim but replaces
    /// the (potentially huge) `content` body with a `"<N bytes>"` summary.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Summarize the content by length instead of dumping it.
        let content_summary = format!("<{} bytes>", self.content.len());
        let mut dbg = f.debug_struct("DependencyData");
        dbg.field("resource_type", &self.resource_type);
        dbg.field("name", &self.name);
        dbg.field("install_path", &self.install_path);
        dbg.field("source", &self.source);
        dbg.field("version", &self.version);
        dbg.field("resolved_commit", &self.resolved_commit);
        dbg.field("checksum", &self.checksum);
        dbg.field("path", &self.path);
        dbg.field("content", &content_summary);
        dbg.finish()
    }
}
/// Builds Tera template contexts for resources from the lockfile, project
/// configuration, and resource cache, memoizing intermediate results.
pub struct TemplateContextBuilder {
    /// Lockfile describing all installed resources.
    lockfile: Arc<LockFile>,
    /// Optional project-level configuration exposed as `project` in templates.
    project_config: Option<crate::manifest::ProjectConfig>,
    /// Resource cache used to locate worktree checkouts of remote sources.
    cache: Arc<crate::cache::Cache>,
    /// Root directory of the project (base for relative resource paths).
    project_dir: PathBuf,
    /// Cache of rendered content, shared across context builds.
    render_cache: Arc<Mutex<RenderCache>>,
    /// Per-resource cache of custom dependency names.
    custom_names_cache: Arc<Mutex<HashMap<String, BTreeMap<String, String>>>>,
    /// Per-resource cache of parsed dependency specs.
    dependency_specs_cache:
        Arc<Mutex<HashMap<String, BTreeMap<String, crate::manifest::DependencySpec>>>>,
}
impl TemplateContextBuilder {
    /// Creates a builder over `lockfile`, an optional project config, the
    /// resource `cache`, and the project root directory. All internal caches
    /// (render, custom-name, dependency-spec) start empty.
    pub fn new(
        lockfile: Arc<LockFile>,
        project_config: Option<crate::manifest::ProjectConfig>,
        cache: Arc<crate::cache::Cache>,
        project_dir: PathBuf,
    ) -> Self {
        Self {
            lockfile,
            project_config,
            cache,
            project_dir,
            render_cache: Arc::new(Mutex::new(RenderCache::new())),
            custom_names_cache: Arc::new(Mutex::new(HashMap::new())),
            dependency_specs_cache: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    /// Clears the render cache. Best-effort: a poisoned lock is silently
    /// ignored rather than panicking.
    pub fn clear_render_cache(&self) {
        if let Ok(mut cache) = self.render_cache.lock() {
            cache.clear();
        }
    }

    /// Clears the cached custom dependency names (best-effort, like
    /// `clear_render_cache`).
    pub fn clear_custom_names_cache(&self) {
        if let Ok(mut cache) = self.custom_names_cache.lock() {
            cache.clear();
        }
    }

    /// Clears the cached dependency specs (best-effort).
    pub fn clear_dependency_specs_cache(&self) {
        if let Ok(mut cache) = self.dependency_specs_cache.lock() {
            cache.clear();
        }
    }

    /// Returns `(hits, misses, hit_rate)` for the render cache, or `None`
    /// if the cache lock is poisoned.
    pub fn render_cache_stats(&self) -> Option<(usize, usize, f64)> {
        self.render_cache.lock().ok().map(|cache| {
            let (hits, misses) = cache.stats();
            let hit_rate = cache.hit_rate();
            (hits, misses, hit_rate)
        })
    }

    /// Builds the template context for `resource_id`, applying
    /// `variant_inputs` as overrides, and pairs it with an optional context
    /// checksum. The checksum is only computed when the resource actually
    /// opts into templating (see [`Self::resource_uses_templating`]).
    ///
    /// # Errors
    /// Propagates failures from context construction or templating detection.
    pub async fn build_context(
        &self,
        resource_id: &ResourceId,
        variant_inputs: &serde_json::Value,
    ) -> Result<(TeraContext, Option<String>)> {
        let context = self
            .build_context_with_visited(resource_id, variant_inputs, &mut HashSet::new())
            .await?;
        let uses_templating = self.resource_uses_templating(resource_id).await?;
        let context_with_checksum = ContextWithChecksum::new(context, uses_templating);
        Ok(context_with_checksum.into_tuple())
    }

    /// Reports whether templating is enabled for the resource by inspecting
    /// its markdown frontmatter. Non-`.md` resources, unreadable files, and
    /// unparseable markdown all conservatively report `false`.
    ///
    /// # Errors
    /// Fails when the resource is missing from the lockfile or its source
    /// location cannot be resolved (missing URL / unresolved commit).
    async fn resource_uses_templating(&self, resource_id: &ResourceId) -> Result<bool> {
        let resource = self
            .lockfile
            .find_resource_by_id(resource_id)
            .ok_or_else(|| anyhow!("Resource not found in lockfile"))?;
        // Only markdown files can carry the templating frontmatter flag.
        if !resource.path.ends_with(".md") {
            return Ok(false);
        }
        // Resolve the on-disk location of the resource's source file.
        let source_path = if resource.source.is_some() {
            let url = resource
                .url
                .as_ref()
                .ok_or_else(|| anyhow!("Resource '{}' has source but no URL", resource.name))?;
            if resource.is_local() {
                // Local sources: `url` is a plain directory path.
                std::path::PathBuf::from(url).join(&resource.path)
            } else {
                // Remote sources: read from the git worktree pinned to the
                // resolved commit.
                let sha = resource.resolved_commit.as_deref().ok_or_else(|| {
                    anyhow!("Resource '{}' has no resolved commit", resource.name)
                })?;
                let worktree_dir = self.cache.get_worktree_path(url, sha)?;
                worktree_dir.join(&resource.path)
            }
        } else {
            // Source-less resources are project-relative (or absolute) paths.
            let local_path = std::path::Path::new(&resource.path);
            if local_path.is_absolute() {
                local_path.to_path_buf()
            } else {
                self.project_dir.join(local_path)
            }
        };
        let content = match tokio::fs::read_to_string(&source_path).await {
            Ok(c) => c,
            Err(e) => {
                tracing::debug!(
                    "Could not read file for resource '{}' from {}: {}. Assuming templating disabled.",
                    resource.name,
                    source_path.display(),
                    e
                );
                return Ok(false);
            }
        };
        let doc = match crate::markdown::MarkdownDocument::parse(&content) {
            Ok(d) => d,
            Err(e) => {
                tracing::debug!(
                    "Could not parse markdown for resource '{}': {}. Assuming templating disabled.",
                    resource.name,
                    e
                );
                return Ok(false);
            }
        };
        Ok(super::content::is_markdown_templating_enabled(doc.metadata.as_ref()))
    }

    /// Maps a locked resource to the metadata exposed as `agpm.resource`.
    fn build_resource_data(&self, resource: &crate::lockfile::LockedResource) -> ResourceMetadata {
        ResourceMetadata {
            resource_type: resource.resource_type.to_string(),
            name: resource.name.clone(),
            install_path: to_native_path_display(&resource.installed_at),
            source: resource.source.clone(),
            version: resource.version.clone(),
            resolved_commit: resource.resolved_commit.clone(),
            checksum: resource.checksum.clone(),
            path: resource.path.clone(),
        }
    }

    /// Computes a short, deterministic digest of all template-visible
    /// resource metadata. Resources are grouped by type and keyed by name in
    /// `BTreeMap`s (stable ordering), serialized to JSON, and hashed with
    /// SHA-256; only the first 8 bytes (16 hex chars) are returned.
    ///
    /// # Errors
    /// Fails if the collected metadata cannot be serialized to JSON.
    pub fn compute_context_digest(&self) -> Result<String> {
        use sha2::{Digest, Sha256};
        let mut digest_data: BTreeMap<String, BTreeMap<String, BTreeMap<&str, String>>> =
            BTreeMap::new();
        for resource_type in [
            ResourceType::Agent,
            ResourceType::Snippet,
            ResourceType::Command,
            ResourceType::Script,
            ResourceType::Hook,
            ResourceType::McpServer,
        ] {
            let resources = self.lockfile.get_resources_by_type(&resource_type);
            if resources.is_empty() {
                continue;
            }
            let type_str = resource_type.to_plural().to_string();
            // Sort by name so the digest is independent of lockfile order.
            let mut sorted_resources: Vec<_> = resources.iter().collect();
            sorted_resources.sort_by(|a, b| a.name.cmp(&b.name));
            let mut type_data = BTreeMap::new();
            for resource in sorted_resources {
                let mut resource_data: BTreeMap<&str, String> = BTreeMap::new();
                resource_data.insert("name", resource.name.clone());
                resource_data.insert("install_path", resource.installed_at.clone());
                resource_data.insert("path", resource.path.clone());
                resource_data.insert("checksum", resource.checksum.clone());
                if let Some(ref source) = resource.source {
                    resource_data.insert("source", source.to_string());
                }
                if let Some(ref version) = resource.version {
                    resource_data.insert("version", version.to_string());
                }
                if let Some(ref commit) = resource.resolved_commit {
                    resource_data.insert("resolved_commit", commit.to_string());
                }
                type_data.insert(resource.name.clone(), resource_data);
            }
            digest_data.insert(type_str, type_data);
        }
        let json_str =
            to_string(&digest_data).context("Failed to serialize template context for digest")?;
        let mut hasher = Sha256::new();
        hasher.update(json_str.as_bytes());
        let hash = hasher.finalize();
        Ok(hex::encode(&hash[..8]))
    }
}
/// A Tera context paired with an optional SHA-256 checksum of its canonical
/// JSON form, used to detect context changes between renders.
#[derive(Debug, Clone)]
pub struct ContextWithChecksum {
    /// The built template context.
    pub context: TeraContext,
    /// `sha256:<hex>` checksum, or `None` when checksumming was disabled
    /// or failed.
    pub checksum: Option<String>,
}
impl ContextWithChecksum {
    /// Wraps `context`, computing its checksum only when `compute_checksum`
    /// is true. A checksum computation failure is swallowed and yields
    /// `None` rather than an error.
    #[must_use]
    pub fn new(context: TeraContext, compute_checksum: bool) -> Self {
        let checksum = compute_checksum
            .then(|| Self::compute_checksum(&context).ok())
            .flatten();
        Self { context, checksum }
    }

    /// Hashes the canonical JSON serialization of the context with SHA-256
    /// and formats it as a `sha256:<hex>` string.
    fn compute_checksum(context: &TeraContext) -> Result<String> {
        use crate::utils::canonicalize_json;
        use sha2::{Digest, Sha256};
        // `into_json` consumes the context, so hash a clone.
        let canonical = canonicalize_json(&context.clone().into_json())?;
        let digest = Sha256::digest(canonical.as_bytes());
        Ok(format!("sha256:{}", hex::encode(digest)))
    }

    /// Borrows the wrapped context.
    #[must_use]
    pub fn context(&self) -> &TeraContext {
        &self.context
    }

    /// Borrows the checksum string, if one was computed.
    #[must_use]
    pub fn checksum(&self) -> Option<&str> {
        self.checksum.as_deref()
    }

    /// Decomposes into a `(context, checksum)` pair.
    #[must_use]
    pub fn into_tuple(self) -> (TeraContext, Option<String>) {
        (self.context, self.checksum)
    }
}
impl ContentExtractor for TemplateContextBuilder {
    /// Resource cache used to locate source files for content extraction.
    fn cache(&self) -> &Arc<crate::cache::Cache> {
        &self.cache
    }

    /// Project root used to resolve relative resource paths.
    fn project_dir(&self) -> &PathBuf {
        &self.project_dir
    }
}
impl DependencyExtractor for TemplateContextBuilder {
    /// Lockfile used to resolve dependency metadata.
    fn lockfile(&self) -> &Arc<LockFile> {
        &self.lockfile
    }

    /// Shared cache of rendered dependency content.
    fn render_cache(&self) -> &Arc<Mutex<RenderCache>> {
        &self.render_cache
    }

    /// Per-resource cache of custom dependency names.
    fn custom_names_cache(&self) -> &Arc<Mutex<HashMap<String, BTreeMap<String, String>>>> {
        &self.custom_names_cache
    }

    /// Per-resource cache of parsed dependency specs.
    fn dependency_specs_cache(
        &self,
    ) -> &Arc<Mutex<HashMap<String, BTreeMap<String, crate::manifest::DependencySpec>>>> {
        &self.dependency_specs_cache
    }

    /// Collects the `agpm.deps` data for `current_resource`, delegating to
    /// the module-level implementation. `rendering_stack` carries the set of
    /// resources currently being rendered so cycles can be detected.
    async fn build_dependencies_data(
        &self,
        current_resource: &crate::lockfile::LockedResource,
        rendering_stack: &mut HashSet<String>,
    ) -> Result<BTreeMap<String, BTreeMap<String, DependencyData>>> {
        super::dependencies::build_dependencies_data(self, current_resource, rendering_stack).await
    }

    /// Builds the full Tera context for `resource_id`:
    /// `agpm.resource` (own metadata), `agpm.deps` (dependency data),
    /// `agpm.project` plus top-level `project` (project config), and finally
    /// deep-merges `variant_inputs` on top as overrides.
    ///
    /// # Errors
    /// Fails when the recursion-depth limit is exceeded, the resource is
    /// missing from the lockfile, or serialization of any section fails.
    async fn build_context_with_visited(
        &self,
        resource_id: &ResourceId,
        variant_inputs: &serde_json::Value,
        rendering_stack: &mut HashSet<String>,
    ) -> Result<TeraContext> {
        // Hard stop on runaway recursion through dependency chains.
        if rendering_stack.len() >= MAX_RECURSION_DEPTH {
            anyhow::bail!(
                "Maximum recursion depth ({}) exceeded while rendering '{}'. \
This likely indicates a complex or cyclic dependency chain. \
Current stack contains {} resources.",
                MAX_RECURSION_DEPTH,
                resource_id.name(),
                rendering_stack.len()
            );
        }
        tracing::info!(
            "Starting context build for '{}' (type: {:?}, depth: {})",
            resource_id.name(),
            resource_id.resource_type(),
            rendering_stack.len()
        );
        let mut context = TeraContext::new();
        // The `agpm` namespace aggregates everything exposed to templates.
        let mut agpm = Map::new();
        let current_resource =
            self.lockfile.find_resource_by_id(resource_id).with_context(|| {
                format!(
                    "Resource '{}' of type {:?} not found in lockfile (source: {:?}, tool: {:?})",
                    resource_id.name(),
                    resource_id.resource_type(),
                    resource_id.source(),
                    resource_id.tool()
                )
            })?;
        tracing::info!(
            "Found resource '{}' with {} dependencies",
            resource_id.name(),
            current_resource.dependencies.len()
        );
        // agpm.resource: the resource's own metadata.
        let resource_data = self.build_resource_data(current_resource);
        agpm.insert("resource".to_string(), to_value(resource_data)?);
        tracing::info!("Building dependencies data for '{}'...", resource_id.name());
        // agpm.deps: metadata + content for each dependency, grouped by type.
        let deps_data = self.build_dependencies_data(current_resource, rendering_stack).await?;
        tracing::info!("Successfully built dependencies data with {} types", deps_data.len());
        agpm.insert("deps".to_string(), to_value(deps_data)?);
        // Project config is exposed both as `agpm.project` and as a
        // top-level `project` variable for template convenience.
        if let Some(ref project_config) = self.project_config {
            let project_json = project_config.to_json_value();
            agpm.insert("project".to_string(), project_json.clone());
            context.insert("project", &project_json);
        }
        context.insert("agpm", &agpm);
        // Apply variant inputs as deep-merged overrides on top of the base
        // context. Non-object (or empty-object) inputs are a no-op.
        if let Some(overrides_obj) = variant_inputs.as_object() {
            if !overrides_obj.is_empty() {
                tracing::debug!(
                    "Applying template variable overrides for resource '{}'",
                    resource_id.name()
                );
                // Round-trip through JSON so values can be deep-merged.
                let mut context_json = context.clone().into_json();
                for (key, value) in overrides_obj {
                    if key == "project" {
                        // `project` overrides must land in both places the
                        // base value was inserted: `agpm.project` and the
                        // top-level `project` key.
                        let original_project = context_json
                            .get("agpm")
                            .and_then(|v| v.as_object())
                            .and_then(|o| o.get("project"))
                            .cloned()
                            .unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
                        let merged_project = deep_merge_json(original_project, value);
                        if let Some(agpm_obj) =
                            context_json.get_mut("agpm").and_then(|v| v.as_object_mut())
                        {
                            agpm_obj.insert("project".to_string(), merged_project.clone());
                        }
                        context_json
                            .as_object_mut()
                            .expect("context JSON must be an object")
                            .insert("project".to_string(), merged_project);
                    } else {
                        // Other keys merge into the top level only; missing
                        // keys merge against an empty object.
                        let original = context_json
                            .get(key)
                            .cloned()
                            .unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
                        let merged = deep_merge_json(original, value);
                        context_json
                            .as_object_mut()
                            .expect("context JSON must be an object")
                            .insert(key.clone(), merged);
                    }
                }
                // Rebuild the Tera context from the merged JSON.
                context = TeraContext::from_serialize(&context_json)
                    .context("Failed to create context from merged template variables")?;
                tracing::debug!(
                    "Applied template overrides: {}",
                    serde_json::to_string_pretty(&variant_inputs)
                        .unwrap_or_else(|_| "{}".to_string())
                );
            }
        }
        Ok(context)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Enabling checksums yields a deterministic value and keeps the
    /// context intact.
    #[test]
    fn test_context_with_checksum_computation() {
        let mut ctx = TeraContext::new();
        ctx.insert("test", "value");
        ctx.insert("number", &42);

        let first = ContextWithChecksum::new(ctx.clone(), true);
        assert!(first.checksum().is_some(), "Checksum should be computed when enabled");
        assert_eq!(first.context(), &ctx, "Context should be preserved");

        // Hashing the same context twice must give the same result.
        let second = ContextWithChecksum::new(ctx.clone(), true);
        assert_eq!(
            first.checksum(),
            second.checksum(),
            "Checksum should be deterministic for same context"
        );
    }

    /// Disabling checksums skips computation entirely.
    #[test]
    fn test_context_with_checksum_disabled() {
        let mut ctx = TeraContext::new();
        ctx.insert("test", "value");
        let wrapped = ContextWithChecksum::new(ctx.clone(), false);
        assert!(
            wrapped.checksum().is_none(),
            "Checksum should not be computed when disabled"
        );
        assert_eq!(wrapped.context(), &ctx, "Context should be preserved");
    }

    /// Distinct contexts must not collide on checksum.
    #[test]
    fn test_context_with_checksum_different_contexts() {
        let mut first_ctx = TeraContext::new();
        first_ctx.insert("test", "value1");
        let mut second_ctx = TeraContext::new();
        second_ctx.insert("test", "value2");
        let first = ContextWithChecksum::new(first_ctx, true);
        let second = ContextWithChecksum::new(second_ctx, true);
        assert_ne!(
            first.checksum(),
            second.checksum(),
            "Different contexts should have different checksums"
        );
    }

    /// `into_tuple` hands back both the context and the checksum.
    #[test]
    fn test_context_with_checksum_into_tuple() {
        let mut ctx = TeraContext::new();
        ctx.insert("test", "value");
        let wrapped = ContextWithChecksum::new(ctx.clone(), true);
        let (returned_context, returned_checksum) = wrapped.into_tuple();
        assert_eq!(returned_context, ctx, "Returned context should match original");
        assert!(returned_checksum.is_some(), "Returned checksum should be present");
    }

    /// The checksum string is `sha256:` followed by 64 hex characters.
    #[test]
    fn test_context_with_checksum_complex_structure() {
        let mut ctx = TeraContext::new();
        ctx.insert("simple", "value");
        ctx.insert("number", &42);
        ctx.insert("boolean", &true);
        let wrapped = ContextWithChecksum::new(ctx, true);
        assert!(wrapped.checksum().is_some(), "Complex context should produce checksum");
        let checksum = wrapped.checksum().unwrap();
        assert!(checksum.starts_with("sha256:"), "Checksum should have sha256: prefix");
        assert_eq!(checksum.len(), 7 + 64, "SHA256 hex should be 64 characters plus prefix");
    }
}