use anyhow::{Context, Result};
use handlebars::Handlebars;
use moka::sync::Cache;
use rand::{Rng, SeedableRng};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::fs;
use tracing::{debug, info, warn};
use crate::providers::{GenerationRequest, LegacyLLMProvider, OpenAIProvider};
use crate::stages::CrateContext;
#[derive(
Debug, Clone, Copy, Eq, PartialEq, Hash, serde::Serialize, serde::Deserialize, Default,
)]
pub enum CrateType {
#[default]
Library,
Binary,
}
/// Lightweight summary of a template: description plus feature names.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemplateDetails {
    /// Human-readable description of the template.
    pub description: String,
    /// Feature names associated with the template.
    pub features: Vec<String>,
}
/// A full crate template definition, including its files, variables, and the
/// UltraThink reasoning metadata used during generation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Template {
    /// Unique template name.
    pub name: String,
    /// Human-readable description.
    pub description: String,
    /// Category used to group templates.
    pub category: String,
    /// Features the template enables.
    pub features: Vec<String>,
    /// Dependency names the template pulls in.
    pub dependencies: Vec<String>,
    /// Map of relative file path -> file content (template source).
    pub files: HashMap<String, String>,
    /// Variables the template accepts, keyed by variable name.
    pub variables: HashMap<String, TemplateVariable>,
    /// Reasoning patterns applied during AI-assisted generation.
    pub ultrathink_patterns: Vec<UltraThinkPattern>,
    /// Multi-step reasoning chains executed during generation.
    pub reasoning_chains: Vec<ReasoningChain>,
}
/// A single substitutable variable declared by a template.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemplateVariable {
    /// Variable name as it appears in the template.
    pub name: String,
    /// Human-readable description of the variable's purpose.
    pub description: String,
    /// Free-form type tag (e.g. a string naming the expected type).
    pub var_type: String,
    /// Value used when the caller provides none.
    pub default_value: Option<String>,
    /// Whether a value must be supplied.
    pub required: bool,
    /// Optional regex pattern a supplied value must satisfy.
    pub validation_pattern: Option<String>,
}
/// A named reasoning pattern: a prompt template plus the criteria and
/// chain-of-thought steps used to apply it (see `UltraThinkEngine::apply_patterns`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UltraThinkPattern {
    /// Pattern name.
    pub name: String,
    /// Human-readable description; appended to the prompt context.
    pub description: String,
    /// Reasoning style the pattern employs.
    pub reasoning_type: ReasoningType,
    /// Prompt text sent to the model, prefixed to the pattern context.
    pub prompt_template: String,
    /// Criteria the pattern's output should satisfy.
    pub validation_criteria: Vec<String>,
    /// Ordered chain-of-thought step descriptions.
    pub chain_of_thought_steps: Vec<String>,
}
/// Styles of reasoning an LLM step can be instructed to use
/// (see `UltraThinkEngine::enhance_prompt_with_reasoning_type`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReasoningType {
    /// General principles -> specific conclusions.
    Deductive,
    /// Specific examples -> general conclusions.
    Inductive,
    /// Best explanation for the observed facts.
    Abductive,
    /// Parallels with similar situations.
    Analogical,
    /// Cause-and-effect analysis.
    Causal,
    /// Alternative-scenario ("what if") analysis.
    Counterfactual,
    /// Reasoning about the reasoning process itself.
    Metacognitive,
}
/// An ordered sequence of reasoning steps executed as a unit
/// (see `UltraThinkEngine::execute_single_chain`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningChain {
    /// Unique chain identifier; used as the results-map key.
    pub id: String,
    /// Human-readable description.
    pub description: String,
    /// Steps executed in order.
    pub steps: Vec<ReasoningStep>,
    /// Checkpoints associated with the chain.
    pub validation_points: Vec<ValidationPoint>,
    /// Minimum weighted confidence for the chain to count as successful.
    pub confidence_threshold: f64,
}
/// One step inside a `ReasoningChain`: a prompt plus its validation settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningStep {
    /// Unique step identifier within the chain.
    pub step_id: String,
    /// Human-readable description.
    pub description: String,
    /// Reasoning style used to frame the step's prompt.
    pub reasoning_type: ReasoningType,
    /// Inputs the step expects to be available.
    pub input_requirements: Vec<String>,
    /// Outputs the step is expected to produce.
    pub output_expectations: Vec<String>,
    /// Criteria checked against the step output (see `check_criterion`).
    pub validation_criteria: Vec<String>,
    /// Prompt text; `{key}` placeholders are filled from chain variables.
    pub prompt_template: String,
    /// Minimum confidence for the step to pass validation.
    pub validation_threshold: f64,
    /// Weight of this step in the chain's overall confidence (defaults to 1.0).
    pub confidence_weight: Option<f64>,
    /// Maximum wall-clock time allowed for the step's LLM call.
    pub timeout: Duration,
}
/// A named checkpoint attached to a reasoning chain.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationPoint {
    /// Unique checkpoint identifier.
    pub checkpoint_id: String,
    /// Human-readable description.
    pub description: String,
    /// Category of validation performed.
    pub validation_type: ValidationType,
    /// Criteria checked at this point.
    pub criteria: Vec<String>,
    /// Minimum confidence required to pass.
    pub required_confidence: f64,
}
/// Categories of validation a `ValidationPoint` can perform.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ValidationType {
    /// Internal logical consistency.
    Logical,
    /// Supported by observed evidence.
    Empirical,
    /// Practically feasible / useful.
    Pragmatic,
    /// Meaning-level correctness.
    Semantic,
    /// Form-level (syntax) correctness.
    Syntactic,
    /// Performance characteristics.
    Performance,
}
/// Top-level description of a generated crate: its concept plus module layout.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Architecture {
    /// The core concept (wraps the crate specification).
    pub concept: Concept,
    /// Modules making up the crate.
    pub modules: Vec<Module>,
}
impl Architecture {
#[must_use]
pub fn minimal() -> Self {
Self {
concept: Concept {
spec: CrateSpec::default(),
},
modules: Vec::new(),
}
}
}
/// The conceptual core of an architecture; currently just the crate spec.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Concept {
    /// Specification of the crate being designed.
    pub spec: CrateSpec,
}
/// A planned module within the generated crate.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Module {
    /// Module name.
    pub name: String,
    /// What the module is for.
    pub purpose: String,
    /// Items the module exports.
    pub exports: Vec<String>,
    /// Other modules/crates this module depends on.
    pub dependencies: Vec<String>,
}
/// Full specification of a crate to generate — mirrors Cargo.toml `[package]`
/// metadata plus generation-specific settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrateSpec {
    /// Package name.
    pub name: String,
    /// Package description.
    pub description: String,
    /// Semver version string.
    pub version: String,
    /// Author list (Cargo `authors`).
    pub authors: Vec<String>,
    /// SPDX license expression, if any.
    pub license: Option<String>,
    /// Library or binary crate.
    pub crate_type: CrateType,
    /// Runtime dependencies: name -> version requirement.
    pub dependencies: HashMap<String, String>,
    /// Dev-only dependencies: name -> version requirement.
    pub dev_dependencies: HashMap<String, String>,
    /// Cargo feature names.
    pub features: Vec<String>,
    /// crates.io keywords.
    pub keywords: Vec<String>,
    /// crates.io categories.
    pub categories: Vec<String>,
    /// Repository URL.
    pub repository: Option<String>,
    /// Homepage URL.
    pub homepage: Option<String>,
    /// Documentation URL.
    pub documentation: Option<String>,
    /// README file path/content reference.
    pub readme: Option<String>,
    /// Minimum supported Rust version.
    pub rust_version: Option<String>,
    /// Rust edition (e.g. "2021").
    pub edition: String,
    /// Whether the crate may be published.
    pub publish: bool,
    /// Single-author convenience field (distinct from `authors`).
    pub author: Option<String>,
    /// Name of the template used to generate the crate, if any.
    pub template: Option<String>,
}
impl Default for CrateSpec {
fn default() -> Self {
Self {
name: "default-crate".to_string(),
description: "A default crate specification".to_string(),
version: "0.1.0".to_string(),
authors: vec![],
license: Some("MIT OR Apache-2.0".to_string()),
crate_type: CrateType::default(),
dependencies: HashMap::new(),
dev_dependencies: HashMap::new(),
features: Vec::new(),
keywords: vec![],
categories: vec![],
repository: None,
homepage: None,
documentation: None,
readme: None,
rust_version: None,
edition: "2021".to_string(),
publish: true,
author: None,
template: None,
}
}
}
/// Fallback `lib.rs` template used by `generate_custom_template` and
/// `generate_validated_template`. Placeholders `{{description}}` and
/// `{{crate_name}}` are filled by plain `str::replace`, not Handlebars.
/// NOTE(review): the repeated "// TODO: document this" lines are emitted
/// verbatim into generated crates — confirm they are intentional template
/// content rather than leftover scaffolding.
const LIB_TEMPLATE: &str = r#"//! {{description}}
//!
//! This crate provides {{crate_name}} functionality with comprehensive
//! error handling and modern Rust idioms.
#![deny(missing_docs)]
#![warn(clippy::all)]
/// Main public API for {{crate_name}}
// TODO: document this
// TODO: document this
// TODO: document this
pub mod api {
//! Core API functionality
}
/// Error types and handling
// TODO: document this
// TODO: document this
// TODO: document this
pub mod error {
//! Error definitions and utilities
/// Main error type for {{crate_name}}
#[derive(Debug, thiserror::Error)]
// TODO: document this
// TODO: document this
pub enum Error {
#[error("Invalid input: {0}")]
// TODO: document this
InvalidInput(String),
#[error("IO error: {0}")]
// TODO: document this
Io(#[from] std::io::Error),
}
/// Result type alias
// TODO: document this
// TODO: document this
pub type Result<T> = std::result::Result<T, Error>;
}
/// Re-exports for convenience
pub use api::*;
pub use error::{Error, Result};
"#;
/// Fallback `main.rs` template (clap-based CLI skeleton) used by
/// `generate_custom_template` and `generate_validated_template`; placeholders
/// are filled by plain `str::replace`.
/// NOTE(review): this prints "Welcome to OpenCrates Interactive Generator!"
/// while the builtin `main_rs` Handlebars template prints
/// "Welcome to {{crate_name}}!" — confirm the divergence is intended.
const MAIN_TEMPLATE: &str = r#"//! {{description}}
//!
//! {{crate_name}} - A Rust application
use anyhow::Result;
use clap::Parser;
#[derive(Parser)]
#[command(name = "{{crate_name}}")]
#[command(about = "{{description}}")]
struct Args {
/// Enable verbose output
#[arg(short, long)]
verbose: bool,
}
fn main() -> Result<()> {
let args = Args::parse();
if args.verbose {
println!("Running {{crate_name}} in verbose mode");
}
println!("Welcome to OpenCrates Interactive Generator!");
Ok(())
}
"#;
/// Renders crate files from built-in Handlebars templates.
///
/// Cloning is cheap: the compiled template registry is shared via `Arc`.
#[derive(Debug, Clone)]
pub struct TemplateManager {
    /// Compiled Handlebars registry (all builtin templates registered).
    handlebars: Arc<Handlebars<'static>>,
    /// Raw (uncompiled) template sources, keyed by template name.
    templates: HashMap<String, String>,
}
impl TemplateManager {
pub async fn new() -> Result<Self> {
let mut handlebars_instance = Handlebars::new();
let mut templates = HashMap::new();
let builtin_templates = Self::load_builtin_templates();
for (name, template) in builtin_templates {
handlebars_instance.register_template_string(&name, &template)?;
let _ = templates.insert(name.clone(), template);
}
Ok(Self {
handlebars: Arc::new(handlebars_instance),
templates,
})
}
#[must_use]
pub fn templates(&self) -> &HashMap<String, String> {
&self.templates
}
fn load_builtin_templates() -> Vec<(String, String)> {
vec![
(
"cargo_toml".to_string(),
r#"[package]
name = "{{crate_name}}"
version = "{{version}}"
edition = "2021"
description = "{{description}}"
authors = ["Generated by OpenCrates"]
license = "MIT OR Apache-2.0"
[dependencies]
{{#each dependencies}}
{{name}} = "{{version}}"
{{/each}}
{{#if features}}
[features]
{{#each features}}
{{this}} = []
{{/each}}
{{/if}}
"#
.to_string(),
),
(
"lib_rs".to_string(),
r#"//! {{description}}
//!
//! This crate provides {{crate_name}} functionality with comprehensive
//! error handling and modern Rust idioms.
#![deny(missing_docs)]
#![warn(clippy::all)]
/// Main public API for {{crate_name}}
// TODO: document this
// TODO: document this
// TODO: document this
pub mod api {
//! Core API functionality
}
/// Error types and handling
// TODO: document this
// TODO: document this
// TODO: document this
pub mod error {
//! Error definitions and utilities
/// Main error type for {{crate_name}}
#[derive(Debug, thiserror::Error)]
// TODO: document this
// TODO: document this
pub enum Error {
#[error("Invalid input: {0}")]
// TODO: document this
InvalidInput(String),
#[error("IO error: {0}")]
// TODO: document this
Io(#[from] std::io::Error),
}
/// Result type alias
// TODO: document this
// TODO: document this
pub type Result<T> = std::result::Result<T, Error>;
}
/// Re-exports for convenience
pub use api::*;
pub use error::{Error, Result};
"#
.to_string(),
),
(
"main_rs".to_string(),
r#"//! {{description}}
//!
//! {{crate_name}} - A Rust application
use anyhow::Result;
use clap::Parser;
#[derive(Parser)]
#[command(name = "{{crate_name}}")]
#[command(about = "{{description}}")]
struct Args {
/// Enable verbose output
#[arg(short, long)]
verbose: bool,
}
fn main() -> Result<()> {
let args = Args::parse();
if args.verbose {
println!("Running {{crate_name}} in verbose mode");
}
println!("Welcome to {{crate_name}}!");
Ok(())
}
"#
.to_string(),
),
(
"readme".to_string(),
r"# {{crate_name}}
{{description}}
## Installation
```bash
cargo add {{crate_name}}
```
## Usage
```rust
use {{crate_name}}::*;
// Your usage examples here
```
## Contributing
Pull requests are welcome! For major changes, please open an issue first.
## License
Licensed under either of:
- Apache License, Version 2.0
- MIT License
at your option.
"
.to_string(),
),
(
"gitignore".to_string(),
r"# Rust
target/
Cargo.lock
**/*.rs.bk
# IDE
.vscode/
.idea/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Logs
*.log
# Local env files
.env
.env.local
"
.to_string(),
),
(
"test_rs".to_string(),
r#"//! Integration tests for {{crate_name}}
use {{crate_name}}::*;
#[test]
fn test_basic_functionality() {
// Add your tests here
assert_eq!(2 + 2, 4);
}
#[test]
fn test_error_handling() {
// Test error cases
let result = std::result::Result::<(), String>::Err("test error".to_string());
assert!(result.is_err());
}
"#
.to_string(),
),
]
}
pub async fn render_crate(
&self,
context: &CrateContext,
output_dir: &PathBuf,
) -> Result<HashMap<String, String>> {
utils::validate_crate_name(&context.crate_name)?;
if !output_dir.exists() {
fs::create_dir_all(output_dir).await?;
}
let output_dir = output_dir
.canonicalize()
.unwrap_or_else(|_| output_dir.clone());
let mut rendered_files = HashMap::new();
fs::create_dir_all(&output_dir).await?;
fs::create_dir_all(output_dir.join("src")).await?;
fs::create_dir_all(output_dir.join("tests")).await?;
fs::create_dir_all(output_dir.join("examples")).await?;
fs::create_dir_all(output_dir.join("benches")).await?;
let mut rng = rand::rngs::StdRng::from_entropy();
let cargo_content = self
.render_template("cargo_toml", context)
.with_context(|| "Failed to render Cargo.toml")?;
let cargo_content_with_nonce =
format!("{}<!-- nonce:{} -->", cargo_content, rng.gen::<u32>());
let cargo_path = output_dir.join("Cargo.toml");
fs::write(&cargo_path, &cargo_content_with_nonce).await?;
let _ = rendered_files.insert("Cargo.toml".to_string(), cargo_content_with_nonce);
let lib_content = self
.render_template("lib_rs", context)
.with_context(|| "Failed to render lib.rs")?;
let lib_content_with_nonce = format!("{}<!-- nonce:{} -->", lib_content, rng.gen::<u32>());
let lib_path = output_dir.join("src/lib.rs");
fs::write(&lib_path, &lib_content_with_nonce).await?;
let _ = rendered_files.insert("src/lib.rs".to_string(), lib_content_with_nonce);
let readme_content = self
.render_template("readme", context)
.with_context(|| "Failed to render README.md")?;
let readme_content_with_nonce =
format!("{}<!-- nonce:{} -->", readme_content, rng.gen::<u32>());
let readme_path = output_dir.join("README.md");
fs::write(&readme_path, &readme_content_with_nonce).await?;
let _ = rendered_files.insert("README.md".to_string(), readme_content_with_nonce);
let gitignore_content = self
.render_template("gitignore", context)
.with_context(|| "Failed to render .gitignore")?;
let gitignore_content_with_nonce =
format!("{}<!-- nonce:{} -->", gitignore_content, rng.gen::<u32>());
let gitignore_path = output_dir.join(".gitignore");
fs::write(&gitignore_path, &gitignore_content_with_nonce).await?;
let _ = rendered_files.insert(".gitignore".to_string(), gitignore_content_with_nonce);
let test_content = self
.render_template("test_rs", context)
.with_context(|| "Failed to render tests")?;
let test_content_with_nonce =
format!("{}<!-- nonce:{} -->", test_content, rng.gen::<u32>());
let test_path = output_dir.join("tests/integration_test.rs");
fs::write(&test_path, &test_content_with_nonce).await?;
let _ = rendered_files.insert(
"tests/integration_test.rs".to_string(),
test_content_with_nonce,
);
for (path, content) in &context.generated_files {
let file_path = output_dir.join(path);
if let Some(parent) = file_path.parent() {
fs::create_dir_all(parent).await?;
}
fs::write(&file_path, content).await?;
let _ = rendered_files.insert(path.clone(), content.clone());
}
info!(
"Successfully rendered {} files for crate '{}'",
rendered_files.len(),
context.crate_name
);
Ok(rendered_files)
}
fn render_template<T: Serialize>(&self, template_name: &str, data: &T) -> Result<String> {
self.handlebars
.render(template_name, data)
.map_err(anyhow::Error::from)
}
pub async fn generate_custom_template(&self, context: &CrateContext) -> Result<String> {
let mut files = Vec::new();
let lib_content = LIB_TEMPLATE
.replace("{{description}}", &context.description)
.replace("{{crate_name}}", &context.crate_name);
files.push(FileOutput {
name: "lib.rs".to_string(),
content: lib_content,
});
let main_content = MAIN_TEMPLATE
.replace("{{description}}", &context.description)
.replace("{{crate_name}}", &context.crate_name);
files.push(FileOutput {
name: "main.rs".to_string(),
content: main_content,
});
debug!(
"Generated {} files for crate: {}",
files.len(),
context.crate_name
);
Ok(serde_json::to_string(&files)?)
}
pub async fn generate_crate(
&self,
name: &str,
description: &str,
features: Vec<String>,
output_path: PathBuf,
_openai_provider: Option<&OpenAIProvider>,
) -> Result<()> {
let crate_output_path = output_path.join(name);
let context = CrateContext {
crate_name: name.to_string(),
description: description.to_string(),
version: "0.1.0".to_string(),
features,
dependencies: Vec::new(),
generated_files: HashMap::new(),
output_path: crate_output_path.clone(),
metadata: HashMap::new(),
stage_outputs: HashMap::new(),
};
let rendered_files = self.render_crate(&context, &crate_output_path).await?;
info!(
"Successfully generated crate '{}' with {} files at '{}'",
name,
rendered_files.len(),
crate_output_path.display()
);
Ok(())
}
pub async fn generate_publish_ready_package(&self, context: &CrateContext) -> Result<()> {
let _rendered = self.render_crate(context, &context.output_path).await?;
let mit_license = include_str!("../../LICENSE-MIT");
let apache_license = include_str!("../../LICENSE-APACHE");
tokio::fs::write(context.output_path.join("LICENSE-MIT"), mit_license).await?;
tokio::fs::write(context.output_path.join("LICENSE-APACHE"), apache_license).await?;
let basic_docs = format!(
"# {} Documentation\n\n{}\n\n## Features\n\n{}\n\n## License\n\nMIT OR Apache-2.0\n",
context.crate_name,
context.description,
context.features.join("\n- ")
);
tokio::fs::write(context.output_path.join("DOCUMENTATION.md"), basic_docs).await?;
Ok(())
}
pub async fn preview_crate_generation(
&self,
spec: &CrateSpec,
) -> anyhow::Result<serde_json::Value> {
Ok(serde_json::json!({
"crate_name": spec.name,
"estimated_files": 7,
"estimated_lines": 320,
"crate_type": spec.crate_type,
}))
}
}
/// Orchestrates multi-step LLM reasoning chains and parses their output.
pub struct UltraThinkEngine {
    /// Provider used for all generation requests.
    openai_provider: OpenAIProvider,
    /// Cached reasoning results, exposed via `reasoning_cache()`.
    /// NOTE(review): nothing in this impl ever inserts into this map.
    reasoning_cache: HashMap<String, ReasoningResult>,
    /// Memoized confidence scores keyed by the full response text.
    confidence_cache: Cache<String, f64>,
    /// Memoized parsed sections keyed by the full response text.
    section_cache: Cache<String, HashMap<String, Vec<String>>>,
    /// Pre-compiled patterns that extract a numeric confidence score.
    confidence_regexes: Vec<Regex>,
    /// Pre-compiled section-header patterns paired with their map keys.
    section_regexes: Vec<(Regex, &'static str)>,
}
/// Structured form of a single LLM reasoning response
/// (produced by `UltraThinkEngine::parse_reasoning_response`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningResult {
    /// Reasoning style that produced this result.
    pub reasoning_type: ReasoningType,
    /// Extracted confidence score in [0.0, 1.0].
    pub confidence: f64,
    /// Lines extracted from a "steps" section, if present.
    pub reasoning_steps: Vec<String>,
    /// Lines extracted from a "conclusions" section, if present.
    pub conclusions: Vec<String>,
    /// Lines extracted from an "evidence" section, if present.
    pub evidence: Vec<String>,
    /// Lines extracted from an "assumptions" section, if present.
    pub assumptions: Vec<String>,
    /// Lines extracted from an "alternatives" section, if present.
    pub alternatives: Vec<String>,
}
impl UltraThinkEngine {
    /// Builds the engine: compiles all parsing regexes once up front and
    /// allocates bounded caches (10 000 entries each).
    ///
    /// # Errors
    /// Fails only if one of the static regex patterns does not compile.
    pub fn new(openai_provider: OpenAIProvider) -> Result<Self> {
        // Patterns that pull a numeric confidence out of free-form model
        // text, e.g. "confidence: 0.8", "certainty: 75", "80% confident".
        let confidence_regexes = vec![
            Regex::new(r"confidence:\s*(\d+(?:\.\d+)?)")
                .context("Failed to compile confidence regex")?,
            Regex::new(r"certainty:\s*(\d+(?:\.\d+)?)")
                .context("Failed to compile certainty regex")?,
            Regex::new(r"(\d+(?:\.\d+)?)%?\s*confident")
                .context("Failed to compile confident regex")?,
        ];
        // Section-header patterns paired with the key used in the parsed
        // sections map.
        // NOTE(review): each pattern requires a newline plus at least one
        // bulleted item AFTER the header, yet `extract_reasoning_sections`
        // applies them to single lines via `is_match` — a lone "Steps:" line
        // contains no '\n', so these likely never match there. Confirm.
        let section_regexes = vec![
            (
                Regex::new(r"(?i)(?:reasoning )?steps?:?\s*\n((?:[-*]\s*.+\n?)+)")
                    .context("Failed to compile steps regex")?,
                "steps",
            ),
            (
                Regex::new(r"(?i)conclusions?:?\s*\n((?:[-*]\s*.+\n?)+)")
                    .context("Failed to compile conclusions regex")?,
                "conclusions",
            ),
            (
                Regex::new(r"(?i)evidence:?\s*\n((?:[-*]\s*.+\n?)+)")
                    .context("Failed to compile evidence regex")?,
                "evidence",
            ),
            (
                Regex::new(r"(?i)assumptions?:?\s*\n((?:[-*]\s*.+\n?)+)")
                    .context("Failed to compile assumptions regex")?,
                "assumptions",
            ),
            (
                Regex::new(r"(?i)alternatives?:?\s*\n((?:[-*]\s*.+\n?)+)")
                    .context("Failed to compile alternatives regex")?,
                "alternatives",
            ),
        ];
        Ok(Self {
            openai_provider,
            reasoning_cache: HashMap::new(),
            confidence_cache: Cache::new(10_000),
            section_cache: Cache::new(10_000),
            confidence_regexes,
            section_regexes,
        })
    }
    /// Asks the provider for a `///`-style usage example based on the
    /// serialized architecture; returns the raw model output.
    ///
    /// # Errors
    /// Fails if serialization of the architecture or generation fails.
    pub async fn generate_usage_examples(&self, architecture: &Architecture) -> Result<String> {
        let arch_details = serde_json::to_string_pretty(architecture)?;
        let prompt = format!(
            "Based on the following architecture, generate a concise and clear usage example in a Rust doc comment (like /// Example section).
The example should demonstrate how to use the main functionality of the crate.
Architecture:
{arch_details}
Provide only the code for the example, without any explanation before or after."
        );
        // NOTE(review): uses `crate::core::CrateSpec`, not the `CrateSpec`
        // defined in this module — confirm this is intentional.
        let request = GenerationRequest {
            spec: crate::core::CrateSpec::default(),
            prompt: Some(prompt),
            max_tokens: Some(500),
            model: None,
            temperature: None,
            context: None,
        };
        let response = self.openai_provider.generate(request).await?;
        Ok(response.preview)
    }
    /// Asks the provider for full documentation of the architecture;
    /// returns the raw model output.
    ///
    /// # Errors
    /// Fails if serialization or generation fails.
    pub async fn generate_documentation(&self, architecture: &Architecture) -> Result<String> {
        let arch_details = serde_json::to_string_pretty(architecture)?;
        // NOTE(review): the "// TODO: document this" line below is part of
        // the prompt string sent to the model — confirm it is intentional.
        let prompt = format!(
            "Generate comprehensive documentation for the following architecture:
Architecture:
{arch_details}
// TODO: document this
Include:
1. Overview
2. Module descriptions
3. API documentation
4. Usage examples
5. Best practices"
        );
        let request = GenerationRequest {
            spec: crate::core::CrateSpec::default(),
            prompt: Some(prompt),
            max_tokens: Some(2000),
            model: None,
            temperature: None,
            context: None,
        };
        let response = self.openai_provider.generate(request).await?;
        Ok(response.preview)
    }
    /// Asks the provider for a README.md for the architecture;
    /// returns the raw model output.
    ///
    /// # Errors
    /// Fails if serialization or generation fails.
    pub async fn generate_readme(&self, architecture: &Architecture) -> Result<String> {
        let arch_details = serde_json::to_string_pretty(architecture)?;
        // NOTE(review): "// TODO: document this" is part of the prompt text.
        let prompt = format!(
            "Generate a comprehensive README.md for the following Rust crate architecture:
Architecture:
{arch_details}
// TODO: document this
Include:
1. Project title and description
2. Features
3. Installation instructions
4. Usage examples
5. API documentation links
6. Contributing guidelines
7. License information"
        );
        let request = GenerationRequest {
            spec: crate::core::CrateSpec::default(),
            prompt: Some(prompt),
            max_tokens: Some(1500),
            model: None,
            temperature: None,
            context: None,
        };
        let response = self.openai_provider.generate(request).await?;
        Ok(response.preview)
    }
    /// Asks the provider for API documentation of the architecture;
    /// returns the raw model output.
    ///
    /// # Errors
    /// Fails if serialization or generation fails.
    pub async fn generate_api_docs(&self, architecture: &Architecture) -> Result<String> {
        let arch_details = serde_json::to_string_pretty(architecture)?;
        // NOTE(review): "// TODO: document this" is part of the prompt text.
        let prompt = format!(
            "Generate detailed API documentation for the following architecture:
Architecture:
{arch_details}
// TODO: document this
Include:
1. Public API surface
2. Function signatures
3. Type definitions
4. Trait implementations
5. Examples for each public function"
        );
        let request = GenerationRequest {
            spec: crate::core::CrateSpec::default(),
            prompt: Some(prompt),
            max_tokens: Some(2000),
            model: None,
            temperature: None,
            context: None,
        };
        let response = self.openai_provider.generate(request).await?;
        Ok(response.preview)
    }
    /// Read-only view of the reasoning-result cache.
    /// NOTE(review): nothing in this impl ever populates the cache.
    #[must_use]
    pub fn reasoning_cache(&self) -> &HashMap<String, ReasoningResult> {
        &self.reasoning_cache
    }
    /// Applies each pattern in order: builds a prompt from the pattern's
    /// template plus the shared context and prior reasoning results, then
    /// appends each model response to the accumulated context.
    ///
    /// # Errors
    /// Fails if serializing `reasoning_results` or any generation call fails.
    pub async fn apply_patterns(
        &self,
        patterns: &[UltraThinkPattern],
        context: &str,
        reasoning_results: &HashMap<String, serde_json::Value>,
    ) -> Result<String> {
        let mut enhanced_context = context.to_string();
        for pattern in patterns {
            let pattern_context = format!(
                "Context: {}\nReasoning Results: {}\nPattern: {}",
                context,
                serde_json::to_string_pretty(reasoning_results)?,
                pattern.description
            );
            let request = GenerationRequest {
                spec: crate::core::CrateSpec::default(),
                prompt: Some(format!(
                    "{}\n\n{}",
                    pattern.prompt_template, pattern_context
                )),
                max_tokens: Some(1000),
                model: None,
                temperature: None,
                context: None,
            };
            let response = self.openai_provider.generate(request).await?;
            // Each pattern's output is concatenated onto the running context.
            enhanced_context = format!("{}\n\n{}", enhanced_context, response.preview);
        }
        Ok(enhanced_context)
    }
    /// Executes every chain sequentially; only chains whose overall
    /// confidence met their threshold appear in the returned map
    /// (keyed by chain id).
    ///
    /// # Errors
    /// Fails if a chain execution or serialization of its result fails.
    pub async fn execute_reasoning_chains(
        &self,
        chains: &[ReasoningChain],
        context: &str,
        variables: &HashMap<String, String>,
    ) -> Result<HashMap<String, serde_json::Value>> {
        let mut results = HashMap::new();
        for chain in chains {
            let chain_result = self.execute_single_chain(chain, context, variables).await?;
            // Unsuccessful chains are silently dropped from the result map.
            if chain_result.success {
                let _ = results.insert(chain.id.clone(), serde_json::to_value(chain_result)?);
            }
        }
        Ok(results)
    }
    /// Runs a chain's steps in order, validates each, and aggregates a
    /// weighted overall confidence. A step that fails validation (with a
    /// positive threshold) has its confidence halved rather than aborting
    /// the chain.
    async fn execute_single_chain(
        &self,
        chain: &ReasoningChain,
        context: &str,
        variables: &HashMap<String, String>,
    ) -> Result<ChainResult> {
        let start_time = Instant::now();
        let mut step_results = Vec::new();
        for step in &chain.steps {
            let step_result = self
                .execute_reasoning_step(step, context, variables, &step_results)
                .await?;
            let validation_passed = self.validate_step_result(step, &step_result);
            let mut final_step_result = step_result;
            final_step_result.validation_passed = validation_passed;
            if !validation_passed && step.validation_threshold > 0.0 {
                warn!("Step {} failed validation", step.step_id);
                // Penalize rather than abort: halve the step's confidence.
                final_step_result.confidence *= 0.5;
            }
            step_results.push(final_step_result);
        }
        let overall_confidence = self.calculate_chain_confidence(chain, &step_results);
        let success = overall_confidence >= chain.confidence_threshold;
        Ok(ChainResult {
            chain_id: chain.id.clone(),
            step_results,
            overall_confidence,
            success,
            total_execution_time: start_time.elapsed(),
            // No per-chain structured output is produced yet.
            output: HashMap::new(),
        })
    }
    /// Executes one step: frames the prompt for the step's reasoning type,
    /// adds context/variables/previous-step summaries, calls the provider
    /// under the step's timeout, and parses the response.
    ///
    /// # Errors
    /// Fails on timeout, generation failure, or response-parsing failure.
    async fn execute_reasoning_step(
        &self,
        step: &ReasoningStep,
        context: &str,
        variables: &HashMap<String, String>,
        previous_results: &[StepResult],
    ) -> Result<StepResult> {
        let start_time = Instant::now();
        let enhanced_prompt =
            self.enhance_prompt_with_reasoning_type(&step.prompt_template, &step.reasoning_type);
        let full_prompt =
            self.build_context_prompt(&enhanced_prompt, context, variables, previous_results);
        // NOTE(review): model "gpt-4" and temperature 0.7 are hard-coded
        // here, unlike the other generation calls in this impl — confirm.
        let response = tokio::time::timeout(step.timeout, async {
            let request = GenerationRequest {
                spec: crate::core::CrateSpec::default(),
                prompt: Some(full_prompt),
                max_tokens: Some(1000),
                model: Some("gpt-4".to_string()),
                temperature: Some(0.7),
                context: None,
            };
            self.openai_provider.generate(request).await
        })
        .await
        .map_err(|_| anyhow::anyhow!("Step execution timeout"))?
        .map_err(|e| anyhow::anyhow!("OpenAI generation failed: {}", e))?;
        let reasoning_result =
            self.parse_reasoning_response(&response.preview, &step.reasoning_type)?;
        let confidence = self.calculate_step_confidence(step, &response.preview);
        Ok(StepResult {
            step_id: step.step_id.clone(),
            output: response.preview,
            confidence,
            // Validation is applied afterwards by `execute_single_chain`.
            validation_passed: false,
            execution_time: start_time.elapsed(),
            // Flatten the parsed ReasoningResult into the metadata map.
            // A struct serializes to a JSON object, so `as_object()` is Some;
            // NOTE(review): an `expect` with a message would be clearer.
            metadata: serde_json::to_value(reasoning_result)?
                .as_object()
                .unwrap()
                .iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect(),
        })
    }
    /// Assembles the final prompt: substitutes single-brace `{key}`
    /// placeholders from `variables`, appends the shared context, then a
    /// one-line summary (first 100 chars + confidence) per previous step.
    fn build_context_prompt(
        &self,
        prompt: &str,
        context: &str,
        variables: &HashMap<String, String>,
        previous_results: &[StepResult],
    ) -> String {
        let mut full_prompt = prompt.to_string();
        for (key, value) in variables {
            full_prompt = full_prompt.replace(&format!("{{{key}}}"), value);
        }
        full_prompt.push_str(&format!("\n\nContext: {context}"));
        if !previous_results.is_empty() {
            full_prompt.push_str("\n\nPrevious reasoning steps:");
            for result in previous_results {
                // Truncate each prior output to its first 100 characters.
                let output_preview = result.output.chars().take(100).collect::<String>();
                full_prompt.push_str(&format!(
                    "\n- {}: {} (confidence: {:.2})",
                    result.step_id, output_preview, result.confidence
                ));
            }
        }
        full_prompt
    }
    /// Prefixes the prompt with an instruction tailored to the reasoning
    /// style and suffixes a request for structured output plus a
    /// 0.0-1.0 confidence score.
    fn enhance_prompt_with_reasoning_type(
        &self,
        prompt: &str,
        reasoning_type: &ReasoningType,
    ) -> String {
        let reasoning_instructions = match reasoning_type {
            ReasoningType::Deductive => {
                "Use deductive reasoning: Start with general principles and derive specific conclusions. Show your logical steps clearly."
            }
            ReasoningType::Inductive => {
                "Use inductive reasoning: Analyze specific examples to identify patterns and form general conclusions."
            }
            ReasoningType::Abductive => {
                "Use abductive reasoning: Find the best explanation for the observed facts. Consider multiple hypotheses."
            }
            ReasoningType::Analogical => {
                "Use analogical reasoning: Draw parallels with similar situations and apply lessons learned."
            }
            ReasoningType::Causal => {
                "Use causal reasoning: Identify cause-and-effect relationships and trace their implications."
            }
            ReasoningType::Counterfactual => {
                "Use counterfactual reasoning: Consider alternative scenarios and their potential outcomes."
            }
            ReasoningType::Metacognitive => {
                "Use metacognitive reasoning: Think about your thinking process and evaluate your reasoning quality."
            }
        };
        format!(
            "{}\n\n{}\n\n{}",
            reasoning_instructions,
            prompt,
            "Please structure your response with clear reasoning steps and provide a confidence score (0.0-1.0)."
        )
    }
    /// Converts raw model output into a `ReasoningResult`; sections that were
    /// not found default to empty lists.
    fn parse_reasoning_response(
        &self,
        response: &str,
        reasoning_type: &ReasoningType,
    ) -> Result<ReasoningResult> {
        let sections = self.extract_reasoning_sections(response);
        let confidence = self.extract_confidence_score(response);
        Ok(ReasoningResult {
            reasoning_type: reasoning_type.clone(),
            confidence,
            reasoning_steps: sections.get("steps").cloned().unwrap_or_default(),
            conclusions: sections.get("conclusions").cloned().unwrap_or_default(),
            evidence: sections.get("evidence").cloned().unwrap_or_default(),
            assumptions: sections.get("assumptions").cloned().unwrap_or_default(),
            alternatives: sections.get("alternatives").cloned().unwrap_or_default(),
        })
    }
    /// Splits a response into named sections ("steps", "conclusions", ...)
    /// by scanning line-by-line for section headers; lines after a header are
    /// accumulated (trimmed) until the next header. Memoized per response.
    fn extract_reasoning_sections(&self, response: &str) -> HashMap<String, Vec<String>> {
        // A missing cache entry and a cached-but-empty map are treated the
        // same here, so empty results are recomputed on every call.
        let mut sections = self.section_cache.get(response).unwrap_or_default();
        if !sections.is_empty() {
            return sections;
        }
        let mut current_section: Option<String> = None;
        let mut items = Vec::new();
        for line in response.lines() {
            let mut matched_section = false;
            // NOTE(review): these regexes require '\n' + bullet items after
            // the header, but `line` never contains '\n' — see `new()`.
            for (regex, name) in &self.section_regexes {
                if regex.is_match(line) {
                    // Header found: flush the previous section, start a new one.
                    if let Some(section_name) = current_section.take() {
                        let _ = sections.insert(section_name, items.clone());
                        items.clear();
                    }
                    current_section = Some((*name).to_string());
                    matched_section = true;
                    break;
                }
            }
            // Non-header lines belong to the currently open section (if any).
            if !matched_section && current_section.is_some() {
                items.push(line.trim().to_string());
            }
        }
        // Flush the final open section.
        if let Some(section_name) = current_section {
            let _ = sections.insert(section_name, items);
        }
        self.section_cache
            .insert(response.to_string(), sections.clone());
        sections
    }
    /// Extracts a confidence score in [0.0, 1.0] from the response: first via
    /// explicit regex matches (values > 1.0 are treated as percentages), then
    /// via a keyword/length heuristic when no score is found. Memoized.
    fn extract_confidence_score(&self, response: &str) -> f64 {
        if let Some(cached) = self.confidence_cache.get(response) {
            return cached;
        }
        let mut confidence = 0.0;
        for regex in &self.confidence_regexes {
            if let Some(captures) = regex.captures(response) {
                if let Some(score_str) = captures.get(1) {
                    if let Ok(score) = score_str.as_str().parse::<f64>() {
                        // Values above 1.0 are assumed to be percentages.
                        confidence = if score > 1.0 { score / 100.0 } else { score };
                        confidence = confidence.clamp(0.0, 1.0);
                        break;
                    }
                }
            }
        }
        // NOTE(review): an explicit "confidence: 0" also lands here (float
        // equality with 0.0) and is overwritten by the heuristic — confirm.
        if confidence == 0.0 {
            // Heuristic: sum fixed weights for quality keywords (capped at
            // 0.8), then add up to 0.2 based on response length.
            let quality_indicators = [
                ("specific examples", 0.15),
                ("detailed analysis", 0.20),
                ("multiple perspectives", 0.15),
                ("evidence", 0.10),
                ("reasoning", 0.10),
                ("validation", 0.10),
                ("alternative", 0.10),
                ("conclusion", 0.10),
            ];
            confidence = quality_indicators
                .iter()
                .filter(|(indicator, _)| response.to_lowercase().contains(indicator))
                .map(|(_, score)| score)
                .sum::<f64>()
                .min(0.8);
            let length_factor = (response.len() as f64 / 1000.0).clamp(0.0, 0.2);
            confidence = (confidence + length_factor).min(1.0);
        }
        self.confidence_cache
            .insert(response.to_string(), confidence);
        confidence
    }
    /// Blends the extracted confidence (70%) with the fraction of the step's
    /// validation criteria satisfied by the output (30%). `max(1)` guards
    /// against division by zero when there are no criteria.
    fn calculate_step_confidence(&self, step: &ReasoningStep, output: &str) -> f64 {
        let base_confidence = self.extract_confidence_score(output);
        let validation_score = step
            .validation_criteria
            .iter()
            .filter(|criterion| self.check_criterion(criterion, output))
            .count() as f64
            / step.validation_criteria.len().max(1) as f64;
        (base_confidence * 0.7) + (validation_score * 0.3)
    }
    /// Checks one named criterion against the output using cheap keyword
    /// heuristics; unknown criteria fall back to a case-insensitive
    /// substring test.
    fn check_criterion(&self, criterion: &str, output: &str) -> bool {
        let output_lower = output.to_lowercase();
        match criterion {
            "complete_use_cases" => output_lower.contains("use case") && output.len() > 200,
            "clear_requirements" => {
                output_lower.contains("requirement") && output_lower.contains("must")
            }
            "follows_unix_conventions" => {
                output_lower.contains("flag") || output_lower.contains("option")
            }
            "intuitive_interface" => {
                output_lower.contains("user") && output_lower.contains("intuitive")
            }
            "helpful_error_messages" => {
                output_lower.contains("error") && output_lower.contains("message")
            }
            "proper_exit_codes" => output_lower.contains("exit") && output_lower.contains("code"),
            "secure_by_default" => {
                output_lower.contains("sanitize") || output_lower.contains("validate")
            }
            "performance_optimized" => {
                output_lower.contains("cache") || output_lower.contains("optimize")
            }
            "logical_consistency" => {
                output_lower.contains("therefore") || output_lower.contains("implies")
            }
            "empirical_support" => {
                output_lower.contains("evidence") || output_lower.contains("data")
            }
            "practical_feasibility" => {
                output_lower.contains("implement") || output_lower.contains("practice")
            }
            // Fallback: treat the criterion itself as a required substring.
            _ => output_lower.contains(&criterion.to_lowercase()),
        }
    }
    /// A step passes when ALL of its criteria hold AND its confidence meets
    /// the step's validation threshold.
    fn validate_step_result(&self, step: &ReasoningStep, result: &StepResult) -> bool {
        step.validation_criteria
            .iter()
            .all(|criterion| self.check_criterion(criterion, &result.output))
            && result.confidence >= step.validation_threshold
    }
    /// Weighted mean of step confidences, using each step's
    /// `confidence_weight` (default 1.0).
    /// NOTE(review): if `chain.steps` is empty this divides by zero (NaN),
    /// and `weights[i]` assumes `step_results` aligns one-to-one with
    /// `chain.steps` — a mismatch would panic. Confirm callers guarantee both.
    fn calculate_chain_confidence(
        &self,
        chain: &ReasoningChain,
        step_results: &[StepResult],
    ) -> f64 {
        let weights: Vec<f64> = chain
            .steps
            .iter()
            .map(|step| step.confidence_weight.unwrap_or(1.0))
            .collect();
        let total_weight: f64 = weights.iter().sum();
        step_results
            .iter()
            .enumerate()
            .map(|(i, result)| result.confidence * weights[i])
            .sum::<f64>()
            / total_weight
    }
}
/// Outcome of a single reasoning step execution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepResult {
    /// Identifier of the step that produced this result.
    pub step_id: String,
    /// Raw model output for the step.
    pub output: String,
    /// Blended confidence score for the step.
    pub confidence: f64,
    /// Whether the step passed validation (set by the chain executor).
    pub validation_passed: bool,
    /// Wall-clock time the step took.
    pub execution_time: Duration,
    /// Parsed `ReasoningResult` fields flattened into a JSON map.
    pub metadata: HashMap<String, serde_json::Value>,
}
/// Aggregate outcome of executing a whole reasoning chain.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChainResult {
    /// Identifier of the chain.
    pub chain_id: String,
    /// Per-step results, in execution order.
    pub step_results: Vec<StepResult>,
    /// Weighted mean confidence across steps.
    pub overall_confidence: f64,
    /// Whether the overall confidence met the chain's threshold.
    pub success: bool,
    /// Wall-clock time for the whole chain.
    pub total_execution_time: Duration,
    /// Structured output map (currently always empty).
    pub output: HashMap<String, serde_json::Value>,
}
impl ReasoningStep {
#[must_use]
pub fn new(step_id: String, description: String, reasoning_type: ReasoningType) -> Self {
Self {
step_id,
description,
reasoning_type,
input_requirements: Vec::new(),
output_expectations: Vec::new(),
validation_criteria: Vec::new(),
prompt_template: String::new(),
validation_threshold: 0.7,
confidence_weight: Some(1.0),
timeout: Duration::from_secs(30),
}
}
#[must_use]
pub fn with_validation_criteria(mut self, criteria: Vec<String>) -> Self {
self.validation_criteria = criteria;
self
}
#[must_use]
pub fn with_threshold(mut self, threshold: f64) -> Self {
self.validation_threshold = threshold;
self
}
#[must_use]
pub fn with_timeout(mut self, timeout: Duration) -> Self {
self.timeout = timeout;
self
}
}
/// A generated file: its name and full content (serialized to JSON by the
/// template-generation helpers).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileOutput {
    /// File name (e.g. "lib.rs").
    pub name: String,
    /// Full file content.
    pub content: String,
}
/// Renders the fallback lib (and, when the `"binary"` feature is present,
/// main) templates for `context` via plain placeholder substitution and
/// returns them as pretty-printed JSON, rejecting implausibly short output.
///
/// # Errors
/// Fails when the rendered lib.rs is under 50 bytes or serialization fails.
pub async fn generate_validated_template(context: &CrateContext) -> Result<String> {
    let substitute = |template: &str| {
        template
            .replace("{{description}}", &context.description)
            .replace("{{crate_name}}", &context.crate_name)
    };
    let lib_content = substitute(LIB_TEMPLATE);
    // Sanity check: a valid rendering is always longer than this.
    if lib_content.len() < 50 {
        return Err(anyhow::anyhow!("Generated lib.rs content too short"));
    }
    let mut files = vec![FileOutput {
        name: "lib.rs".to_string(),
        content: lib_content,
    }];
    // Binary crates additionally get a main.rs.
    if context.features.iter().any(|f| f == "binary") {
        files.push(FileOutput {
            name: "main.rs".to_string(),
            content: substitute(MAIN_TEMPLATE),
        });
    }
    debug!(
        "Generated {} validated files for crate: {}",
        files.len(),
        context.crate_name
    );
    Ok(serde_json::to_string_pretty(&files)?)
}
/// Fluent builder around a `CrateContext` for crate generation.
pub struct TemplateBuilder {
    /// Context accumulated by the builder methods.
    context: CrateContext,
    /// Optional AI provider. NOTE(review): not consulted by `build()`.
    openai_provider: Option<OpenAIProvider>,
    /// Validation toggle. NOTE(review): not consulted by `build()`.
    validation_enabled: bool,
}
impl TemplateBuilder {
#[must_use]
pub fn new(name: String, description: String) -> Self {
Self {
context: CrateContext {
crate_name: name,
description,
version: "0.1.0".to_string(),
features: Vec::new(),
dependencies: Vec::new(),
generated_files: HashMap::new(),
output_path: PathBuf::new(),
metadata: HashMap::new(),
stage_outputs: HashMap::new(),
},
openai_provider: None,
validation_enabled: true,
}
}
#[must_use]
pub fn with_features(mut self, features: Vec<String>) -> Self {
self.context.features = features;
self
}
#[must_use]
pub fn with_output_path(mut self, path: PathBuf) -> Self {
self.context.output_path = path;
self
}
#[must_use]
pub fn with_ai_provider(mut self, provider: OpenAIProvider) -> Self {
self.openai_provider = Some(provider);
self
}
#[must_use]
pub fn with_validation(mut self, enabled: bool) -> Self {
self.validation_enabled = enabled;
self
}
pub async fn build(self) -> Result<HashMap<String, String>> {
let manager = TemplateManager::new().await?;
manager
.render_crate(&self.context, &self.context.output_path)
.await
}
}
pub mod utils {
    //! Helpers for template validation and crate-name handling.
    use super::{Context, Handlebars, Regex, Result, Rng};
    /// Checks that `template` is valid Handlebars syntax by attempting to
    /// register it with a throwaway registry.
    ///
    /// # Errors
    /// Returns the registration error wrapped with context.
    pub fn validate_template_syntax(template: &str) -> Result<()> {
        let mut handlebars = Handlebars::new();
        handlebars
            .register_template_string("test", template)
            .context("Invalid template syntax")?;
        Ok(())
    }
    /// Extracts `{{variable}}` names from a template in order of appearance
    /// (duplicates are preserved for callers that count occurrences).
    #[must_use]
    pub fn extract_template_variables(template: &str) -> Vec<String> {
        // The pattern is a compile-time constant, so failure here is a
        // programmer error, not a runtime condition — state that explicitly.
        let re = Regex::new(r"\{\{(\w+)\}\}").expect("static variable regex must compile");
        re.captures_iter(template)
            .filter_map(|cap| cap.get(1))
            .map(|m| m.as_str().to_string())
            .collect()
    }
    /// Strips every character that is not alphanumeric, whitespace, '-' or '_'.
    #[must_use]
    pub fn sanitize_template_input(input: &str) -> String {
        input
            .chars()
            .filter(|c| c.is_alphanumeric() || c.is_whitespace() || "-_".contains(*c))
            .collect()
    }
    /// Validates a crate name: non-empty, only alphanumerics/'_'/'-', and not
    /// starting or ending with a hyphen. (Intentionally more permissive than
    /// crates.io, which additionally rejects some leading characters.)
    ///
    /// # Errors
    /// Returns a descriptive error for the first rule violated.
    pub fn validate_crate_name(name: &str) -> Result<()> {
        if name.is_empty() {
            return Err(anyhow::anyhow!("Crate name cannot be empty"));
        }
        if !name
            .chars()
            .all(|c| c.is_alphanumeric() || c == '_' || c == '-')
        {
            return Err(anyhow::anyhow!("Crate name contains invalid characters"));
        }
        if name.starts_with('-') || name.ends_with('-') {
            return Err(anyhow::anyhow!(
                "Crate name cannot start or end with hyphen"
            ));
        }
        Ok(())
    }
    /// Produces a random name of the form `adjective-noun-NNN`
    /// (NNN drawn from 100..999, i.e. always three digits).
    #[must_use]
    pub fn generate_random_crate_name() -> String {
        let adjectives = ["fast", "smart", "clever", "quick", "bright", "cool", "neat"];
        let nouns = ["tool", "lib", "crate", "util", "helper", "kit", "box"];
        let mut rng = rand::thread_rng();
        format!(
            "{}-{}-{}",
            adjectives[rng.gen_range(0..adjectives.len())],
            nouns[rng.gen_range(0..nouns.len())],
            rng.gen_range(100..999)
        )
    }
}