//! rsclaw 0.0.1-alpha.1
//!
//! High-performance AI agent (BETA). Optimized for the M4 Max and 2 GB VPS
//! deployments. 100% compatible with openclaw. See the project documentation
//! for usage details.
use super::context::AgentContext;
use super::memory::MemoryTracker;
use crate::provider::{ChatRequest, LlmProvider};
use crate::skill::{ShellRunner, SkillLoader};
use anyhow::Result;
use serde_json::Value;
use std::sync::Arc;
use tokio::sync::RwLock;

/// Agent configuration.
///
/// All string fields are `Arc<str>` so cloning a config is cheap
/// (refcount bump, no string copy) — the config is cloned into tasks.
#[derive(Debug, Clone)]
pub struct AgentConfig {
    /// Agent identifier; used in log output (see the `Drop` impl).
    pub name: Arc<str>,
    /// Model identifier forwarded verbatim in each `ChatRequest`.
    pub model: Arc<str>,
    /// System prompt seeded into the `AgentContext` at construction.
    pub system_prompt: Arc<str>,
    /// Per-response token cap; also passed to `AgentContext::new` as a size hint.
    pub max_tokens: u32,
    /// Soft memory budget in MiB.
    // NOTE(review): not referenced by the visible code — presumably consumed
    // by MemoryTracker elsewhere; confirm before relying on it.
    pub memory_limit_mb: u32,
}

impl Default for AgentConfig {
    fn default() -> Self {
        Self {
            name: Arc::from("default"),
            model: Arc::from("gpt-4"),
            system_prompt: Arc::from("You are a helpful assistant."),
            max_tokens: 4096,
            memory_limit_mb: 512,
        }
    }
}

/// Agent state.
///
/// Lifecycle (driven by `AgentRuntime::chat`): `Idle` → `Running` while a
/// chat turn is in flight, then back to `Idle` on success or `Error` on
/// failure.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AgentState {
    /// No request in flight; ready to accept work.
    Idle,
    /// A chat request is currently being processed.
    Running,
    /// The most recent chat request returned an error.
    Error,
}

/// Agent runtime.
///
/// Conversation context and lifecycle state live behind `Arc<RwLock<…>>` so
/// handles can be shared across async tasks; the async (tokio) locks allow
/// holding a guard across `.await` points.
pub struct AgentRuntime {
    /// Immutable-after-construction configuration.
    pub config: AgentConfig,
    /// Conversation history (system prompt + user/assistant turns).
    pub context: Arc<RwLock<AgentContext>>,
    /// Current lifecycle state; see `AgentState` transitions in `chat`.
    pub state: Arc<RwLock<AgentState>>,
    /// Memory accounting helper.
    // NOTE(review): not touched by the visible methods — verify where it is
    // updated before documenting its semantics further.
    pub memory: MemoryTracker,
}

impl AgentRuntime {
    /// Maximum number of messages retained in the conversation context.
    /// Was previously an inline magic number in `new`.
    const MAX_HISTORY_MESSAGES: usize = 100;

    /// Create a new agent runtime in the `Idle` state.
    ///
    /// Seeds the context with the configured system prompt, a history cap of
    /// [`Self::MAX_HISTORY_MESSAGES`], and the configured token budget.
    pub fn new(config: AgentConfig) -> Self {
        let context = AgentContext::new(
            config.system_prompt.clone(),
            Self::MAX_HISTORY_MESSAGES,
            config.max_tokens as usize,
        );

        Self {
            config,
            context: Arc::new(RwLock::new(context)),
            state: Arc::new(RwLock::new(AgentState::Idle)),
            memory: MemoryTracker::default(),
        }
    }

    /// Get agent name.
    pub fn name(&self) -> &str {
        &self.config.name
    }

    /// Get a snapshot of the current state.
    pub async fn state(&self) -> AgentState {
        self.state.read().await.clone()
    }

    /// Send a user message and return the assistant's reply.
    ///
    /// Transitions the state to `Running` for the duration of the call, then
    /// to `Idle` on success or `Error` on failure.
    ///
    /// # Errors
    /// Propagates any error from the provider's `chat` call.
    pub async fn chat(
        &self,
        message: &str,
        provider: Arc<dyn LlmProvider>,
    ) -> Result<String> {
        // Scoped so the write guard is dropped before awaiting the provider.
        {
            let mut state = self.state.write().await;
            *state = AgentState::Running;
        }

        let result = self.do_chat(message, provider).await;

        // Record the outcome before handing the result back to the caller.
        {
            let mut state = self.state.write().await;
            *state = match &result {
                Ok(_) => AgentState::Idle,
                Err(_) => AgentState::Error,
            };
        }

        result
    }

    /// Core chat flow: record the user turn, call the provider with the full
    /// history, record the assistant turn, and return its content.
    async fn do_chat(
        &self,
        message: &str,
        provider: Arc<dyn LlmProvider>,
    ) -> Result<String> {
        {
            let mut ctx = self.context.write().await;
            ctx.add_user_message(message);
        }

        // Snapshot the history under a read lock; the lock is released before
        // the (potentially slow) provider call.
        let messages = {
            let ctx = self.context.read().await;
            ctx.get_messages()
        };

        let request = ChatRequest {
            model: self.config.model.to_string(),
            messages,
            max_tokens: Some(self.config.max_tokens),
            temperature: Some(0.7), // fixed sampling temperature for all turns
            tools: None,
        };

        let response = provider.chat(request).await?;

        {
            let mut ctx = self.context.write().await;
            ctx.add_assistant_message(&response.content);
        }

        Ok(response.content)
    }

    /// Execute a skill by name from `skills_dir` and return its stdout.
    ///
    /// # Errors
    /// Fails if the skills directory cannot be loaded, the skill is unknown,
    /// or the skill process exits unsuccessfully (stderr is included in the
    /// error message).
    pub async fn execute_skill(
        &self,
        skill_name: &str,
        args: &[String],
        skills_dir: &std::path::Path,
    ) -> Result<String> {
        // NOTE(review): every call re-scans and loads the whole skills
        // directory — consider caching the loader if this is hot.
        let mut loader = SkillLoader::new(skills_dir.to_path_buf());
        loader.load_all()?;

        let skill = loader.get(skill_name)
            .ok_or_else(|| anyhow::anyhow!("Skill '{}' not found", skill_name))?;

        let runner = ShellRunner::default();
        let result = runner.run(&skill.path, args).await?;

        if result.success {
            Ok(result.stdout.to_string())
        } else {
            anyhow::bail!("Skill execution failed: {}", result.stderr);
        }
    }

    /// Clear all conversation history (the context's own `clear` semantics
    /// decide whether the system prompt is retained).
    pub async fn clear_context(&self) {
        let mut ctx = self.context.write().await;
        ctx.clear();
    }

    /// Number of messages currently held in the context.
    pub async fn message_count(&self) -> usize {
        let ctx = self.context.read().await;
        ctx.message_count()
    }
}

impl Drop for AgentRuntime {
    /// Emit a lifecycle trace when the runtime is torn down.
    fn drop(&mut self) {
        let agent_name = &self.config.name;
        tracing::info!("Agent '{}' dropped, memory released", agent_name);
    }
}