// openserve 2.0.3
//
// A modern, high-performance, AI-enhanced file server built in Rust.
// Documentation:
//! AI-related data models.

use serde::{Deserialize, Serialize};

/// Request for AI content analysis
// `PartialEq` derived alongside the serde traits so requests can be
// compared directly (useful in tests and request de-duplication).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AnalysisRequest {
    /// Path of the file to analyze
    pub file_path: String,
    /// Type of analysis to perform
    pub analysis_type: AnalysisType,
    /// Optional parameters for the analysis; `None` falls back to
    /// `AnalysisOptions::default()` (resolution happens in the consumer).
    pub options: Option<AnalysisOptions>,
}

/// Result of AI content analysis
// `PartialEq` (not `Eq`) is derived because `confidence: f32` only
// supports partial equality.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AnalysisResult {
    /// Original file path that was analyzed
    pub file_path: String,
    /// Generated summary of the content; `None` when summarization
    /// was not part of the requested analysis
    pub summary: Option<String>,
    /// Extracted tags from the content; empty when tag extraction
    /// was not requested or produced nothing
    pub tags: Vec<String>,
    /// Detected language of the content
    pub language: Option<String>,
    /// Confidence score for the analysis
    pub confidence: f32,
}

/// Type of AI analysis to perform
// Fieldless enum: `Copy`, `Eq`, and `Hash` are free to derive and let the
// type be passed by value, matched exhaustively, and used as a map key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum AnalysisType {
    /// Generate a summary of the content
    Summary,
    /// Extract relevant tags from the content
    TagExtraction,
    /// Detect the language of the content
    LanguageDetection,
    /// Perform all available analysis types
    Complete,
}

/// Options for AI analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisOptions {
    /// Maximum length of the summary
    pub max_summary_length: Option<usize>,
    /// Maximum number of tags to extract
    pub max_tags: Option<usize>,
    /// Whether to include confidence scores
    pub include_confidence: bool,
}

impl Default for AnalysisOptions {
    fn default() -> Self {
        Self {
            max_summary_length: Some(200),
            max_tags: Some(10),
            include_confidence: true,
        }
    }
}

/// Chat request to AI service
// `PartialEq` derived for parity with the other request/response models
// in this module.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChatRequest {
    /// User message to send to the AI
    pub message: String,
    /// Optional context from files
    pub context: Option<String>,
    /// Maximum length of the response; `None` leaves the limit to the
    /// AI service's own default
    pub max_response_length: Option<usize>,
}

/// Response from AI chat service
// `PartialEq` (not `Eq`) is derived because `confidence: f32` only
// supports partial equality.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChatResponse {
    /// AI-generated response
    pub response: String,
    /// Confidence score for the response
    pub confidence: f32,
    /// Number of tokens used in the request; `None` when the backing
    /// service does not report usage
    pub tokens_used: Option<u32>,
}