// openserve 2.0.3
//
// A modern, high-performance, AI-enhanced file server built in Rust.
// Crate-level documentation follows below.
//! # OpenServe Library
//!
//! This is the core library for OpenServe, an AI-enhanced, cloud-native
//! file server. It encapsulates all the core functionalities, including
//! the server, services, models, and utilities. By organizing the project
//! as a library, we ensure modularity, reusability, and testability.
//!
//! ## Modules
//!
//! The library is structured into the following modules:
//!
//! - `ai`:         Handles all AI-related features, including integration with OpenAI.
//! - `config`:     Defines the configuration structures and loading mechanisms.
//! - `error`:      Defines the error types used throughout the library.
//! - `handlers`:   Contains all Axum HTTP request handlers.
//! - `middleware`: Provides custom Axum middleware for logging, auth, etc.
//! - `models`:     Defines the data models and structures for the application.
//! - `server`:     Contains the main server implementation and setup.
//! - `services`:   Implements the business logic for file operations, search, etc.
//! - `telemetry`:  Configures and initializes logging, metrics, and tracing.
//! - `utils`:      Provides utility functions used across the application.
//!
#![warn(missing_docs)]
#![warn(rustdoc::missing_crate_level_docs)]

/// AI-related features, including integration with OpenAI.
pub mod ai;
/// Configuration structures and loading mechanisms.
pub mod config;
/// Error types used throughout the library.
pub mod error;
/// Axum HTTP request handlers.
pub mod handlers;
/// Custom Axum middleware for logging, auth, etc.
pub mod middleware;
/// Data models and structures for the application.
pub mod models;
/// The main server implementation and setup.
pub mod server;
/// Business logic for file operations, search, etc.
pub mod services;
/// Logging, metrics, and tracing configuration and initialization.
pub mod telemetry;
/// Utility functions used across the application.
pub mod utils;

use clap::Parser;

/// Command-line arguments for OpenServe server.
///
/// Parsed with clap's derive API. Every field can also be supplied through
/// the environment variable named in its `env` attribute; clap gives
/// command-line values precedence over the environment.
// NOTE(review): with clap derive, each field's `///` doc comment doubles as
// the CLI help text — editing those comments changes `--help` output.
#[derive(Parser, Debug)]
#[command(
    name = "openserve",
    // Keeps the reported CLI version in lockstep with Cargo.toml.
    version = env!("CARGO_PKG_VERSION"),
    author = "OpenServe Team <team@openserve.io>",
    about = "An AI-powered, cloud-native file server with intelligent features.",
    long_about = "OpenServe is a modern, high-performance file server built in Rust. \
                 It provides a flexible and scalable platform for serving files with \
                 advanced capabilities like AI-powered search, content analysis, and more."
)]
pub struct Args {
    /// Path to the directory to be served.
    // Positional argument; defaults to the current working directory.
    #[arg(default_value = ".", env = "OPENSERVE_PATH")]
    pub path: String,

    /// Port number for the server to listen on.
    #[arg(short, long, default_value = "5000", env = "OPENSERVE_PORT")]
    pub port: u16,

    /// Host address to bind the server to.
    // Explicit `-b` ("bind") because the auto-derived short would be `-h`,
    // which clap reserves for `--help`.
    #[arg(short = 'b', long, default_value = "0.0.0.0", env = "OPENSERVE_HOST")]
    pub host: String,

    /// Enable or disable AI-powered features.
    // Presence flag: `--ai` sets true, absence leaves false.
    #[arg(long, env = "OPENSERVE_AI_ENABLED")]
    pub ai: bool,

    /// API key for OpenAI services.
    // `hide = true` keeps this secret-bearing flag out of generated help.
    #[arg(long, env = "OPENAI_API_KEY", hide = true)]
    pub openai_api_key: Option<String>,

    /// Path to a custom configuration file.
    #[arg(short, long, env = "OPENSERVE_CONFIG")]
    pub config: Option<String>,

    /// Sets the logging level for the application.
    #[arg(long, default_value = "info", env = "OPENSERVE_LOG_LEVEL")]
    pub log_level: String,

    /// Enable Transport Layer Security (TLS) for HTTPS.
    #[arg(long, env = "OPENSERVE_TLS_ENABLED")]
    pub tls: bool,

    /// Path to the TLS certificate file (e.g., cert.pem).
    // `requires = "tls"` is one-directional: passing --tls-cert demands
    // --tls, but enabling --tls WITHOUT a cert/key is not rejected here.
    // NOTE(review): confirm that case is validated elsewhere (e.g. config
    // loading or server startup).
    #[arg(long, requires = "tls", env = "OPENSERVE_TLS_CERT")]
    pub tls_cert: Option<String>,

    /// Path to the TLS private key file (e.g., key.pem).
    // Same one-way `requires` relationship as `tls_cert` above.
    #[arg(long, requires = "tls", env = "OPENSERVE_TLS_KEY")]
    pub tls_key: Option<String>,
}

/// The main configuration for the OpenServe application.
pub use config::Config;
/// The core server implementation.
pub use server::Server;

/// The current version of the OpenServe application, read from `Cargo.toml`.
///
/// Matches the version reported by the CLI's `--version` flag, since both
/// read `CARGO_PKG_VERSION` at compile time.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

/// A prelude module for easy importing of common types.
///
/// This module re-exports the most commonly used types from the library,
/// making it convenient for users to import them all at once.
pub mod prelude {
    pub use crate::{
        config::{Config, ServerConfig, AiConfig, AuthConfig, SearchConfig, TelemetryConfig},
        server::Server,
        models::{
            self,
            file::{File, Directory, FileMetadata, UploadResponse},
            ai::{AnalysisRequest, AnalysisResult, ChatRequest, ChatResponse},
            auth::{Claims, AuthToken, User},
            search::{SearchRequest, SearchResult},
        },
        services::{FileService, SearchService, AuthService},
        ai::AiService,
    };
}