//! HTTP API layer for llm-link (`llm_link/api/mod.rs`): shared app state,
//! health/debug endpoints, and protocol-specific submodules.

pub mod anthropic;
pub mod convert;
pub mod ollama;
pub mod openai;

use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};

use axum::response::Json;
use serde_json::json;

use crate::service::Service as LlmService;
use crate::settings::Settings;
11
12/// 应用状态
13#[derive(Clone)]
14pub struct AppState {
15    pub llm_service: Arc<LlmService>,
16    pub config: Arc<Settings>,
17}
18
19impl AppState {
20    pub fn new(llm_service: LlmService, config: Settings) -> Self {
21        Self {
22            llm_service: Arc::new(llm_service),
23            config: Arc::new(config),
24        }
25    }
26}
27
28/// 健康检查端点
29pub async fn health_check() -> Json<serde_json::Value> {
30    Json(json!({
31        "status": "ok",
32        "service": "llm-link",
33        "version": "0.1.0"
34    }))
35}
36
37/// 调试测试端点
38pub async fn debug_test() -> Json<serde_json::Value> {
39    Json(json!({
40        "debug": "test",
41        "timestamp": "2025-10-15T16:00:00Z"
42    }))
43}