pub struct ModelDeploymentRepository {
pub base: BaseRepository<ModelDeployment>,
}

Expand description
模型部署Repository
Fields§
§base: BaseRepository<ModelDeployment>

Implementations§
Source§impl ModelDeploymentRepository
impl ModelDeploymentRepository
pub fn new(query_executor: Box<dyn QueryExecutor>) -> Self
Source§
pub async fn find_by_model_id(
&self,
model_id: Uuid,
context: &QueryContext,
) -> DatabaseResult<Vec<ModelDeployment>>
pub async fn find_by_model_id( &self, model_id: Uuid, context: &QueryContext, ) -> DatabaseResult<Vec<ModelDeployment>>
根据模型ID查找部署
Source§
pub async fn find_by_status(
&self,
status: DeploymentStatus,
context: &QueryContext,
) -> DatabaseResult<Vec<ModelDeployment>>
pub async fn find_by_status( &self, status: DeploymentStatus, context: &QueryContext, ) -> DatabaseResult<Vec<ModelDeployment>>
根据状态查找部署
Source§
pub async fn find_by_port(
&self,
port: u16,
context: &QueryContext,
) -> DatabaseResult<Option<ModelDeployment>>
pub async fn find_by_port( &self, port: u16, context: &QueryContext, ) -> DatabaseResult<Option<ModelDeployment>>
根据端口查找部署
Source§
pub async fn get_running_deployments(
&self,
context: &QueryContext,
) -> DatabaseResult<Vec<ModelDeployment>>
pub async fn get_running_deployments( &self, context: &QueryContext, ) -> DatabaseResult<Vec<ModelDeployment>>
获取运行中的部署
Examples found in repository?
examples/burncloud_usage.rs (line 132)
8async fn main() -> Result<(), Box<dyn std::error::Error>> {
9 println!("🚀 BurnCloud Database Example");
10
11 // 1. 创建数据库连接
12 let db = BurnCloudDatabaseBuilder::new()
13 .with_postgres("localhost", 5432, "burncloud", "postgres", "password")
14 .with_pool_size(10)
15 .with_timeout(30)
16 .build_and_initialize()
17 .await?;
18
19 println!("✅ Database connected and initialized");
20
21 // 2. 健康检查
22 db.health_check().await?;
23
24 // 3. 添加示例AI模型
25 let context = QueryContext::default();
26
27 let model = AiModel {
28 id: Uuid::new_v4(),
29 name: "Qwen2.5-7B-Chat".to_string(),
30 version: "v1.0".to_string(),
31 size_gb: 4.1,
32 model_type: ModelType::ChatCompletion,
33 provider: "Alibaba".to_string(),
34 description: Some("7B参数的对话模型".to_string()),
35 tags: vec!["chat".to_string(), "chinese".to_string()],
36 capabilities: vec!["text-generation".to_string(), "conversation".to_string()],
37 requirements: ModelRequirements {
38 min_ram_gb: 8.0,
39 min_vram_gb: Some(6.0),
40 gpu_required: true,
41 cpu_cores: 4,
42 disk_space_gb: 5.0,
43 supported_platforms: vec!["linux".to_string(), "windows".to_string()],
44 },
45 status: ModelStatus::Downloaded,
46 download_url: Some("https://example.com/qwen2.5-7b".to_string()),
47 checksum: Some("abc123".to_string()),
48 created_at: chrono::Utc::now(),
49 updated_at: chrono::Utc::now(),
50 };
51
52 let model_id = db.ai_models.base.create(&model, &context).await?;
53 println!("✅ AI模型已添加: {}", model_id);
54
55 // 4. 创建模型部署
56 let deployment = ModelDeployment {
57 id: Uuid::new_v4(),
58 model_id: model.id,
59 name: "qwen2.5-chat-prod".to_string(),
60 port: 8001,
61 bind_address: "127.0.0.1".to_string(),
62 api_key: "sk-burncloud-123456".to_string(),
63 max_concurrent: 4,
64 config: DeploymentConfig {
65 auto_start: true,
66 restart_on_failure: true,
67 max_restart_count: 3,
68 health_check_interval: 30,
69 timeout_seconds: 60,
70 log_level: LogLevel::Info,
71 custom_args: std::collections::HashMap::new(),
72 },
73 resource_config: ResourceConfig {
74 context_length: 4096,
75 temperature: 0.7,
76 top_p: 0.9,
77 top_k: 50,
78 max_tokens: 2048,
79 gpu_layers: Some(32),
80 threads: Some(8),
81 batch_size: 1,
82 },
83 status: DeploymentStatus::Running,
84 pid: Some(12345),
85 started_at: Some(chrono::Utc::now()),
86 stopped_at: None,
87 created_at: chrono::Utc::now(),
88 updated_at: chrono::Utc::now(),
89 };
90
91 let deployment_id = db.deployments.base.create(&deployment, &context).await?;
92 println!("✅ 模型部署已创建: {}", deployment_id);
93
94 // 5. 记录系统指标
95 let system_metrics = SystemMetrics {
96 id: Uuid::new_v4(),
97 timestamp: chrono::Utc::now(),
98 cpu_usage: 45.2,
99 memory_usage: 78.5,
100 memory_total: 16_000_000_000, // 16GB
101 disk_usage: 65.0,
102 disk_total: 500_000_000_000, // 500GB
103 gpu_usage: Some(32.1),
104 gpu_memory_usage: Some(60.5),
105 network_rx: 1024 * 1024, // 1MB
106 network_tx: 512 * 1024, // 512KB
107 };
108
109 db.system_metrics.base.create(&system_metrics, &context).await?;
110 println!("✅ 系统指标已记录");
111
112 // 6. 记录模型性能指标
113 let model_metrics = ModelMetrics {
114 id: Uuid::new_v4(),
115 deployment_id: deployment.id,
116 timestamp: chrono::Utc::now(),
117 request_count: 142,
118 error_count: 2,
119 average_response_time: 1.2,
120 tokens_per_second: 45.3,
121 concurrent_requests: 3,
122 queue_length: 0,
123 memory_usage: 85.2,
124 };
125
126 db.model_metrics.base.create(&model_metrics, &context).await?;
127 println!("✅ 模型性能指标已记录");
128
129 // 7. 查询示例
130
131 // 查找运行中的部署
132 let running_deployments = db.deployments.get_running_deployments(&context).await?;
133 println!("🔍 运行中的部署数量: {}", running_deployments.len());
134
135 // 查找聊天模型
136 let chat_models = db.ai_models.find_by_type(ModelType::ChatCompletion, &context).await?;
137 println!("🔍 聊天模型数量: {}", chat_models.len());
138
139 // 搜索模型
140 let search_results = db.ai_models.search("Qwen", &context).await?;
141 println!("🔍 搜索'Qwen'的结果: {}", search_results.len());
142
143 // 获取最新系统指标
144 let latest_metrics = db.system_metrics.get_latest(&context).await?;
145 if let Some(metrics) = latest_metrics {
146 println!("📊 当前CPU使用率: {:.1}%", metrics.cpu_usage);
147 println!("📊 当前内存使用率: {:.1}%", metrics.memory_usage);
148 }
149
150 // 8. 获取数据库统计信息
151 let stats = db.get_database_stats().await?;
152 println!("\n📈 数据库统计:");
153 println!(" 模型数量: {}", stats.models_count);
154 println!(" 部署数量: {}", stats.deployments_count);
155 println!(" 指标记录数: {}", stats.metrics_count);
156 println!(" 连接池大小: {}", stats.pool_size);
157
158 // 9. 创建用户设置
159 let user_settings = UserSettings {
160 id: Uuid::new_v4(),
161 user_id: "admin".to_string(),
162 theme: Theme::Dark,
163 language: "zh-CN".to_string(),
164 font_size: FontSize::Medium,
165 auto_refresh_interval: 30,
166 notifications_enabled: true,
167 notification_types: vec![
168 NotificationType::ModelStarted,
169 NotificationType::ModelStopped,
170 NotificationType::ModelError,
171 ],
172 created_at: chrono::Utc::now(),
173 updated_at: chrono::Utc::now(),
174 };
175
176 db.user_settings.base.create(&user_settings, &context).await?;
177 println!("✅ 用户设置已保存");
178
179 // 10. 清理旧数据 (保留30天)
180 let cleanup_stats = db.cleanup_old_data(30).await?;
181 println!("\n🧹 数据清理完成:");
182 println!(" 删除指标记录: {}", cleanup_stats.metrics_deleted);
183 println!(" 删除日志记录: {}", cleanup_stats.logs_deleted);
184
185 // 11. 备份配置
186 let backup_config = db.backup_config().await?;
187 println!("\n💾 配置已备份:");
188 println!(" 模型数量: {}", backup_config.models.len());
189 println!(" 部署数量: {}", backup_config.deployments.len());
190 println!(" 备份时间: {}", backup_config.backup_time.format("%Y-%m-%d %H:%M:%S"));
191
192 println!("\n🎉 BurnCloud数据库示例运行完成!");
193
194 Ok(())
195}
196
197/// 演示高级查询功能
198async fn advanced_query_examples(db: &BurnCloudDatabase) -> Result<(), Box<dyn std::error::Error>> {
199 let context = QueryContext::default();
200
201 // 时间范围查询
202 let start_time = chrono::Utc::now() - chrono::Duration::hours(24);
203 let end_time = chrono::Utc::now();
204
205 // 获取过去24小时的系统指标
206 let recent_metrics = db.system_metrics.find_by_time_range(start_time, end_time, &context).await?;
207 println!("📊 过去24小时系统指标: {} 条记录", recent_metrics.len());
208
209 // 获取特定部署的性能指标
210 if let Some(deployment) = db.deployments.get_running_deployments(&context).await?.first() {
211 let deployment_metrics = db.model_metrics
212 .find_by_deployment_id(deployment.id, start_time, end_time, &context)
213 .await?;
214 println!("📈 部署 {} 的性能指标: {} 条记录", deployment.name, deployment_metrics.len());
215
216 // 获取该部署的错误日志
217 let error_logs = db.request_logs.find_errors(start_time, end_time, &context).await?;
218 println!("❌ 过去24小时错误日志: {} 条记录", error_logs.len());
219 }
220
221 Ok(())
}

Auto Trait Implementations§
impl Freeze for ModelDeploymentRepository
impl !RefUnwindSafe for ModelDeploymentRepository
impl Send for ModelDeploymentRepository
impl Sync for ModelDeploymentRepository
impl Unpin for ModelDeploymentRepository
impl !UnwindSafe for ModelDeploymentRepository
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise. Read more