feagi_api/endpoints/
burst_engine.rs

1// Copyright 2025 Neuraville Inc.
2// Licensed under the Apache License, Version 2.0
3
4//! Burst Engine API Endpoints - Exact port from Python `/v1/burst_engine/*`
5
6// Removed - using crate::common::State instead
7use crate::common::ApiState;
8use crate::common::{ApiError, ApiResult, Json, Path, Query, State};
9use std::collections::HashMap;
10
11/// Get the current simulation timestep in seconds.
12#[utoipa::path(
13    get,
14    path = "/v1/burst_engine/simulation_timestep",
15    tag = "burst_engine"
16)]
17pub async fn get_simulation_timestep(State(state): State<ApiState>) -> ApiResult<Json<f64>> {
18    let runtime_service = state.runtime_service.as_ref();
19    match runtime_service.get_status().await {
20        Ok(status) => {
21            // Convert frequency to timestep (1/Hz = seconds)
22            let timestep = if status.frequency_hz > 0.0 {
23                1.0 / status.frequency_hz
24            } else {
25                0.0
26            };
27            Ok(Json(timestep))
28        }
29        Err(e) => Err(ApiError::internal(format!("Failed to get timestep: {}", e))),
30    }
31}
32
33/// Set the simulation timestep in seconds (converted to burst frequency).
34#[utoipa::path(
35    post,
36    path = "/v1/burst_engine/simulation_timestep",
37    tag = "burst_engine"
38)]
39pub async fn post_simulation_timestep(
40    State(state): State<ApiState>,
41    Json(request): Json<HashMap<String, f64>>,
42) -> ApiResult<Json<HashMap<String, String>>> {
43    let runtime_service = state.runtime_service.as_ref();
44
45    if let Some(&timestep) = request.get("simulation_timestep") {
46        // Convert timestep (seconds) to frequency (Hz)
47        let frequency = if timestep > 0.0 { 1.0 / timestep } else { 0.0 };
48
49        match runtime_service.set_frequency(frequency).await {
50            Ok(_) => Ok(Json(HashMap::from([(
51                "message".to_string(),
52                format!("Timestep set to {}", timestep),
53            )]))),
54            Err(e) => Err(ApiError::internal(format!("Failed to set timestep: {}", e))),
55        }
56    } else {
57        Err(ApiError::invalid_input("simulation_timestep required"))
58    }
59}
60
61// ============================================================================
62// FCL (Fire Candidate List) ENDPOINTS
63// ============================================================================
64
/// Get the Fire Candidate List (FCL) content showing neurons ready to fire.
///
/// Groups FCL neurons by cortical area (resolved via the connectome's
/// cortical_idx -> cortical_id mapping), limits each area's list to 20
/// neuron IDs, and returns the grouped lists plus summary counts.
#[utoipa::path(
    get,
    path = "/v1/burst_engine/fcl",
    tag = "burst_engine",
    responses(
        (status = 200, description = "FCL content", body = HashMap<String, serde_json::Value>),
        (status = 500, description = "Internal server error")
    )
)]
pub async fn get_fcl(
    State(state): State<ApiState>,
) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
    use std::collections::BTreeMap;
    use tracing::debug;

    let runtime_service = state.runtime_service.as_ref();
    let connectome_service = state.connectome_service.as_ref();

    // CRITICAL FIX: Get FCL snapshot WITH cortical_idx from NPU (not extracted from neuron_id bits!)
    // Old code was doing (neuron_id >> 32) which is WRONG - neuron_id is u32, not packed!
    // Each element is a (neuron_id, cortical_idx, potential) tuple; potential is unused here.
    let fcl_data = runtime_service
        .get_fcl_snapshot_with_cortical_idx()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to get FCL snapshot: {}", e)))?;

    // Get burst count for timestep
    let timestep = runtime_service
        .get_burst_count()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to get burst count: {}", e)))?;

    // Get all cortical areas to map cortical_idx -> cortical_id
    let areas = connectome_service
        .list_cortical_areas()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;

    // Build cortical_idx -> cortical_id mapping
    let mut idx_to_id: HashMap<u32, String> = HashMap::new();
    for area in &areas {
        idx_to_id.insert(area.cortical_idx, area.cortical_id.clone());
    }

    // Group FCL neurons by cortical area
    // Use BTreeMap for consistent ordering in JSON output
    let mut cortical_areas: BTreeMap<String, Vec<u64>> = BTreeMap::new();

    for (neuron_id, cortical_idx, _potential) in &fcl_data {
        // Map cortical_idx to cortical_id using actual stored values
        // Unknown indices get a synthetic "area_{idx}" label rather than being dropped.
        let cortical_id = idx_to_id
            .get(cortical_idx)
            .cloned()
            .unwrap_or_else(|| format!("area_{}", cortical_idx));

        cortical_areas
            .entry(cortical_id)
            .or_default()
            .push(*neuron_id);
    }

    // Limit to first 20 neuron IDs per area (matching Python behavior for network efficiency)
    for neuron_list in cortical_areas.values_mut() {
        neuron_list.truncate(20);
    }

    // NOTE(review): these counts are computed AFTER truncation, so total_neurons
    // is capped at 20 per area rather than the true FCL size — confirm this
    // matches the Python implementation's reported totals.
    let active_cortical_count = cortical_areas.len();
    let total_neurons: usize = cortical_areas.values().map(|v| v.len()).sum();

    // Build response (NO global_fcl per user request)
    let mut response = HashMap::new();
    response.insert("timestep".to_string(), serde_json::json!(timestep));
    response.insert(
        "total_neurons".to_string(),
        serde_json::json!(total_neurons),
    );
    response.insert(
        "cortical_areas".to_string(),
        serde_json::json!(cortical_areas),
    );
    response.insert("default_window_size".to_string(), serde_json::json!(20));
    response.insert(
        "active_cortical_count".to_string(),
        serde_json::json!(active_cortical_count),
    );

    debug!(target: "feagi-api", "GET /fcl - {} neurons across {} cortical areas (limited to 20/area)",
           total_neurons, active_cortical_count);

    Ok(Json(response))
}
156
157/// Get the Fire Queue (FQ) showing neurons that actually fired this timestep.
158#[utoipa::path(
159    get,
160    path = "/v1/burst_engine/fire_queue",
161    tag = "burst_engine",
162    responses(
163        (status = 200, description = "Fire queue content", body = HashMap<String, serde_json::Value>),
164        (status = 500, description = "Internal server error")
165    )
166)]
167pub async fn get_fire_queue(
168    State(state): State<ApiState>,
169) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
170    use tracing::debug;
171
172    let runtime_service = state.runtime_service.as_ref();
173    let connectome_service = state.connectome_service.as_ref();
174
175    // Get Fire Queue sample from RuntimeService
176    let fq_sample = runtime_service
177        .get_fire_queue_sample()
178        .await
179        .map_err(|e| ApiError::internal(format!("Failed to get fire queue: {}", e)))?;
180
181    // Get burst count for timestep
182    let timestep = runtime_service
183        .get_burst_count()
184        .await
185        .map_err(|e| ApiError::internal(format!("Failed to get burst count: {}", e)))?;
186
187    // CRITICAL FIX: Build cortical_idx -> cortical_id mapping from ConnectomeService
188    // This uses the actual stored cortical_idx values instead of fabricating names
189    let areas = connectome_service
190        .list_cortical_areas()
191        .await
192        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;
193
194    let idx_to_id: HashMap<u32, String> = areas
195        .iter()
196        .map(|a| (a.cortical_idx, a.cortical_id.clone()))
197        .collect();
198
199    // Convert cortical_idx to cortical_id
200    let mut cortical_areas: HashMap<String, Vec<u64>> = HashMap::new();
201    let mut total_fired = 0;
202
203    for (cortical_idx, (neuron_ids, _, _, _, _)) in fq_sample {
204        // Use actual cortical_id from mapping, fallback to area_{idx} if not found
205        let cortical_id = idx_to_id
206            .get(&cortical_idx)
207            .cloned()
208            .unwrap_or_else(|| format!("area_{}", cortical_idx));
209
210        let ids_u64: Vec<u64> = neuron_ids.iter().map(|&id| id as u64).collect();
211        total_fired += ids_u64.len();
212        cortical_areas.insert(cortical_id, ids_u64);
213    }
214
215    let mut response = HashMap::new();
216    response.insert("timestep".to_string(), serde_json::json!(timestep));
217    response.insert("total_fired".to_string(), serde_json::json!(total_fired));
218    response.insert(
219        "cortical_areas".to_string(),
220        serde_json::json!(cortical_areas),
221    );
222
223    debug!(target: "feagi-api", "GET /fire_queue - returned {} fired neurons", total_fired);
224
225    Ok(Json(response))
226}
227
228/// Reset the Fire Candidate List, clearing all pending fire candidates.
229#[utoipa::path(
230    post,
231    path = "/v1/burst_engine/fcl_reset",
232    tag = "burst_engine",
233    responses(
234        (status = 200, description = "FCL reset successfully", body = HashMap<String, String>),
235        (status = 500, description = "Internal server error")
236    )
237)]
238pub async fn post_fcl_reset(
239    State(_state): State<ApiState>,
240) -> ApiResult<Json<HashMap<String, String>>> {
241    use tracing::info;
242
243    // TODO: Reset FCL in BurstLoopRunner/NPU
244    info!(target: "feagi-api", "FCL reset requested (implementation pending)");
245
246    Ok(Json(HashMap::from([(
247        "message".to_string(),
248        "Fire Candidate List reset successfully".to_string(),
249    )])))
250}
251
252/// Get Fire Candidate List sampler status including frequency and consumer state.
253#[utoipa::path(
254    get,
255    path = "/v1/burst_engine/fcl_status",
256    tag = "burst_engine",
257    responses(
258        (status = 200, description = "FCL status", body = HashMap<String, serde_json::Value>),
259        (status = 500, description = "Internal server error")
260    )
261)]
262pub async fn get_fcl_status(
263    State(_state): State<ApiState>,
264) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
265    use tracing::debug;
266
267    // TODO: Get FCL manager status
268    debug!(target: "feagi-api", "GET /fcl_status - returning stub (implementation pending)");
269
270    let mut response = HashMap::new();
271    response.insert("available".to_string(), serde_json::json!(false));
272    response.insert(
273        "error".to_string(),
274        serde_json::json!("FCL manager not yet implemented in Rust"),
275    );
276
277    Ok(Json(response))
278}
279
280// ============================================================================
281// FIRE LEDGER WINDOW SIZE ENDPOINTS
282// ============================================================================
283
284/// GET /v1/burst_engine/fire_ledger/default_window_size
285#[utoipa::path(
286    get,
287    path = "/v1/burst_engine/fire_ledger/default_window_size",
288    tag = "burst_engine",
289    responses(
290        (status = 200, description = "Default window size", body = i32),
291        (status = 500, description = "Internal server error")
292    )
293)]
294pub async fn get_fire_ledger_default_window_size(
295    State(_state): State<ApiState>,
296) -> ApiResult<Json<i32>> {
297    // Get default window size from Fire Ledger configuration
298    // TODO: Add get_default_window_size to RuntimeService
299    // For now, return standard default
300    Ok(Json(20))
301}
302
303/// Set the default fire history window size for all cortical areas.
304#[utoipa::path(
305    put,
306    path = "/v1/burst_engine/fire_ledger/default_window_size",
307    tag = "burst_engine",
308    responses(
309        (status = 200, description = "Window size updated", body = HashMap<String, serde_json::Value>),
310        (status = 500, description = "Internal server error")
311    )
312)]
313pub async fn put_fire_ledger_default_window_size(
314    State(_state): State<ApiState>,
315    Json(request): Json<HashMap<String, i32>>,
316) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
317    let window_size = request.get("window_size").copied().unwrap_or(20);
318
319    if window_size <= 0 {
320        return Err(ApiError::invalid_input("Window size must be positive"));
321    }
322
323    // TODO: Update default window size configuration
324    tracing::info!(target: "feagi-api", "Default Fire Ledger window size set to {}", window_size);
325
326    let mut response = HashMap::new();
327    response.insert("success".to_string(), serde_json::json!(true));
328    response.insert("window_size".to_string(), serde_json::json!(window_size));
329    response.insert(
330        "message".to_string(),
331        serde_json::json!(format!("Default window size set to {}", window_size)),
332    );
333
334    Ok(Json(response))
335}
336
337/// Get fire history window configuration for all cortical areas.
338#[utoipa::path(
339    get,
340    path = "/v1/burst_engine/fire_ledger/areas_window_config",
341    tag = "burst_engine",
342    responses(
343        (status = 200, description = "Areas window configuration", body = HashMap<String, serde_json::Value>),
344        (status = 500, description = "Internal server error")
345    )
346)]
347pub async fn get_fire_ledger_areas_window_config(
348    State(state): State<ApiState>,
349) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
350    let runtime_service = state.runtime_service.as_ref();
351    let connectome_service = state.connectome_service.as_ref();
352
353    // Get Fire Ledger configurations from RuntimeService
354    let configs = runtime_service
355        .get_fire_ledger_configs()
356        .await
357        .map_err(|e| ApiError::internal(format!("Failed to get fire ledger configs: {}", e)))?;
358
359    // CRITICAL FIX: Build cortical_idx -> cortical_id mapping from ConnectomeService
360    // This uses the actual stored cortical_idx values instead of fabricating names
361    let cortical_areas_list = connectome_service
362        .list_cortical_areas()
363        .await
364        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;
365
366    let idx_to_id: HashMap<u32, String> = cortical_areas_list
367        .iter()
368        .map(|a| (a.cortical_idx, a.cortical_id.clone()))
369        .collect();
370
371    // Convert to area_id -> window_size HashMap using actual cortical_id
372    let mut areas: HashMap<String, usize> = HashMap::new();
373    for (cortical_idx, window_size) in configs {
374        // Use actual cortical_id from mapping, fallback to area_{idx} if not found
375        let cortical_id = idx_to_id
376            .get(&cortical_idx)
377            .cloned()
378            .unwrap_or_else(|| format!("area_{}", cortical_idx));
379        areas.insert(cortical_id, window_size);
380    }
381
382    let mut response = HashMap::new();
383    response.insert("default_window_size".to_string(), serde_json::json!(20));
384    response.insert("areas".to_string(), serde_json::json!(areas));
385    response.insert(
386        "total_configured_areas".to_string(),
387        serde_json::json!(areas.len()),
388    );
389
390    Ok(Json(response))
391}
392
393// ============================================================================
394// BURST ENGINE CONTROL & STATUS ENDPOINTS
395// ============================================================================
396
397/// Get burst engine statistics including burst count, frequency, and performance metrics.
398#[utoipa::path(
399    get,
400    path = "/v1/burst_engine/stats",
401    tag = "burst_engine",
402    responses(
403        (status = 200, description = "Burst engine statistics", body = HashMap<String, serde_json::Value>),
404        (status = 500, description = "Internal server error")
405    )
406)]
407pub async fn get_stats(
408    State(state): State<ApiState>,
409) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
410    let runtime_service = state.runtime_service.as_ref();
411
412    match runtime_service.get_status().await {
413        Ok(status) => {
414            let mut response = HashMap::new();
415            response.insert(
416                "burst_count".to_string(),
417                serde_json::json!(status.burst_count),
418            );
419            response.insert(
420                "frequency_hz".to_string(),
421                serde_json::json!(status.frequency_hz),
422            );
423            response.insert("active".to_string(), serde_json::json!(status.is_running));
424            response.insert("paused".to_string(), serde_json::json!(status.is_paused));
425
426            Ok(Json(response))
427        }
428        Err(e) => Err(ApiError::internal(format!("Failed to get stats: {}", e))),
429    }
430}
431
432/// Get burst engine status including active state, pause state, and burst count.
433#[utoipa::path(
434    get,
435    path = "/v1/burst_engine/status",
436    tag = "burst_engine",
437    responses(
438        (status = 200, description = "Burst engine status", body = HashMap<String, serde_json::Value>),
439        (status = 500, description = "Internal server error")
440    )
441)]
442pub async fn get_status(
443    State(state): State<ApiState>,
444) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
445    let runtime_service = state.runtime_service.as_ref();
446
447    match runtime_service.get_status().await {
448        Ok(status) => {
449            let mut response = HashMap::new();
450            response.insert("active".to_string(), serde_json::json!(status.is_running));
451            response.insert("paused".to_string(), serde_json::json!(status.is_paused));
452            response.insert(
453                "burst_count".to_string(),
454                serde_json::json!(status.burst_count),
455            );
456            response.insert(
457                "frequency_hz".to_string(),
458                serde_json::json!(status.frequency_hz),
459            );
460
461            Ok(Json(response))
462        }
463        Err(e) => Err(ApiError::internal(format!("Failed to get status: {}", e))),
464    }
465}
466
467/// Control burst engine with actions: start, pause, stop, or resume.
468#[utoipa::path(
469    post,
470    path = "/v1/burst_engine/control",
471    tag = "burst_engine",
472    responses(
473        (status = 200, description = "Control command executed", body = HashMap<String, String>),
474        (status = 500, description = "Internal server error")
475    )
476)]
477pub async fn post_control(
478    State(state): State<ApiState>,
479    Json(request): Json<HashMap<String, String>>,
480) -> ApiResult<Json<HashMap<String, String>>> {
481    let runtime_service = state.runtime_service.as_ref();
482    let action = request.get("action").map(|s| s.as_str());
483
484    match action {
485        Some("start") | Some("resume") => {
486            runtime_service
487                .start()
488                .await
489                .map_err(|e| ApiError::internal(format!("Failed to start: {}", e)))?;
490            Ok(Json(HashMap::from([(
491                "message".to_string(),
492                "Burst engine started".to_string(),
493            )])))
494        }
495        Some("pause") => {
496            runtime_service
497                .pause()
498                .await
499                .map_err(|e| ApiError::internal(format!("Failed to pause: {}", e)))?;
500            Ok(Json(HashMap::from([(
501                "message".to_string(),
502                "Burst engine paused".to_string(),
503            )])))
504        }
505        Some("stop") => {
506            runtime_service
507                .stop()
508                .await
509                .map_err(|e| ApiError::internal(format!("Failed to stop: {}", e)))?;
510            Ok(Json(HashMap::from([(
511                "message".to_string(),
512                "Burst engine stopped".to_string(),
513            )])))
514        }
515        _ => Err(ApiError::invalid_input(
516            "Invalid action: must be 'start', 'pause', or 'stop'",
517        )),
518    }
519}
520
521// ============================================================================
522// FCL SAMPLER CONFIGURATION ENDPOINTS
523// ============================================================================
524
525/// Get FCL/FQ sampler configuration including frequency and consumer settings.
526#[utoipa::path(
527    get,
528    path = "/v1/burst_engine/fcl_sampler/config",
529    tag = "burst_engine",
530    responses(
531        (status = 200, description = "FCL sampler configuration", body = HashMap<String, serde_json::Value>),
532        (status = 500, description = "Internal server error")
533    )
534)]
535pub async fn get_fcl_sampler_config(
536    State(state): State<ApiState>,
537) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
538    let runtime_service = state.runtime_service.as_ref();
539
540    let (frequency, consumer) = runtime_service
541        .get_fcl_sampler_config()
542        .await
543        .map_err(|e| ApiError::internal(format!("Failed to get FCL sampler config: {}", e)))?;
544
545    let mut response = HashMap::new();
546    response.insert("frequency".to_string(), serde_json::json!(frequency));
547    response.insert("consumer".to_string(), serde_json::json!(consumer));
548
549    Ok(Json(response))
550}
551
552/// Update FCL/FQ sampler configuration including frequency and consumer settings.
553#[utoipa::path(
554    post,
555    path = "/v1/burst_engine/fcl_sampler/config",
556    tag = "burst_engine",
557    responses(
558        (status = 200, description = "FCL sampler configuration updated", body = HashMap<String, serde_json::Value>),
559        (status = 400, description = "Invalid input"),
560        (status = 500, description = "Internal server error")
561    )
562)]
563pub async fn post_fcl_sampler_config(
564    State(state): State<ApiState>,
565    Json(request): Json<HashMap<String, serde_json::Value>>,
566) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
567    let runtime_service = state.runtime_service.as_ref();
568
569    let frequency = request.get("frequency").and_then(|v| v.as_f64());
570    let consumer = request
571        .get("consumer")
572        .and_then(|v| v.as_u64().map(|n| n as u32));
573
574    runtime_service
575        .set_fcl_sampler_config(frequency, consumer)
576        .await
577        .map_err(|e| ApiError::internal(format!("Failed to update FCL sampler config: {}", e)))?;
578
579    // Return the updated config
580    let (freq, cons) = runtime_service
581        .get_fcl_sampler_config()
582        .await
583        .map_err(|e| ApiError::internal(format!("Failed to get updated config: {}", e)))?;
584
585    let mut response = HashMap::new();
586    response.insert("frequency".to_string(), serde_json::json!(freq));
587    response.insert("consumer".to_string(), serde_json::json!(cons));
588
589    Ok(Json(response))
590}
591
592/// Get FCL sample rate for a specific cortical area.
593#[utoipa::path(
594    get,
595    path = "/v1/burst_engine/fcl_sampler/area/{area_id}/sample_rate",
596    tag = "burst_engine",
597    params(
598        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
599    ),
600    responses(
601        (status = 200, description = "Sample rate", body = HashMap<String, f64>),
602        (status = 500, description = "Internal server error")
603    )
604)]
605pub async fn get_area_fcl_sample_rate(
606    State(state): State<ApiState>,
607    Path(area_id): Path<u32>,
608) -> ApiResult<Json<HashMap<String, f64>>> {
609    let runtime_service = state.runtime_service.as_ref();
610
611    let sample_rate = runtime_service
612        .get_area_fcl_sample_rate(area_id)
613        .await
614        .map_err(|e| ApiError::internal(format!("Failed to get sample rate: {}", e)))?;
615
616    let mut response = HashMap::new();
617    response.insert("sample_rate".to_string(), sample_rate);
618
619    Ok(Json(response))
620}
621
622/// Set FCL sample rate for a specific cortical area.
623#[utoipa::path(
624    post,
625    path = "/v1/burst_engine/fcl_sampler/area/{area_id}/sample_rate",
626    tag = "burst_engine",
627    params(
628        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
629    ),
630    responses(
631        (status = 200, description = "Sample rate updated", body = HashMap<String, f64>),
632        (status = 400, description = "Invalid input"),
633        (status = 500, description = "Internal server error")
634    )
635)]
636pub async fn post_area_fcl_sample_rate(
637    State(state): State<ApiState>,
638    Path(area_id): Path<u32>,
639    Json(request): Json<HashMap<String, f64>>,
640) -> ApiResult<Json<HashMap<String, f64>>> {
641    let runtime_service = state.runtime_service.as_ref();
642
643    let sample_rate = request
644        .get("sample_rate")
645        .copied()
646        .ok_or_else(|| ApiError::invalid_input("sample_rate required"))?;
647
648    if sample_rate <= 0.0 {
649        return Err(ApiError::invalid_input("Sample rate must be positive"));
650    }
651
652    runtime_service
653        .set_area_fcl_sample_rate(area_id, sample_rate)
654        .await
655        .map_err(|e| ApiError::internal(format!("Failed to set sample rate: {}", e)))?;
656
657    let mut response = HashMap::new();
658    response.insert("sample_rate".to_string(), sample_rate);
659
660    Ok(Json(response))
661}
662
663// ============================================================================
664// BURST ENGINE RUNTIME CONTROL ENDPOINTS
665// ============================================================================
666
667/// Get the total number of bursts executed since start.
668#[utoipa::path(
669    get,
670    path = "/v1/burst_engine/burst_counter",
671    tag = "burst_engine",
672    responses(
673        (status = 200, description = "Burst counter", body = u64),
674        (status = 500, description = "Internal server error")
675    )
676)]
677pub async fn get_burst_counter(State(state): State<ApiState>) -> ApiResult<Json<u64>> {
678    let runtime_service = state.runtime_service.as_ref();
679
680    let burst_count = runtime_service
681        .get_burst_count()
682        .await
683        .map_err(|e| ApiError::internal(format!("Failed to get burst counter: {}", e)))?;
684
685    Ok(Json(burst_count))
686}
687
688/// Start the burst engine to begin neural processing.
689#[utoipa::path(
690    post,
691    path = "/v1/burst_engine/start",
692    tag = "burst_engine",
693    responses(
694        (status = 200, description = "Burst engine started", body = HashMap<String, String>),
695        (status = 400, description = "Invalid state"),
696        (status = 500, description = "Internal server error")
697    )
698)]
699pub async fn post_start(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
700    let runtime_service = state.runtime_service.as_ref();
701
702    runtime_service
703        .start()
704        .await
705        .map_err(|e| ApiError::internal(format!("Failed to start burst engine: {}", e)))?;
706
707    Ok(Json(HashMap::from([(
708        "message".to_string(),
709        "Burst engine started successfully".to_string(),
710    )])))
711}
712
713/// Stop the burst engine and halt neural processing.
714#[utoipa::path(
715    post,
716    path = "/v1/burst_engine/stop",
717    tag = "burst_engine",
718    responses(
719        (status = 200, description = "Burst engine stopped", body = HashMap<String, String>),
720        (status = 500, description = "Internal server error")
721    )
722)]
723pub async fn post_stop(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
724    let runtime_service = state.runtime_service.as_ref();
725
726    runtime_service
727        .stop()
728        .await
729        .map_err(|e| ApiError::internal(format!("Failed to stop burst engine: {}", e)))?;
730
731    Ok(Json(HashMap::from([(
732        "message".to_string(),
733        "Burst engine stopped successfully".to_string(),
734    )])))
735}
736
737/// Pause the burst engine temporarily (alias for pause).
738#[utoipa::path(
739    post,
740    path = "/v1/burst_engine/hold",
741    tag = "burst_engine",
742    responses(
743        (status = 200, description = "Burst engine paused", body = HashMap<String, String>),
744        (status = 400, description = "Invalid state"),
745        (status = 500, description = "Internal server error")
746    )
747)]
748pub async fn post_hold(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
749    let runtime_service = state.runtime_service.as_ref();
750
751    runtime_service
752        .pause()
753        .await
754        .map_err(|e| ApiError::internal(format!("Failed to pause burst engine: {}", e)))?;
755
756    Ok(Json(HashMap::from([(
757        "message".to_string(),
758        "Burst engine paused successfully".to_string(),
759    )])))
760}
761
762/// Resume the burst engine after pause to continue neural processing.
763#[utoipa::path(
764    post,
765    path = "/v1/burst_engine/resume",
766    tag = "burst_engine",
767    responses(
768        (status = 200, description = "Burst engine resumed", body = HashMap<String, String>),
769        (status = 400, description = "Invalid state"),
770        (status = 500, description = "Internal server error")
771    )
772)]
773pub async fn post_resume(
774    State(state): State<ApiState>,
775) -> ApiResult<Json<HashMap<String, String>>> {
776    let runtime_service = state.runtime_service.as_ref();
777
778    runtime_service
779        .resume()
780        .await
781        .map_err(|e| ApiError::internal(format!("Failed to resume burst engine: {}", e)))?;
782
783    Ok(Json(HashMap::from([(
784        "message".to_string(),
785        "Burst engine resumed successfully".to_string(),
786    )])))
787}
788
789/// Get burst engine configuration including frequency and timing settings.
790#[utoipa::path(
791    get,
792    path = "/v1/burst_engine/config",
793    tag = "burst_engine",
794    responses(
795        (status = 200, description = "Burst engine configuration", body = HashMap<String, serde_json::Value>),
796        (status = 500, description = "Internal server error")
797    )
798)]
799pub async fn get_config(
800    State(state): State<ApiState>,
801) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
802    let runtime_service = state.runtime_service.as_ref();
803
804    let status = runtime_service
805        .get_status()
806        .await
807        .map_err(|e| ApiError::internal(format!("Failed to get config: {}", e)))?;
808
809    let mut response = HashMap::new();
810    response.insert(
811        "burst_frequency_hz".to_string(),
812        serde_json::json!(status.frequency_hz),
813    );
814    response.insert(
815        "burst_interval_seconds".to_string(),
816        serde_json::json!(1.0 / status.frequency_hz),
817    );
818    response.insert(
819        "target_frequency_hz".to_string(),
820        serde_json::json!(status.frequency_hz),
821    );
822    response.insert(
823        "is_running".to_string(),
824        serde_json::json!(status.is_running),
825    );
826    response.insert("is_paused".to_string(), serde_json::json!(status.is_paused));
827
828    Ok(Json(response))
829}
830
831/// Update burst engine configuration including frequency and timing parameters.
832#[utoipa::path(
833    put,
834    path = "/v1/burst_engine/config",
835    tag = "burst_engine",
836    responses(
837        (status = 200, description = "Configuration updated", body = HashMap<String, serde_json::Value>),
838        (status = 400, description = "Invalid input"),
839        (status = 500, description = "Internal server error")
840    )
841)]
842pub async fn put_config(
843    State(state): State<ApiState>,
844    Json(request): Json<HashMap<String, serde_json::Value>>,
845) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
846    let runtime_service = state.runtime_service.as_ref();
847
848    // Extract burst_frequency_hz from request
849    if let Some(freq) = request.get("burst_frequency_hz").and_then(|v| v.as_f64()) {
850        if freq <= 0.0 {
851            return Err(ApiError::invalid_input("Frequency must be positive"));
852        }
853
854        runtime_service
855            .set_frequency(freq)
856            .await
857            .map_err(|e| ApiError::internal(format!("Failed to set frequency: {}", e)))?;
858    }
859
860    // Return updated config
861    get_config(State(state)).await
862}
863
864// ============================================================================
865// FIRE LEDGER ENDPOINTS
866// ============================================================================
867
868/// Get fire ledger window size for a specific cortical area.
869#[utoipa::path(
870    get,
871    path = "/v1/burst_engine/fire_ledger/area/{area_id}/window_size",
872    tag = "burst_engine",
873    params(
874        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
875    ),
876    responses(
877        (status = 200, description = "Window size", body = i32),
878        (status = 500, description = "Internal server error")
879    )
880)]
881pub async fn get_fire_ledger_area_window_size(
882    State(state): State<ApiState>,
883    Path(area_id): Path<u32>,
884) -> ApiResult<Json<i32>> {
885    let runtime_service = state.runtime_service.as_ref();
886
887    let configs = runtime_service
888        .get_fire_ledger_configs()
889        .await
890        .map_err(|e| ApiError::internal(format!("Failed to get fire ledger configs: {}", e)))?;
891
892    // Find the window size for this area
893    for (idx, window_size) in configs {
894        if idx == area_id {
895            return Ok(Json(window_size as i32));
896        }
897    }
898
899    // Return default if not found
900    Ok(Json(20))
901}
902
903/// Set fire ledger window size for a specific cortical area.
904#[utoipa::path(
905    put,
906    path = "/v1/burst_engine/fire_ledger/area/{area_id}/window_size",
907    tag = "burst_engine",
908    params(
909        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
910    ),
911    responses(
912        (status = 200, description = "Window size updated", body = HashMap<String, serde_json::Value>),
913        (status = 400, description = "Invalid input"),
914        (status = 500, description = "Internal server error")
915    )
916)]
917pub async fn put_fire_ledger_area_window_size(
918    State(state): State<ApiState>,
919    Path(area_id): Path<u32>,
920    Json(request): Json<HashMap<String, i32>>,
921) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
922    let runtime_service = state.runtime_service.as_ref();
923
924    let window_size = request
925        .get("window_size")
926        .copied()
927        .ok_or_else(|| ApiError::invalid_input("window_size required"))?;
928
929    if window_size <= 0 {
930        return Err(ApiError::invalid_input("Window size must be positive"));
931    }
932
933    runtime_service
934        .configure_fire_ledger_window(area_id, window_size as usize)
935        .await
936        .map_err(|e| ApiError::internal(format!("Failed to configure window: {}", e)))?;
937
938    let mut response = HashMap::new();
939    response.insert("success".to_string(), serde_json::json!(true));
940    response.insert("area_id".to_string(), serde_json::json!(area_id));
941    response.insert("window_size".to_string(), serde_json::json!(window_size));
942
943    Ok(Json(response))
944}
945
946/// Get fire ledger historical data for a specific cortical area.
947#[utoipa::path(
948    get,
949    path = "/v1/burst_engine/fire_ledger/area/{area_id}/history",
950    tag = "burst_engine",
951    params(
952        ("area_id" = String, Path, description = "Cortical area ID or index"),
953        ("lookback_steps" = Option<i32>, Query, description = "Number of timesteps to retrieve")
954    ),
955    responses(
956        (status = 200, description = "Fire ledger history", body = HashMap<String, serde_json::Value>),
957        (status = 400, description = "Invalid area ID"),
958        (status = 500, description = "Internal server error")
959    )
960)]
961pub async fn get_fire_ledger_history(
962    State(_state): State<ApiState>,
963    Path(area_id): Path<String>,
964    Query(params): Query<HashMap<String, String>>,
965) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
966    // Parse area_id as cortical_idx
967    let cortical_idx = area_id
968        .parse::<u32>()
969        .map_err(|_| ApiError::invalid_input(format!("Invalid area_id: {}", area_id)))?;
970
971    let _lookback_steps = params
972        .get("lookback_steps")
973        .and_then(|s| s.parse::<i32>().ok());
974
975    // TODO: Implement fire ledger history retrieval from NPU
976    // For now, return placeholder
977    let mut response = HashMap::new();
978    response.insert("success".to_string(), serde_json::json!(true));
979    response.insert("area_id".to_string(), serde_json::json!(area_id));
980    response.insert("cortical_idx".to_string(), serde_json::json!(cortical_idx));
981    response.insert("history".to_string(), serde_json::json!([]));
982    response.insert("window_size".to_string(), serde_json::json!(20));
983    response.insert(
984        "note".to_string(),
985        serde_json::json!("Fire ledger history not yet implemented"),
986    );
987
988    Ok(Json(response))
989}
990
991// ============================================================================
992// MEMBRANE POTENTIALS ENDPOINTS
993// ============================================================================
994
995/// Get membrane potentials for specific neurons.
996#[utoipa::path(
997    get,
998    path = "/v1/burst_engine/membrane_potentials",
999    tag = "burst_engine",
1000    params(
1001        ("neuron_ids" = Vec<u64>, Query, description = "List of neuron IDs")
1002    ),
1003    responses(
1004        (status = 200, description = "Membrane potentials", body = HashMap<String, f32>),
1005        (status = 500, description = "Internal server error")
1006    )
1007)]
1008pub async fn get_membrane_potentials(
1009    State(_state): State<ApiState>,
1010    Query(params): Query<HashMap<String, String>>,
1011) -> ApiResult<Json<HashMap<String, f32>>> {
1012    // Parse neuron_ids from query params
1013    let neuron_ids_str = params
1014        .get("neuron_ids")
1015        .ok_or_else(|| ApiError::invalid_input("neuron_ids parameter required"))?;
1016
1017    // TODO: Parse comma-separated neuron IDs and fetch from NPU
1018    // For now, return empty
1019    tracing::debug!(target: "feagi-api", "GET membrane_potentials for neuron_ids: {}", neuron_ids_str);
1020
1021    Ok(Json(HashMap::new()))
1022}
1023
1024/// Update membrane potentials for specific neurons.
1025#[utoipa::path(
1026    put,
1027    path = "/v1/burst_engine/membrane_potentials",
1028    tag = "burst_engine",
1029    responses(
1030        (status = 200, description = "Membrane potentials updated", body = HashMap<String, String>),
1031        (status = 400, description = "Invalid input"),
1032        (status = 500, description = "Internal server error")
1033    )
1034)]
1035pub async fn put_membrane_potentials(
1036    State(_state): State<ApiState>,
1037    Json(potentials): Json<HashMap<String, f32>>,
1038) -> ApiResult<Json<HashMap<String, String>>> {
1039    // TODO: Update membrane potentials in NPU
1040    tracing::info!(target: "feagi-api", "PUT membrane_potentials: {} neurons", potentials.len());
1041
1042    Ok(Json(HashMap::from([(
1043        "message".to_string(),
1044        format!("Updated {} neuron membrane potentials", potentials.len()),
1045    )])))
1046}
1047
1048// ============================================================================
1049// FREQUENCY MEASUREMENT ENDPOINTS
1050// ============================================================================
1051
1052/// Get current burst frequency measurement status.
1053#[utoipa::path(
1054    get,
1055    path = "/v1/burst_engine/frequency_status",
1056    tag = "burst_engine",
1057    responses(
1058        (status = 200, description = "Frequency status", body = HashMap<String, serde_json::Value>),
1059        (status = 500, description = "Internal server error")
1060    )
1061)]
1062pub async fn get_frequency_status(
1063    State(state): State<ApiState>,
1064) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1065    let runtime_service = state.runtime_service.as_ref();
1066
1067    let status = runtime_service
1068        .get_status()
1069        .await
1070        .map_err(|e| ApiError::internal(format!("Failed to get status: {}", e)))?;
1071
1072    let mut response = HashMap::new();
1073    response.insert(
1074        "target_frequency_hz".to_string(),
1075        serde_json::json!(status.frequency_hz),
1076    );
1077    response.insert(
1078        "actual_frequency_hz".to_string(),
1079        serde_json::json!(status.frequency_hz),
1080    );
1081    response.insert(
1082        "burst_count".to_string(),
1083        serde_json::json!(status.burst_count),
1084    );
1085    response.insert("is_measuring".to_string(), serde_json::json!(false));
1086
1087    Ok(Json(response))
1088}
1089
1090/// Trigger burst frequency measurement to analyze current processing rate.
1091#[utoipa::path(
1092    post,
1093    path = "/v1/burst_engine/measure_frequency",
1094    tag = "burst_engine",
1095    responses(
1096        (status = 200, description = "Measurement started", body = HashMap<String, serde_json::Value>),
1097        (status = 500, description = "Internal server error")
1098    )
1099)]
1100pub async fn post_measure_frequency(
1101    State(_state): State<ApiState>,
1102    Json(request): Json<HashMap<String, serde_json::Value>>,
1103) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1104    let duration = request
1105        .get("duration_seconds")
1106        .and_then(|v| v.as_f64())
1107        .unwrap_or(5.0);
1108    let sample_count = request
1109        .get("sample_count")
1110        .and_then(|v| v.as_i64())
1111        .unwrap_or(100) as i32;
1112
1113    tracing::info!(target: "feagi-api", "Starting frequency measurement: {}s, {} samples", duration, sample_count);
1114
1115    // TODO: Implement frequency measurement
1116    let mut response = HashMap::new();
1117    response.insert("status".to_string(), serde_json::json!("started"));
1118    response.insert("duration_seconds".to_string(), serde_json::json!(duration));
1119    response.insert("sample_count".to_string(), serde_json::json!(sample_count));
1120
1121    Ok(Json(response))
1122}
1123
1124/// Get burst frequency measurement history over time.
1125#[utoipa::path(
1126    get,
1127    path = "/v1/burst_engine/frequency_history",
1128    tag = "burst_engine",
1129    params(
1130        ("limit" = Option<i32>, Query, description = "Number of measurements to return")
1131    ),
1132    responses(
1133        (status = 200, description = "Frequency history", body = HashMap<String, serde_json::Value>),
1134        (status = 500, description = "Internal server error")
1135    )
1136)]
1137pub async fn get_frequency_history(
1138    State(_state): State<ApiState>,
1139    Query(params): Query<HashMap<String, String>>,
1140) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1141    let limit = params
1142        .get("limit")
1143        .and_then(|s| s.parse::<i32>().ok())
1144        .unwrap_or(10);
1145
1146    // TODO: Implement frequency history retrieval
1147    let mut response = HashMap::new();
1148    response.insert("measurements".to_string(), serde_json::json!([]));
1149    response.insert("limit".to_string(), serde_json::json!(limit));
1150
1151    Ok(Json(response))
1152}
1153
1154/// Force connectome integration to rebuild neural connections immediately.
1155#[utoipa::path(
1156    post,
1157    path = "/v1/burst_engine/force_connectome_integration",
1158    tag = "burst_engine",
1159    responses(
1160        (status = 200, description = "Integration forced", body = HashMap<String, String>),
1161        (status = 500, description = "Internal server error")
1162    )
1163)]
1164pub async fn post_force_connectome_integration(
1165    State(_state): State<ApiState>,
1166) -> ApiResult<Json<HashMap<String, String>>> {
1167    // TODO: Implement connectome integration forcing
1168    tracing::info!(target: "feagi-api", "Force connectome integration requested");
1169
1170    Ok(Json(HashMap::from([
1171        (
1172            "message".to_string(),
1173            "Connectome integration initiated".to_string(),
1174        ),
1175        ("status".to_string(), "not_yet_implemented".to_string()),
1176    ])))
1177}