Skip to main content

feagi_api/endpoints/
burst_engine.rs

1// Copyright 2025 Neuraville Inc.
2// Licensed under the Apache License, Version 2.0
3
4//! Burst Engine API Endpoints - Exact port from Python `/v1/burst_engine/*`
5
6// Removed - using crate::common::State instead
7use crate::common::ApiState;
8use crate::common::{ApiError, ApiResult, Json, Path, Query, State};
9use serde::Deserialize;
10use std::collections::HashMap;
11use utoipa::{IntoParams, ToSchema};
12
13/// Get the current simulation timestep in seconds.
14#[utoipa::path(
15    get,
16    path = "/v1/burst_engine/simulation_timestep",
17    tag = "burst_engine"
18)]
19pub async fn get_simulation_timestep(State(state): State<ApiState>) -> ApiResult<Json<f64>> {
20    let runtime_service = state.runtime_service.as_ref();
21    match runtime_service.get_status().await {
22        Ok(status) => {
23            // Convert frequency to timestep (1/Hz = seconds)
24            let timestep = if status.frequency_hz > 0.0 {
25                1.0 / status.frequency_hz
26            } else {
27                0.0
28            };
29            Ok(Json(timestep))
30        }
31        Err(e) => Err(ApiError::internal(format!("Failed to get timestep: {}", e))),
32    }
33}
34
35/// Set the simulation timestep in seconds (converted to burst frequency).
36#[utoipa::path(
37    post,
38    path = "/v1/burst_engine/simulation_timestep",
39    tag = "burst_engine"
40)]
41pub async fn post_simulation_timestep(
42    State(state): State<ApiState>,
43    Json(request): Json<HashMap<String, f64>>,
44) -> ApiResult<Json<HashMap<String, String>>> {
45    let runtime_service = state.runtime_service.as_ref();
46
47    if let Some(&timestep) = request.get("simulation_timestep") {
48        // Convert timestep (seconds) to frequency (Hz)
49        let frequency = if timestep > 0.0 { 1.0 / timestep } else { 0.0 };
50
51        match runtime_service.set_frequency(frequency).await {
52            Ok(_) => Ok(Json(HashMap::from([(
53                "message".to_string(),
54                format!("Timestep set to {}", timestep),
55            )]))),
56            Err(e) => Err(ApiError::internal(format!("Failed to set timestep: {}", e))),
57        }
58    } else {
59        Err(ApiError::invalid_input("simulation_timestep required"))
60    }
61}
62
63// ============================================================================
64// FCL (Fire Candidate List) ENDPOINTS
65// ============================================================================
66
67/// Get the Fire Candidate List (FCL) content showing neurons ready to fire.
68#[utoipa::path(
69    get,
70    path = "/v1/burst_engine/fcl",
71    tag = "burst_engine",
72    responses(
73        (status = 200, description = "FCL content", body = HashMap<String, serde_json::Value>),
74        (status = 500, description = "Internal server error")
75    )
76)]
77pub async fn get_fcl(
78    State(state): State<ApiState>,
79) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
80    use std::collections::BTreeMap;
81    use tracing::{debug, warn};
82
83    warn!("[API] /v1/burst_engine/fcl endpoint called - this acquires NPU lock!");
84
85    let runtime_service = state.runtime_service.as_ref();
86    let connectome_service = state.connectome_service.as_ref();
87
88    // CRITICAL FIX: Get FCL snapshot WITH cortical_idx from NPU (not extracted from neuron_id bits!)
89    // Old code was doing (neuron_id >> 32) which is WRONG - neuron_id is u32, not packed!
90    let call_start = std::time::Instant::now();
91    let fcl_data = runtime_service
92        .get_fcl_snapshot_with_cortical_idx()
93        .await
94        .map_err(|e| ApiError::internal(format!("Failed to get FCL snapshot: {}", e)))?;
95    let call_duration = call_start.elapsed();
96    warn!(
97        "[API] /v1/burst_engine/fcl completed in {:.2}ms (returned {} neurons)",
98        call_duration.as_secs_f64() * 1000.0,
99        fcl_data.len()
100    );
101
102    // Get burst count for timestep
103    let timestep = runtime_service
104        .get_burst_count()
105        .await
106        .map_err(|e| ApiError::internal(format!("Failed to get burst count: {}", e)))?;
107
108    // Get all cortical areas to map cortical_idx -> cortical_id
109    let areas = connectome_service
110        .list_cortical_areas()
111        .await
112        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;
113
114    // Build cortical_idx -> cortical_id mapping
115    let mut idx_to_id: HashMap<u32, String> = HashMap::new();
116    for area in &areas {
117        idx_to_id.insert(area.cortical_idx, area.cortical_id.clone());
118    }
119
120    // Group FCL neurons by cortical area
121    // Use BTreeMap for consistent ordering in JSON output
122    let mut cortical_areas: BTreeMap<String, Vec<u64>> = BTreeMap::new();
123
124    for (neuron_id, cortical_idx, _potential) in &fcl_data {
125        // Map cortical_idx to cortical_id using actual stored values
126        let cortical_id = idx_to_id.get(cortical_idx).cloned().ok_or_else(|| {
127            ApiError::internal(format!(
128                "Unmapped cortical_idx in FCL snapshot: idx={}. Refusing fallback (would corrupt determinism).",
129                cortical_idx
130            ))
131        })?;
132
133        cortical_areas
134            .entry(cortical_id)
135            .or_default()
136            .push(*neuron_id);
137    }
138
139    // Limit to first 20 neuron IDs per area (matching Python behavior for network efficiency)
140    for neuron_list in cortical_areas.values_mut() {
141        neuron_list.truncate(20);
142    }
143
144    let active_cortical_count = cortical_areas.len();
145    let total_neurons: usize = cortical_areas.values().map(|v| v.len()).sum();
146
147    // Build response (NO global_fcl per user request)
148    let mut response = HashMap::new();
149    response.insert("timestep".to_string(), serde_json::json!(timestep));
150    response.insert(
151        "total_neurons".to_string(),
152        serde_json::json!(total_neurons),
153    );
154    response.insert(
155        "cortical_areas".to_string(),
156        serde_json::json!(cortical_areas),
157    );
158    response.insert("default_window_size".to_string(), serde_json::json!(20));
159    response.insert(
160        "active_cortical_count".to_string(),
161        serde_json::json!(active_cortical_count),
162    );
163
164    debug!(target: "feagi-api", "GET /fcl - {} neurons across {} cortical areas (limited to 20/area)",
165           total_neurons, active_cortical_count);
166
167    Ok(Json(response))
168}
169
170/// Get the Fire Queue (FQ) showing neurons that actually fired this timestep.
171#[utoipa::path(
172    get,
173    path = "/v1/burst_engine/fire_queue",
174    tag = "burst_engine",
175    responses(
176        (status = 200, description = "Fire queue content", body = HashMap<String, serde_json::Value>),
177        (status = 500, description = "Internal server error")
178    )
179)]
180pub async fn get_fire_queue(
181    State(state): State<ApiState>,
182) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
183    use tracing::{debug, info};
184
185    let runtime_service = state.runtime_service.as_ref();
186    let connectome_service = state.connectome_service.as_ref();
187
188    // Get Fire Queue sample from RuntimeService
189    let fq_sample = runtime_service
190        .get_fire_queue_sample()
191        .await
192        .map_err(|e| ApiError::internal(format!("Failed to get fire queue: {}", e)))?;
193
194    if fq_sample.is_empty() {
195        debug!("[FIRE-QUEUE-API] ⚠️ Fire queue sample is EMPTY - no areas");
196    } else {
197        let total_neurons: usize = fq_sample
198            .values()
199            .map(|(x, y, z, _, _)| x.len() + y.len() + z.len())
200            .sum();
201        info!(
202            "[FIRE-QUEUE-API] ✓ Received fire queue sample: {} areas, {} total neurons",
203            fq_sample.len(),
204            total_neurons
205        );
206    }
207
208    // Get burst count for timestep
209    let timestep = runtime_service
210        .get_burst_count()
211        .await
212        .map_err(|e| ApiError::internal(format!("Failed to get burst count: {}", e)))?;
213
214    // CRITICAL FIX: Build cortical_idx -> cortical_id mapping from ConnectomeService
215    // This uses the actual stored cortical_idx values instead of fabricating names
216    let areas = connectome_service
217        .list_cortical_areas()
218        .await
219        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;
220
221    let idx_to_id: HashMap<u32, String> = areas
222        .iter()
223        .map(|a| (a.cortical_idx, a.cortical_id.clone()))
224        .collect();
225
226    // Convert cortical_idx to cortical_id and report only per-area fired neuron COUNT.
227    // Caller explicitly does not need individual neuron IDs.
228    let mut cortical_areas: HashMap<String, u64> = HashMap::new();
229    let mut total_fired: u64 = 0;
230
231    for (cortical_idx, (neuron_ids, _, _, _, _)) in fq_sample {
232        let cortical_id = idx_to_id.get(&cortical_idx).cloned().ok_or_else(|| {
233            ApiError::internal(format!(
234                "Unmapped cortical_idx in Fire Queue sample: idx={}. Refusing fallback (would corrupt determinism).",
235                cortical_idx
236            ))
237        })?;
238
239        let fired_count = neuron_ids.len() as u64;
240        info!(
241            "[FIRE-QUEUE-API] Area {} (idx={}): {} neurons fired",
242            cortical_id, cortical_idx, fired_count
243        );
244
245        total_fired += fired_count;
246        cortical_areas.insert(cortical_id, fired_count);
247    }
248
249    info!("[FIRE-QUEUE-API] Total fired neurons: {}", total_fired);
250
251    let mut response = HashMap::new();
252    response.insert("timestep".to_string(), serde_json::json!(timestep));
253    response.insert("total_fired".to_string(), serde_json::json!(total_fired));
254    response.insert(
255        "cortical_areas".to_string(),
256        serde_json::json!(cortical_areas),
257    );
258
259    debug!(target: "feagi-api", "GET /fire_queue - returned {} fired neurons", total_fired);
260
261    Ok(Json(response))
262}
263
/// Query parameters for `GET /v1/burst_engine/fcl/neuron`.
#[derive(Debug, Clone, Deserialize, IntoParams, ToSchema)]
#[into_params(parameter_in = Query)]
pub struct FclNeuronQuery {
    /// Neuron ID to look up in the current FCL snapshot
    pub neuron_id: u64,
}
270
271/// Get the current FCL candidate potential for a specific neuron.
272#[utoipa::path(
273    get,
274    path = "/v1/burst_engine/fcl/neuron",
275    tag = "burst_engine",
276    params(FclNeuronQuery)
277)]
278pub async fn get_fcl_neuron(
279    State(state): State<ApiState>,
280    Query(params): Query<FclNeuronQuery>,
281) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
282    let runtime_service = state.runtime_service.as_ref();
283
284    let fcl_data = runtime_service
285        .get_fcl_snapshot_with_cortical_idx()
286        .await
287        .map_err(|e| ApiError::internal(format!("Failed to get FCL snapshot: {}", e)))?;
288
289    let mut response = HashMap::new();
290    response.insert("neuron_id".to_string(), serde_json::json!(params.neuron_id));
291
292    match fcl_data.iter().find(|(id, _, _)| *id == params.neuron_id) {
293        Some((_id, cortical_idx, potential)) => {
294            response.insert("present".to_string(), serde_json::json!(true));
295            response.insert("cortical_idx".to_string(), serde_json::json!(*cortical_idx));
296            response.insert(
297                "candidate_potential".to_string(),
298                serde_json::json!(*potential),
299            );
300        }
301        None => {
302            response.insert("present".to_string(), serde_json::json!(false));
303            response.insert("candidate_potential".to_string(), serde_json::json!(0.0));
304        }
305    }
306
307    Ok(Json(response))
308}
309
/// Query parameters for `GET /v1/burst_engine/fire_queue/neuron`.
#[derive(Debug, Clone, Deserialize, IntoParams, ToSchema)]
#[into_params(parameter_in = Query)]
pub struct FireQueueNeuronQuery {
    /// Neuron ID to check in the last Fire Queue sample
    pub neuron_id: u64,
}
316
317/// Check whether a specific neuron fired in the last burst (Fire Queue sample).
318#[utoipa::path(
319    get,
320    path = "/v1/burst_engine/fire_queue/neuron",
321    tag = "burst_engine",
322    params(FireQueueNeuronQuery)
323)]
324pub async fn get_fire_queue_neuron(
325    State(state): State<ApiState>,
326    Query(params): Query<FireQueueNeuronQuery>,
327) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
328    let runtime_service = state.runtime_service.as_ref();
329
330    let fq_sample = runtime_service
331        .get_fire_queue_sample()
332        .await
333        .map_err(|e| ApiError::internal(format!("Failed to get fire queue sample: {}", e)))?;
334
335    let mut response = HashMap::new();
336    response.insert("neuron_id".to_string(), serde_json::json!(params.neuron_id));
337
338    let needle = params.neuron_id as u32;
339    for (cortical_idx, (neuron_ids, _xs, _ys, _zs, mps)) in fq_sample {
340        if let Some(pos) = neuron_ids.iter().position(|&id| id == needle) {
341            response.insert("fired".to_string(), serde_json::json!(true));
342            response.insert("cortical_idx".to_string(), serde_json::json!(cortical_idx));
343            response.insert(
344                "membrane_potential_at_fire".to_string(),
345                serde_json::json!(mps.get(pos).copied().unwrap_or(0.0)),
346            );
347            return Ok(Json(response));
348        }
349    }
350
351    response.insert("fired".to_string(), serde_json::json!(false));
352    Ok(Json(response))
353}
354
355/// Reset the Fire Candidate List, clearing all pending fire candidates.
356#[utoipa::path(
357    post,
358    path = "/v1/burst_engine/fcl_reset",
359    tag = "burst_engine",
360    responses(
361        (status = 200, description = "FCL reset successfully", body = HashMap<String, String>),
362        (status = 500, description = "Internal server error")
363    )
364)]
365pub async fn post_fcl_reset(
366    State(_state): State<ApiState>,
367) -> ApiResult<Json<HashMap<String, String>>> {
368    use tracing::info;
369
370    // TODO: Reset FCL in BurstLoopRunner/NPU
371    info!(target: "feagi-api", "FCL reset requested (implementation pending)");
372
373    Ok(Json(HashMap::from([(
374        "message".to_string(),
375        "Fire Candidate List reset successfully".to_string(),
376    )])))
377}
378
379/// Get Fire Candidate List sampler status including frequency and consumer state.
380#[utoipa::path(
381    get,
382    path = "/v1/burst_engine/fcl_status",
383    tag = "burst_engine",
384    responses(
385        (status = 200, description = "FCL status", body = HashMap<String, serde_json::Value>),
386        (status = 500, description = "Internal server error")
387    )
388)]
389pub async fn get_fcl_status(
390    State(_state): State<ApiState>,
391) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
392    use tracing::debug;
393
394    // TODO: Get FCL manager status
395    debug!(target: "feagi-api", "GET /fcl_status - returning stub (implementation pending)");
396
397    let mut response = HashMap::new();
398    response.insert("available".to_string(), serde_json::json!(false));
399    response.insert(
400        "error".to_string(),
401        serde_json::json!("FCL manager not yet implemented in Rust"),
402    );
403
404    Ok(Json(response))
405}
406
407// ============================================================================
408// FIRE LEDGER WINDOW SIZE ENDPOINTS
409// ============================================================================
410
411/// GET /v1/burst_engine/fire_ledger/default_window_size
412#[utoipa::path(
413    get,
414    path = "/v1/burst_engine/fire_ledger/default_window_size",
415    tag = "burst_engine",
416    responses(
417        (status = 200, description = "Default window size", body = i32),
418        (status = 500, description = "Internal server error")
419    )
420)]
421pub async fn get_fire_ledger_default_window_size(
422    State(_state): State<ApiState>,
423) -> ApiResult<Json<i32>> {
424    // Get default window size from Fire Ledger configuration
425    // TODO: Add get_default_window_size to RuntimeService
426    // For now, return standard default
427    Ok(Json(20))
428}
429
430/// Set the default fire history window size for all cortical areas.
431#[utoipa::path(
432    put,
433    path = "/v1/burst_engine/fire_ledger/default_window_size",
434    tag = "burst_engine",
435    responses(
436        (status = 200, description = "Window size updated", body = HashMap<String, serde_json::Value>),
437        (status = 500, description = "Internal server error")
438    )
439)]
440pub async fn put_fire_ledger_default_window_size(
441    State(_state): State<ApiState>,
442    Json(request): Json<HashMap<String, i32>>,
443) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
444    let window_size = request.get("window_size").copied().unwrap_or(20);
445
446    if window_size <= 0 {
447        return Err(ApiError::invalid_input("Window size must be positive"));
448    }
449
450    // TODO: Update default window size configuration
451    tracing::info!(target: "feagi-api", "Default Fire Ledger window size set to {}", window_size);
452
453    let mut response = HashMap::new();
454    response.insert("success".to_string(), serde_json::json!(true));
455    response.insert("window_size".to_string(), serde_json::json!(window_size));
456    response.insert(
457        "message".to_string(),
458        serde_json::json!(format!("Default window size set to {}", window_size)),
459    );
460
461    Ok(Json(response))
462}
463
464/// Get fire history window configuration for all cortical areas.
465#[utoipa::path(
466    get,
467    path = "/v1/burst_engine/fire_ledger/areas_window_config",
468    tag = "burst_engine",
469    responses(
470        (status = 200, description = "Areas window configuration", body = HashMap<String, serde_json::Value>),
471        (status = 500, description = "Internal server error")
472    )
473)]
474pub async fn get_fire_ledger_areas_window_config(
475    State(state): State<ApiState>,
476) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
477    tracing::debug!("[NPU-LOCK] API: GET /v1/burst_engine/fire_ledger/areas_window_config called - this acquires NPU lock!");
478    let runtime_service = state.runtime_service.as_ref();
479    let connectome_service = state.connectome_service.as_ref();
480
481    // Get Fire Ledger configurations from RuntimeService
482    let configs = runtime_service
483        .get_fire_ledger_configs()
484        .await
485        .map_err(|e| ApiError::internal(format!("Failed to get fire ledger configs: {}", e)))?;
486
487    // CRITICAL FIX: Build cortical_idx -> cortical_id mapping from ConnectomeService
488    // This uses the actual stored cortical_idx values instead of fabricating names
489    let cortical_areas_list = connectome_service
490        .list_cortical_areas()
491        .await
492        .map_err(|e| ApiError::internal(format!("Failed to list cortical areas: {}", e)))?;
493
494    let idx_to_id: HashMap<u32, String> = cortical_areas_list
495        .iter()
496        .map(|a| (a.cortical_idx, a.cortical_id.clone()))
497        .collect();
498
499    // Convert to area_id -> window_size HashMap using actual cortical_id
500    let mut areas: HashMap<String, usize> = HashMap::new();
501    for (cortical_idx, window_size) in configs {
502        let cortical_id = idx_to_id.get(&cortical_idx).cloned().ok_or_else(|| {
503            ApiError::internal(format!(
504                "Unmapped cortical_idx in Fire Ledger window config: idx={}. Refusing fallback (would corrupt determinism).",
505                cortical_idx
506            ))
507        })?;
508        areas.insert(cortical_id, window_size);
509    }
510
511    let mut response = HashMap::new();
512    response.insert("default_window_size".to_string(), serde_json::json!(20));
513    response.insert("areas".to_string(), serde_json::json!(areas));
514    response.insert(
515        "total_configured_areas".to_string(),
516        serde_json::json!(areas.len()),
517    );
518
519    Ok(Json(response))
520}
521
522// ============================================================================
523// BURST ENGINE CONTROL & STATUS ENDPOINTS
524// ============================================================================
525
526/// Get burst engine statistics including burst count, frequency, and performance metrics.
527#[utoipa::path(
528    get,
529    path = "/v1/burst_engine/stats",
530    tag = "burst_engine",
531    responses(
532        (status = 200, description = "Burst engine statistics", body = HashMap<String, serde_json::Value>),
533        (status = 500, description = "Internal server error")
534    )
535)]
536pub async fn get_stats(
537    State(state): State<ApiState>,
538) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
539    let runtime_service = state.runtime_service.as_ref();
540
541    match runtime_service.get_status().await {
542        Ok(status) => {
543            let mut response = HashMap::new();
544            response.insert(
545                "burst_count".to_string(),
546                serde_json::json!(status.burst_count),
547            );
548            response.insert(
549                "frequency_hz".to_string(),
550                serde_json::json!(status.frequency_hz),
551            );
552            response.insert("active".to_string(), serde_json::json!(status.is_running));
553            response.insert("paused".to_string(), serde_json::json!(status.is_paused));
554
555            Ok(Json(response))
556        }
557        Err(e) => Err(ApiError::internal(format!("Failed to get stats: {}", e))),
558    }
559}
560
561/// Get burst engine status including active state, pause state, and burst count.
562#[utoipa::path(
563    get,
564    path = "/v1/burst_engine/status",
565    tag = "burst_engine",
566    responses(
567        (status = 200, description = "Burst engine status", body = HashMap<String, serde_json::Value>),
568        (status = 500, description = "Internal server error")
569    )
570)]
571pub async fn get_status(
572    State(state): State<ApiState>,
573) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
574    let runtime_service = state.runtime_service.as_ref();
575
576    match runtime_service.get_status().await {
577        Ok(status) => {
578            let mut response = HashMap::new();
579            response.insert("active".to_string(), serde_json::json!(status.is_running));
580            response.insert("paused".to_string(), serde_json::json!(status.is_paused));
581            response.insert(
582                "burst_count".to_string(),
583                serde_json::json!(status.burst_count),
584            );
585            response.insert(
586                "frequency_hz".to_string(),
587                serde_json::json!(status.frequency_hz),
588            );
589
590            Ok(Json(response))
591        }
592        Err(e) => Err(ApiError::internal(format!("Failed to get status: {}", e))),
593    }
594}
595
596/// Control burst engine with actions: start, pause, stop, or resume.
597#[utoipa::path(
598    post,
599    path = "/v1/burst_engine/control",
600    tag = "burst_engine",
601    responses(
602        (status = 200, description = "Control command executed", body = HashMap<String, String>),
603        (status = 500, description = "Internal server error")
604    )
605)]
606pub async fn post_control(
607    State(state): State<ApiState>,
608    Json(request): Json<HashMap<String, String>>,
609) -> ApiResult<Json<HashMap<String, String>>> {
610    let runtime_service = state.runtime_service.as_ref();
611    let action = request.get("action").map(|s| s.as_str());
612
613    match action {
614        Some("start") | Some("resume") => {
615            runtime_service
616                .start()
617                .await
618                .map_err(|e| ApiError::internal(format!("Failed to start: {}", e)))?;
619            Ok(Json(HashMap::from([(
620                "message".to_string(),
621                "Burst engine started".to_string(),
622            )])))
623        }
624        Some("pause") => {
625            runtime_service
626                .pause()
627                .await
628                .map_err(|e| ApiError::internal(format!("Failed to pause: {}", e)))?;
629            Ok(Json(HashMap::from([(
630                "message".to_string(),
631                "Burst engine paused".to_string(),
632            )])))
633        }
634        Some("stop") => {
635            runtime_service
636                .stop()
637                .await
638                .map_err(|e| ApiError::internal(format!("Failed to stop: {}", e)))?;
639            Ok(Json(HashMap::from([(
640                "message".to_string(),
641                "Burst engine stopped".to_string(),
642            )])))
643        }
644        _ => Err(ApiError::invalid_input(
645            "Invalid action: must be 'start', 'pause', or 'stop'",
646        )),
647    }
648}
649
650// ============================================================================
651// FCL SAMPLER CONFIGURATION ENDPOINTS
652// ============================================================================
653
654/// Get FCL/FQ sampler configuration including frequency and consumer settings.
655#[utoipa::path(
656    get,
657    path = "/v1/burst_engine/fcl_sampler/config",
658    tag = "burst_engine",
659    responses(
660        (status = 200, description = "FCL sampler configuration", body = HashMap<String, serde_json::Value>),
661        (status = 500, description = "Internal server error")
662    )
663)]
664pub async fn get_fcl_sampler_config(
665    State(state): State<ApiState>,
666) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
667    let runtime_service = state.runtime_service.as_ref();
668
669    let (frequency, consumer) = runtime_service
670        .get_fcl_sampler_config()
671        .await
672        .map_err(|e| ApiError::internal(format!("Failed to get FCL sampler config: {}", e)))?;
673
674    let mut response = HashMap::new();
675    response.insert("frequency".to_string(), serde_json::json!(frequency));
676    response.insert("consumer".to_string(), serde_json::json!(consumer));
677
678    Ok(Json(response))
679}
680
681/// Update FCL/FQ sampler configuration including frequency and consumer settings.
682#[utoipa::path(
683    post,
684    path = "/v1/burst_engine/fcl_sampler/config",
685    tag = "burst_engine",
686    responses(
687        (status = 200, description = "FCL sampler configuration updated", body = HashMap<String, serde_json::Value>),
688        (status = 400, description = "Invalid input"),
689        (status = 500, description = "Internal server error")
690    )
691)]
692pub async fn post_fcl_sampler_config(
693    State(state): State<ApiState>,
694    Json(request): Json<HashMap<String, serde_json::Value>>,
695) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
696    let runtime_service = state.runtime_service.as_ref();
697
698    let frequency = request.get("frequency").and_then(|v| v.as_f64());
699    let consumer = request
700        .get("consumer")
701        .and_then(|v| v.as_u64().map(|n| n as u32));
702
703    runtime_service
704        .set_fcl_sampler_config(frequency, consumer)
705        .await
706        .map_err(|e| ApiError::internal(format!("Failed to update FCL sampler config: {}", e)))?;
707
708    // Return the updated config
709    let (freq, cons) = runtime_service
710        .get_fcl_sampler_config()
711        .await
712        .map_err(|e| ApiError::internal(format!("Failed to get updated config: {}", e)))?;
713
714    let mut response = HashMap::new();
715    response.insert("frequency".to_string(), serde_json::json!(freq));
716    response.insert("consumer".to_string(), serde_json::json!(cons));
717
718    Ok(Json(response))
719}
720
721/// Get FCL sample rate for a specific cortical area.
722#[utoipa::path(
723    get,
724    path = "/v1/burst_engine/fcl_sampler/area/{area_id}/sample_rate",
725    tag = "burst_engine",
726    params(
727        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
728    ),
729    responses(
730        (status = 200, description = "Sample rate", body = HashMap<String, f64>),
731        (status = 500, description = "Internal server error")
732    )
733)]
734pub async fn get_area_fcl_sample_rate(
735    State(state): State<ApiState>,
736    Path(area_id): Path<u32>,
737) -> ApiResult<Json<HashMap<String, f64>>> {
738    let runtime_service = state.runtime_service.as_ref();
739
740    let sample_rate = runtime_service
741        .get_area_fcl_sample_rate(area_id)
742        .await
743        .map_err(|e| ApiError::internal(format!("Failed to get sample rate: {}", e)))?;
744
745    let mut response = HashMap::new();
746    response.insert("sample_rate".to_string(), sample_rate);
747
748    Ok(Json(response))
749}
750
751/// Set FCL sample rate for a specific cortical area.
752#[utoipa::path(
753    post,
754    path = "/v1/burst_engine/fcl_sampler/area/{area_id}/sample_rate",
755    tag = "burst_engine",
756    params(
757        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
758    ),
759    responses(
760        (status = 200, description = "Sample rate updated", body = HashMap<String, f64>),
761        (status = 400, description = "Invalid input"),
762        (status = 500, description = "Internal server error")
763    )
764)]
765pub async fn post_area_fcl_sample_rate(
766    State(state): State<ApiState>,
767    Path(area_id): Path<u32>,
768    Json(request): Json<HashMap<String, f64>>,
769) -> ApiResult<Json<HashMap<String, f64>>> {
770    let runtime_service = state.runtime_service.as_ref();
771
772    let sample_rate = request
773        .get("sample_rate")
774        .copied()
775        .ok_or_else(|| ApiError::invalid_input("sample_rate required"))?;
776
777    if sample_rate <= 0.0 {
778        return Err(ApiError::invalid_input("Sample rate must be positive"));
779    }
780
781    runtime_service
782        .set_area_fcl_sample_rate(area_id, sample_rate)
783        .await
784        .map_err(|e| ApiError::internal(format!("Failed to set sample rate: {}", e)))?;
785
786    let mut response = HashMap::new();
787    response.insert("sample_rate".to_string(), sample_rate);
788
789    Ok(Json(response))
790}
791
792// ============================================================================
793// BURST ENGINE RUNTIME CONTROL ENDPOINTS
794// ============================================================================
795
796/// Get the total number of bursts executed since start.
797#[utoipa::path(
798    get,
799    path = "/v1/burst_engine/burst_counter",
800    tag = "burst_engine",
801    responses(
802        (status = 200, description = "Burst counter", body = u64),
803        (status = 500, description = "Internal server error")
804    )
805)]
806pub async fn get_burst_counter(State(state): State<ApiState>) -> ApiResult<Json<u64>> {
807    let runtime_service = state.runtime_service.as_ref();
808
809    let burst_count = runtime_service
810        .get_burst_count()
811        .await
812        .map_err(|e| ApiError::internal(format!("Failed to get burst counter: {}", e)))?;
813
814    Ok(Json(burst_count))
815}
816
817/// Start the burst engine to begin neural processing.
818#[utoipa::path(
819    post,
820    path = "/v1/burst_engine/start",
821    tag = "burst_engine",
822    responses(
823        (status = 200, description = "Burst engine started", body = HashMap<String, String>),
824        (status = 400, description = "Invalid state"),
825        (status = 500, description = "Internal server error")
826    )
827)]
828pub async fn post_start(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
829    let runtime_service = state.runtime_service.as_ref();
830
831    runtime_service
832        .start()
833        .await
834        .map_err(|e| ApiError::internal(format!("Failed to start burst engine: {}", e)))?;
835
836    Ok(Json(HashMap::from([(
837        "message".to_string(),
838        "Burst engine started successfully".to_string(),
839    )])))
840}
841
842/// Stop the burst engine and halt neural processing.
843#[utoipa::path(
844    post,
845    path = "/v1/burst_engine/stop",
846    tag = "burst_engine",
847    responses(
848        (status = 200, description = "Burst engine stopped", body = HashMap<String, String>),
849        (status = 500, description = "Internal server error")
850    )
851)]
852pub async fn post_stop(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
853    let runtime_service = state.runtime_service.as_ref();
854
855    runtime_service
856        .stop()
857        .await
858        .map_err(|e| ApiError::internal(format!("Failed to stop burst engine: {}", e)))?;
859
860    Ok(Json(HashMap::from([(
861        "message".to_string(),
862        "Burst engine stopped successfully".to_string(),
863    )])))
864}
865
866/// Pause the burst engine temporarily (alias for pause).
867#[utoipa::path(
868    post,
869    path = "/v1/burst_engine/hold",
870    tag = "burst_engine",
871    responses(
872        (status = 200, description = "Burst engine paused", body = HashMap<String, String>),
873        (status = 400, description = "Invalid state"),
874        (status = 500, description = "Internal server error")
875    )
876)]
877pub async fn post_hold(State(state): State<ApiState>) -> ApiResult<Json<HashMap<String, String>>> {
878    let runtime_service = state.runtime_service.as_ref();
879
880    runtime_service
881        .pause()
882        .await
883        .map_err(|e| ApiError::internal(format!("Failed to pause burst engine: {}", e)))?;
884
885    Ok(Json(HashMap::from([(
886        "message".to_string(),
887        "Burst engine paused successfully".to_string(),
888    )])))
889}
890
891/// Resume the burst engine after pause to continue neural processing.
892#[utoipa::path(
893    post,
894    path = "/v1/burst_engine/resume",
895    tag = "burst_engine",
896    responses(
897        (status = 200, description = "Burst engine resumed", body = HashMap<String, String>),
898        (status = 400, description = "Invalid state"),
899        (status = 500, description = "Internal server error")
900    )
901)]
902pub async fn post_resume(
903    State(state): State<ApiState>,
904) -> ApiResult<Json<HashMap<String, String>>> {
905    let runtime_service = state.runtime_service.as_ref();
906
907    runtime_service
908        .resume()
909        .await
910        .map_err(|e| ApiError::internal(format!("Failed to resume burst engine: {}", e)))?;
911
912    Ok(Json(HashMap::from([(
913        "message".to_string(),
914        "Burst engine resumed successfully".to_string(),
915    )])))
916}
917
918/// Get burst engine configuration including frequency and timing settings.
919#[utoipa::path(
920    get,
921    path = "/v1/burst_engine/config",
922    tag = "burst_engine",
923    responses(
924        (status = 200, description = "Burst engine configuration", body = HashMap<String, serde_json::Value>),
925        (status = 500, description = "Internal server error")
926    )
927)]
928pub async fn get_config(
929    State(state): State<ApiState>,
930) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
931    let runtime_service = state.runtime_service.as_ref();
932
933    let status = runtime_service
934        .get_status()
935        .await
936        .map_err(|e| ApiError::internal(format!("Failed to get config: {}", e)))?;
937
938    let mut response = HashMap::new();
939    response.insert(
940        "burst_frequency_hz".to_string(),
941        serde_json::json!(status.frequency_hz),
942    );
943    response.insert(
944        "burst_interval_seconds".to_string(),
945        serde_json::json!(1.0 / status.frequency_hz),
946    );
947    response.insert(
948        "target_frequency_hz".to_string(),
949        serde_json::json!(status.frequency_hz),
950    );
951    response.insert(
952        "is_running".to_string(),
953        serde_json::json!(status.is_running),
954    );
955    response.insert("is_paused".to_string(), serde_json::json!(status.is_paused));
956
957    Ok(Json(response))
958}
959
960/// Update burst engine configuration including frequency and timing parameters.
961#[utoipa::path(
962    put,
963    path = "/v1/burst_engine/config",
964    tag = "burst_engine",
965    responses(
966        (status = 200, description = "Configuration updated", body = HashMap<String, serde_json::Value>),
967        (status = 400, description = "Invalid input"),
968        (status = 500, description = "Internal server error")
969    )
970)]
971pub async fn put_config(
972    State(state): State<ApiState>,
973    Json(request): Json<HashMap<String, serde_json::Value>>,
974) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
975    let runtime_service = state.runtime_service.as_ref();
976
977    // Extract burst_frequency_hz from request
978    if let Some(freq) = request.get("burst_frequency_hz").and_then(|v| v.as_f64()) {
979        if freq <= 0.0 {
980            return Err(ApiError::invalid_input("Frequency must be positive"));
981        }
982
983        runtime_service
984            .set_frequency(freq)
985            .await
986            .map_err(|e| ApiError::internal(format!("Failed to set frequency: {}", e)))?;
987    }
988
989    // Return updated config
990    get_config(State(state)).await
991}
992
993// ============================================================================
994// FIRE LEDGER ENDPOINTS
995// ============================================================================
996
997/// Get fire ledger window size for a specific cortical area.
998#[utoipa::path(
999    get,
1000    path = "/v1/burst_engine/fire_ledger/area/{area_id}/window_size",
1001    tag = "burst_engine",
1002    params(
1003        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
1004    ),
1005    responses(
1006        (status = 200, description = "Window size", body = i32),
1007        (status = 500, description = "Internal server error")
1008    )
1009)]
1010pub async fn get_fire_ledger_area_window_size(
1011    State(state): State<ApiState>,
1012    Path(area_id): Path<u32>,
1013) -> ApiResult<Json<i32>> {
1014    let runtime_service = state.runtime_service.as_ref();
1015
1016    let configs = runtime_service
1017        .get_fire_ledger_configs()
1018        .await
1019        .map_err(|e| ApiError::internal(format!("Failed to get fire ledger configs: {}", e)))?;
1020
1021    // Find the window size for this area
1022    for (idx, window_size) in configs {
1023        if idx == area_id {
1024            return Ok(Json(window_size as i32));
1025        }
1026    }
1027
1028    Err(ApiError::not_found(
1029        "FireLedgerArea",
1030        &format!("cortical_idx={}", area_id),
1031    ))
1032}
1033
1034/// Set fire ledger window size for a specific cortical area.
1035#[utoipa::path(
1036    put,
1037    path = "/v1/burst_engine/fire_ledger/area/{area_id}/window_size",
1038    tag = "burst_engine",
1039    params(
1040        ("area_id" = u32, Path, description = "Cortical area ID (cortical_idx)")
1041    ),
1042    responses(
1043        (status = 200, description = "Window size updated", body = HashMap<String, serde_json::Value>),
1044        (status = 400, description = "Invalid input"),
1045        (status = 500, description = "Internal server error")
1046    )
1047)]
1048pub async fn put_fire_ledger_area_window_size(
1049    State(state): State<ApiState>,
1050    Path(area_id): Path<u32>,
1051    Json(request): Json<HashMap<String, i32>>,
1052) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1053    let runtime_service = state.runtime_service.as_ref();
1054
1055    let window_size = request
1056        .get("window_size")
1057        .copied()
1058        .ok_or_else(|| ApiError::invalid_input("window_size required"))?;
1059
1060    if window_size <= 0 {
1061        return Err(ApiError::invalid_input("Window size must be positive"));
1062    }
1063
1064    runtime_service
1065        .configure_fire_ledger_window(area_id, window_size as usize)
1066        .await
1067        .map_err(|e| ApiError::internal(format!("Failed to configure window: {}", e)))?;
1068
1069    let mut response = HashMap::new();
1070    response.insert("success".to_string(), serde_json::json!(true));
1071    response.insert("area_id".to_string(), serde_json::json!(area_id));
1072    response.insert("window_size".to_string(), serde_json::json!(window_size));
1073
1074    Ok(Json(response))
1075}
1076
1077/// Get fire ledger historical data for a specific cortical area.
1078#[utoipa::path(
1079    get,
1080    path = "/v1/burst_engine/fire_ledger/area/{area_id}/history",
1081    tag = "burst_engine",
1082    params(
1083        ("area_id" = String, Path, description = "Cortical area ID or index"),
1084        ("lookback_steps" = Option<i32>, Query, description = "Number of timesteps to retrieve")
1085    ),
1086    responses(
1087        (status = 200, description = "Fire ledger history", body = HashMap<String, serde_json::Value>),
1088        (status = 400, description = "Invalid area ID"),
1089        (status = 500, description = "Internal server error")
1090    )
1091)]
1092pub async fn get_fire_ledger_history(
1093    State(_state): State<ApiState>,
1094    Path(area_id): Path<String>,
1095    Query(params): Query<HashMap<String, String>>,
1096) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1097    // Parse area_id as cortical_idx
1098    let cortical_idx = area_id
1099        .parse::<u32>()
1100        .map_err(|_| ApiError::invalid_input(format!("Invalid area_id: {}", area_id)))?;
1101
1102    let _lookback_steps = params
1103        .get("lookback_steps")
1104        .and_then(|s| s.parse::<i32>().ok());
1105
1106    Err(ApiError::internal(format!(
1107        "Fire ledger history retrieval is not yet implemented (requested cortical_idx={})",
1108        cortical_idx
1109    )))
1110}
1111
1112// ============================================================================
1113// MEMBRANE POTENTIALS ENDPOINTS
1114// ============================================================================
1115
1116/// Get membrane potentials for specific neurons.
1117#[utoipa::path(
1118    get,
1119    path = "/v1/burst_engine/membrane_potentials",
1120    tag = "burst_engine",
1121    params(
1122        ("neuron_ids" = Vec<u64>, Query, description = "List of neuron IDs")
1123    ),
1124    responses(
1125        (status = 200, description = "Membrane potentials", body = HashMap<String, f32>),
1126        (status = 500, description = "Internal server error")
1127    )
1128)]
1129pub async fn get_membrane_potentials(
1130    State(_state): State<ApiState>,
1131    Query(params): Query<HashMap<String, String>>,
1132) -> ApiResult<Json<HashMap<String, f32>>> {
1133    // Parse neuron_ids from query params
1134    let neuron_ids_str = params
1135        .get("neuron_ids")
1136        .ok_or_else(|| ApiError::invalid_input("neuron_ids parameter required"))?;
1137
1138    // TODO: Parse comma-separated neuron IDs and fetch from NPU
1139    // For now, return empty
1140    tracing::debug!(target: "feagi-api", "GET membrane_potentials for neuron_ids: {}", neuron_ids_str);
1141
1142    Ok(Json(HashMap::new()))
1143}
1144
1145/// Update membrane potentials for specific neurons.
1146#[utoipa::path(
1147    put,
1148    path = "/v1/burst_engine/membrane_potentials",
1149    tag = "burst_engine",
1150    responses(
1151        (status = 200, description = "Membrane potentials updated", body = HashMap<String, String>),
1152        (status = 400, description = "Invalid input"),
1153        (status = 500, description = "Internal server error")
1154    )
1155)]
1156pub async fn put_membrane_potentials(
1157    State(_state): State<ApiState>,
1158    Json(potentials): Json<HashMap<String, f32>>,
1159) -> ApiResult<Json<HashMap<String, String>>> {
1160    // TODO: Update membrane potentials in NPU
1161    tracing::info!(target: "feagi-api", "PUT membrane_potentials: {} neurons", potentials.len());
1162
1163    Ok(Json(HashMap::from([(
1164        "message".to_string(),
1165        format!("Updated {} neuron membrane potentials", potentials.len()),
1166    )])))
1167}
1168
1169// ============================================================================
1170// FREQUENCY MEASUREMENT ENDPOINTS
1171// ============================================================================
1172
1173/// Get current burst frequency measurement status.
1174#[utoipa::path(
1175    get,
1176    path = "/v1/burst_engine/frequency_status",
1177    tag = "burst_engine",
1178    responses(
1179        (status = 200, description = "Frequency status", body = HashMap<String, serde_json::Value>),
1180        (status = 500, description = "Internal server error")
1181    )
1182)]
1183pub async fn get_frequency_status(
1184    State(state): State<ApiState>,
1185) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1186    let runtime_service = state.runtime_service.as_ref();
1187
1188    let status = runtime_service
1189        .get_status()
1190        .await
1191        .map_err(|e| ApiError::internal(format!("Failed to get status: {}", e)))?;
1192
1193    let mut response = HashMap::new();
1194    response.insert(
1195        "target_frequency_hz".to_string(),
1196        serde_json::json!(status.frequency_hz),
1197    );
1198    response.insert(
1199        "actual_frequency_hz".to_string(),
1200        serde_json::json!(status.frequency_hz),
1201    );
1202    response.insert(
1203        "burst_count".to_string(),
1204        serde_json::json!(status.burst_count),
1205    );
1206    response.insert("is_measuring".to_string(), serde_json::json!(false));
1207
1208    Ok(Json(response))
1209}
1210
1211/// Trigger burst frequency measurement to analyze current processing rate.
1212#[utoipa::path(
1213    post,
1214    path = "/v1/burst_engine/measure_frequency",
1215    tag = "burst_engine",
1216    responses(
1217        (status = 200, description = "Measurement started", body = HashMap<String, serde_json::Value>),
1218        (status = 500, description = "Internal server error")
1219    )
1220)]
1221pub async fn post_measure_frequency(
1222    State(_state): State<ApiState>,
1223    Json(request): Json<HashMap<String, serde_json::Value>>,
1224) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1225    let duration = request
1226        .get("duration_seconds")
1227        .and_then(|v| v.as_f64())
1228        .unwrap_or(5.0);
1229    let sample_count = request
1230        .get("sample_count")
1231        .and_then(|v| v.as_i64())
1232        .unwrap_or(100) as i32;
1233
1234    tracing::info!(target: "feagi-api", "Starting frequency measurement: {}s, {} samples", duration, sample_count);
1235
1236    // TODO: Implement frequency measurement
1237    let mut response = HashMap::new();
1238    response.insert("status".to_string(), serde_json::json!("started"));
1239    response.insert("duration_seconds".to_string(), serde_json::json!(duration));
1240    response.insert("sample_count".to_string(), serde_json::json!(sample_count));
1241
1242    Ok(Json(response))
1243}
1244
1245/// Get burst frequency measurement history over time.
1246#[utoipa::path(
1247    get,
1248    path = "/v1/burst_engine/frequency_history",
1249    tag = "burst_engine",
1250    params(
1251        ("limit" = Option<i32>, Query, description = "Number of measurements to return")
1252    ),
1253    responses(
1254        (status = 200, description = "Frequency history", body = HashMap<String, serde_json::Value>),
1255        (status = 500, description = "Internal server error")
1256    )
1257)]
1258pub async fn get_frequency_history(
1259    State(_state): State<ApiState>,
1260    Query(params): Query<HashMap<String, String>>,
1261) -> ApiResult<Json<HashMap<String, serde_json::Value>>> {
1262    let limit = params
1263        .get("limit")
1264        .and_then(|s| s.parse::<i32>().ok())
1265        .unwrap_or(10);
1266
1267    // TODO: Implement frequency history retrieval
1268    let mut response = HashMap::new();
1269    response.insert("measurements".to_string(), serde_json::json!([]));
1270    response.insert("limit".to_string(), serde_json::json!(limit));
1271
1272    Ok(Json(response))
1273}
1274
1275/// Force connectome integration to rebuild neural connections immediately.
1276#[utoipa::path(
1277    post,
1278    path = "/v1/burst_engine/force_connectome_integration",
1279    tag = "burst_engine",
1280    responses(
1281        (status = 200, description = "Integration forced", body = HashMap<String, String>),
1282        (status = 500, description = "Internal server error")
1283    )
1284)]
1285pub async fn post_force_connectome_integration(
1286    State(_state): State<ApiState>,
1287) -> ApiResult<Json<HashMap<String, String>>> {
1288    // TODO: Implement connectome integration forcing
1289    tracing::info!(target: "feagi-api", "Force connectome integration requested");
1290
1291    Ok(Json(HashMap::from([
1292        (
1293            "message".to_string(),
1294            "Connectome integration initiated".to_string(),
1295        ),
1296        ("status".to_string(), "not_yet_implemented".to_string()),
1297    ])))
1298}