use crate::{GpsPoint, RouteGroup, RouteSignature, elapsed_ms, init_logging};
use log::info;
use std::time::Instant;
/// Callback interface implemented on the foreign (host) side so Rust can
/// report download progress during activity-map fetches.
#[uniffi::export(callback_interface)]
pub trait FetchProgressCallback: Send + Sync {
    /// Invoked as activities finish: `completed` out of `total`.
    fn on_progress(&self, completed: u32, total: u32);
}
/// Snapshot of the shared download-progress counters, returned to the host
/// by `get_download_progress`.
#[derive(Debug, Clone, uniffi::Record)]
pub struct DownloadProgressResult {
    /// Activities downloaded so far.
    pub completed: u32,
    /// Total activities in the current batch.
    pub total: u32,
    /// Whether a download is currently running.
    pub active: bool,
}
/// Pairs an activity id with its sport type; used to carry the sport
/// mapping for section detection across the FFI boundary.
#[derive(Debug, Clone, uniffi::Record)]
pub struct ActivitySportType {
    pub activity_id: String,
    pub sport_type: String,
}
/// FFI entry point: returns the crate's built-in scale presets for
/// multi-scale section detection.
#[uniffi::export]
pub fn default_scale_presets() -> Vec<crate::ScalePreset> {
    crate::ScalePreset::default_presets()
}
/// FFI entry point: detect repeated route sections across activities at
/// multiple scales.
///
/// `all_coords` is a flat buffer of coordinate pairs holding every
/// activity's points back to back; `offsets[i]` is the *point* index (not
/// the f64 index) at which activity `i` begins. Sections and potentials
/// whose polylines have fewer than 2 points are filtered out before
/// returning.
///
/// Malformed inputs (short or unsorted `offsets`, truncated coordinate
/// buffer) are tolerated rather than panicking, since a panic here would
/// unwind across the FFI boundary.
#[uniffi::export]
pub fn ffi_detect_sections_multiscale(
    activity_ids: Vec<String>,
    all_coords: Vec<f64>,
    offsets: Vec<u32>,
    sport_types: Vec<ActivitySportType>,
    groups: Vec<RouteGroup>,
    config: crate::SectionConfig,
) -> crate::MultiScaleSectionResult {
    init_logging();
    let ffi_start = Instant::now();
    info!(
        "[RUST: detect_sections_multiscale] FFI called with {} activities, {} coords, {} scales",
        activity_ids.len(),
        all_coords.len() / 2,
        config.scale_presets.len()
    );
    let convert_start = Instant::now();
    let total_points = all_coords.len() / 2;
    let mut tracks: Vec<(String, Vec<GpsPoint>)> = Vec::with_capacity(activity_ids.len());
    for (i, activity_id) in activity_ids.iter().enumerate() {
        // `offsets` shorter than `activity_ids` must not panic across the
        // FFI boundary; stop converting instead.
        let start_offset = match offsets.get(i) {
            Some(&o) => o as usize,
            None => break,
        };
        let end_offset = offsets
            .get(i + 1)
            .map(|&o| o as usize)
            .unwrap_or(total_points);
        // saturating_sub guards against unsorted offsets underflowing the
        // capacity computation.
        let mut points = Vec::with_capacity(end_offset.saturating_sub(start_offset));
        for j in start_offset..end_offset {
            let coord_idx = j * 2;
            // Bounds check guards against offsets overrunning the buffer.
            if coord_idx + 1 < all_coords.len() {
                points.push(GpsPoint::new(
                    all_coords[coord_idx],
                    all_coords[coord_idx + 1],
                ));
            }
        }
        if !points.is_empty() {
            tracks.push((activity_id.clone(), points));
        }
    }
    info!(
        "[RUST: detect_sections_multiscale] Converted {} tracks ({} ms)",
        tracks.len(),
        elapsed_ms(convert_start)
    );
    let sport_map_start = Instant::now();
    let sport_map: std::collections::HashMap<String, String> = sport_types
        .into_iter()
        .map(|st| (st.activity_id, st.sport_type))
        .collect();
    info!(
        "[RUST: detect_sections_multiscale] Built sport map ({} ms)",
        elapsed_ms(sport_map_start)
    );
    let detect_start = Instant::now();
    let result = crate::sections::detect_sections_multiscale(&tracks, &sport_map, &groups, &config);
    info!(
        "[RUST: detect_sections_multiscale] Detection complete: {} raw sections, {} raw potentials ({} ms)",
        result.sections.len(),
        result.potentials.len(),
        elapsed_ms(detect_start)
    );
    // Drop degenerate polylines (fewer than 2 points).
    let filter_start = Instant::now();
    let filtered_sections: Vec<_> = result
        .sections
        .into_iter()
        .filter(|s| s.polyline.len() >= 2)
        .collect();
    let filtered_potentials: Vec<_> = result
        .potentials
        .into_iter()
        .filter(|p| p.polyline.len() >= 2)
        .collect();
    info!(
        "[RUST: detect_sections_multiscale] Filtered to {} sections, {} potentials ({} ms)",
        filtered_sections.len(),
        filtered_potentials.len(),
        elapsed_ms(filter_start)
    );
    info!(
        "[RUST: detect_sections_multiscale] Complete ({} ms)",
        elapsed_ms(ffi_start)
    );
    crate::MultiScaleSectionResult {
        sections: filtered_sections,
        potentials: filtered_potentials,
        stats: result.stats,
    }
}
/// FFI entry point: build a heatmap from route signatures and per-activity
/// heatmap data, delegating to `crate::generate_heatmap`.
#[uniffi::export]
pub fn ffi_generate_heatmap(
    signatures: Vec<RouteSignature>,
    activity_data: Vec<crate::ActivityHeatmapData>,
    config: crate::HeatmapConfig,
) -> crate::HeatmapResult {
    init_logging();
    let overall_timer = Instant::now();
    info!(
        "[RUST: generate_heatmap] FFI called with {} signatures, {}m cells",
        signatures.len(),
        config.cell_size_meters
    );

    // Index the per-activity data by activity id for fast lookup.
    let index_timer = Instant::now();
    let mut data_map: std::collections::HashMap<String, crate::ActivityHeatmapData> =
        std::collections::HashMap::with_capacity(activity_data.len());
    for entry in activity_data {
        data_map.insert(entry.activity_id.clone(), entry);
    }
    info!(
        "[RUST: generate_heatmap] Built data map with {} entries ({} ms)",
        data_map.len(),
        elapsed_ms(index_timer)
    );

    let generation_timer = Instant::now();
    let result = crate::generate_heatmap(&signatures, &data_map, &config);
    info!(
        "[RUST: generate_heatmap] Generated {} cells, {} routes, {} activities ({} ms)",
        result.cells.len(),
        result.total_routes,
        result.total_activities,
        elapsed_ms(generation_timer)
    );
    info!(
        "[RUST: generate_heatmap] Complete ({} ms)",
        elapsed_ms(overall_timer)
    );
    result
}
/// Per-activity result of a map fetch, flattened for FFI transfer.
#[cfg(feature = "http")]
#[derive(Debug, Clone, uniffi::Record)]
pub struct FfiActivityMapResult {
    pub activity_id: String,
    /// Flattened bounding box `[ne[0], ne[1], sw[0], sw[1]]`; empty when
    /// the fetch returned no bounds.
    pub bounds: Vec<f64>,
    /// Flattened coordinate pairs; empty when the fetch returned no track.
    pub latlngs: Vec<f64>,
    /// Whether the fetch for this activity succeeded.
    pub success: bool,
    /// Error message from the fetch, if any.
    pub error: Option<String>,
}
/// FFI entry point: synchronously fetch map data for the given activities
/// using the provided authorization header.
#[cfg(feature = "http")]
#[uniffi::export]
pub fn fetch_activity_maps(
    auth_header: String,
    activity_ids: Vec<String>,
) -> Vec<FfiActivityMapResult> {
    init_logging();
    let overall_timer = Instant::now();
    let requested = activity_ids.len();
    info!(
        "[RUST: fetch_activity_maps] FFI called with {} activities",
        requested
    );

    let fetch_timer = Instant::now();
    let raw_results = crate::http::fetch_activity_maps_sync(auth_header, activity_ids, None);
    let ok_count = raw_results.iter().filter(|r| r.success).count();
    info!(
        "[RUST: fetch_activity_maps] Fetched {}/{} successfully ({} ms)",
        ok_count,
        requested,
        elapsed_ms(fetch_timer)
    );

    // Flatten bounds/latlngs into the plain f64 vectors the FFI layer expects.
    let convert_timer = Instant::now();
    let mut ffi_results = Vec::with_capacity(raw_results.len());
    for r in raw_results {
        let bounds = match r.bounds {
            Some(b) => vec![b.ne[0], b.ne[1], b.sw[0], b.sw[1]],
            None => vec![],
        };
        let latlngs = match r.latlngs {
            Some(coords) => coords.into_iter().flat_map(|p| vec![p[0], p[1]]).collect(),
            None => vec![],
        };
        ffi_results.push(FfiActivityMapResult {
            activity_id: r.activity_id,
            bounds,
            latlngs,
            success: r.success,
            error: r.error,
        });
    }
    info!(
        "[RUST: fetch_activity_maps] Converted to FFI format ({} ms)",
        elapsed_ms(convert_timer)
    );
    info!(
        "[RUST: fetch_activity_maps] Complete ({} ms)",
        elapsed_ms(overall_timer)
    );
    ffi_results
}
/// FFI entry point: synchronously fetch activity maps while reporting
/// per-activity progress to the supplied callback.
#[cfg(feature = "http")]
#[uniffi::export]
pub fn fetch_activity_maps_with_progress(
    auth_header: String,
    activity_ids: Vec<String>,
    callback: Box<dyn FetchProgressCallback>,
) -> Vec<FfiActivityMapResult> {
    use std::sync::Arc;
    init_logging();
    let overall_timer = Instant::now();
    let requested = activity_ids.len();
    info!(
        "[RUST: fetch_activity_maps_with_progress] FFI called with {} activities",
        requested
    );

    // Bridge the boxed foreign callback into the shared closure type the
    // HTTP layer expects.
    let shared_callback = Arc::new(callback);
    let progress_callback: crate::http::ProgressCallback =
        Arc::new(move |completed, total| shared_callback.on_progress(completed, total));

    let fetch_timer = Instant::now();
    let raw_results =
        crate::http::fetch_activity_maps_sync(auth_header, activity_ids, Some(progress_callback));
    let ok_count = raw_results.iter().filter(|r| r.success).count();
    info!(
        "[RUST: fetch_activity_maps_with_progress] Fetched {}/{} successfully ({} ms)",
        ok_count,
        requested,
        elapsed_ms(fetch_timer)
    );

    // Flatten bounds/latlngs into the plain f64 vectors the FFI layer expects.
    let convert_timer = Instant::now();
    let mut ffi_results = Vec::with_capacity(raw_results.len());
    for r in raw_results {
        let bounds = match r.bounds {
            Some(b) => vec![b.ne[0], b.ne[1], b.sw[0], b.sw[1]],
            None => vec![],
        };
        let latlngs = match r.latlngs {
            Some(coords) => coords.into_iter().flat_map(|p| vec![p[0], p[1]]).collect(),
            None => vec![],
        };
        ffi_results.push(FfiActivityMapResult {
            activity_id: r.activity_id,
            bounds,
            latlngs,
            success: r.success,
            error: r.error,
        });
    }
    info!(
        "[RUST: fetch_activity_maps_with_progress] Converted to FFI format ({} ms)",
        elapsed_ms(convert_timer)
    );
    info!(
        "[RUST: fetch_activity_maps_with_progress] Complete ({} ms)",
        elapsed_ms(overall_timer)
    );
    ffi_results
}
/// FFI entry point: snapshot the current download-progress state.
#[cfg(feature = "http")]
#[uniffi::export]
pub fn get_download_progress() -> DownloadProgressResult {
    let (done, expected, in_flight) = crate::http::get_download_progress();
    DownloadProgressResult {
        completed: done,
        total: expected,
        active: in_flight,
    }
}
/// FFI entry point: kick off a detached background fetch. Returns to the
/// caller immediately after the HTTP layer spawns its worker thread.
#[cfg(feature = "http")]
#[uniffi::export]
pub fn start_background_fetch(auth_header: String, activity_ids: Vec<String>) {
    init_logging();
    let call_timer = Instant::now();
    info!(
        "[RUST: start_background_fetch] FFI called with {} activities",
        activity_ids.len()
    );
    crate::http::start_background_fetch(auth_header, activity_ids);
    info!(
        "[RUST: start_background_fetch] Thread spawned, returning to caller ({} ms)",
        elapsed_ms(call_timer)
    );
}
/// FFI entry point: take (and clear) the results of a completed background
/// fetch, converted to FFI records; `None` when no results are ready.
#[cfg(feature = "http")]
#[uniffi::export]
pub fn take_background_fetch_results() -> Option<Vec<FfiActivityMapResult>> {
    init_logging();
    let overall_timer = Instant::now();
    // Early-return when the background fetch has nothing for us yet.
    let raw_results = crate::http::take_background_fetch_results()?;
    let total = raw_results.len();
    let ok_count = raw_results.iter().filter(|r| r.success).count();
    info!(
        "[RUST: take_background_fetch_results] Converting {} results ({} successful)",
        total, ok_count
    );

    // Flatten bounds/latlngs into the plain f64 vectors the FFI layer expects.
    let convert_timer = Instant::now();
    let mut converted = Vec::with_capacity(total);
    for r in raw_results {
        let bounds = match r.bounds {
            Some(b) => vec![b.ne[0], b.ne[1], b.sw[0], b.sw[1]],
            None => vec![],
        };
        let latlngs = match r.latlngs {
            Some(coords) => coords.into_iter().flat_map(|p| vec![p[0], p[1]]).collect(),
            None => vec![],
        };
        converted.push(FfiActivityMapResult {
            activity_id: r.activity_id,
            bounds,
            latlngs,
            success: r.success,
            error: r.error,
        });
    }
    info!(
        "[RUST: take_background_fetch_results] Converted to FFI format ({} ms)",
        elapsed_ms(convert_timer)
    );
    info!(
        "[RUST: take_background_fetch_results] Complete with results ({} ms)",
        elapsed_ms(overall_timer)
    );
    Some(converted)
}
/// Summary of a completed `start_fetch_and_store` run, retrieved by the
/// host via `take_fetch_and_store_result`.
#[cfg(all(feature = "http", feature = "persistence"))]
#[derive(Debug, Clone, uniffi::Record)]
pub struct FetchAndStoreResult {
    /// Ids successfully fetched and stored.
    pub synced_ids: Vec<String>,
    /// Ids that failed to fetch, validate, or store.
    pub failed_ids: Vec<String>,
    /// Activities processed in total (synced + failed).
    pub total: u32,
    /// Count of successfully synced activities (same as `synced_ids.len()`).
    pub success_count: u32,
    /// GPS points stored across all synced activities.
    pub total_points: u32,
    /// Wall-clock milliseconds spent in the HTTP fetch phase.
    pub fetch_time_ms: u32,
    /// Wall-clock milliseconds spent in the storage phase.
    pub storage_time_ms: u32,
    /// Total background-thread milliseconds from start to finish.
    pub total_time_ms: u32,
}
/// Activity-id → sport-type pairing passed into `start_fetch_and_store`.
// NOTE(review): structurally identical to `ActivitySportType`; kept as a
// separate record for the persistence-gated API — consider unifying.
#[cfg(all(feature = "http", feature = "persistence"))]
#[derive(Debug, Clone, uniffi::Record)]
pub struct ActivitySportMapping {
    pub activity_id: String,
    pub sport_type: String,
}
/// FFI entry point: asynchronously fetch activity maps and persist them.
///
/// Clears any previous result, resets the shared download-progress
/// counters, then spawns a detached thread that (1) builds a tokio
/// runtime, (2) fetches all activity maps over HTTP, and (3) stores each
/// valid track through the persistence engine. Progress is observable via
/// `get_download_progress()` and the final summary via
/// `take_fetch_and_store_result()`.
#[cfg(all(feature = "http", feature = "persistence"))]
#[uniffi::export]
pub fn start_fetch_and_store(
    auth_header: String,
    activity_ids: Vec<String>,
    sport_types: Vec<ActivitySportMapping>,
) {
    use std::collections::HashMap;
    init_logging();
    let ffi_start = Instant::now();
    let activity_count = activity_ids.len();
    info!(
        "[RUST: start_fetch_and_store] FFI called with {} activities",
        activity_count
    );
    // Mirrored to stderr so the message is visible even without a log sink.
    eprintln!(
        "[RUST: start_fetch_and_store] FFI called with {} activities",
        activity_count
    );
    let sport_map_start = Instant::now();
    let sport_map: HashMap<String, String> = sport_types
        .into_iter()
        .map(|m| (m.activity_id, m.sport_type))
        .collect();
    info!(
        "[RUST: start_fetch_and_store] Built sport map with {} entries ({} ms)",
        sport_map.len(),
        elapsed_ms(sport_map_start)
    );
    // Discard any stale result from a previous run before starting this one.
    if let Ok(mut results) = FETCH_AND_STORE_RESULT.lock() {
        *results = None;
    }
    crate::http::reset_download_progress(activity_ids.len() as u32);
    info!(
        "[RUST: start_fetch_and_store] Spawning background thread ({} ms)",
        elapsed_ms(ffi_start)
    );
    std::thread::spawn(move || {
        let thread_start = Instant::now();
        info!(
            "[RUST: start_fetch_and_store] Thread started for {} activities",
            activity_ids.len()
        );
        eprintln!(
            "[RUST: start_fetch_and_store] Thread started for {} activities",
            activity_ids.len()
        );
        let runtime_start = Instant::now();
        let rt = match tokio::runtime::Builder::new_multi_thread()
            .worker_threads(4)
            .enable_all()
            .build()
        {
            Ok(rt) => {
                info!(
                    "[RUST: start_fetch_and_store] Created tokio runtime ({} ms)",
                    elapsed_ms(runtime_start)
                );
                rt
            }
            Err(e) => {
                info!(
                    "[RUST: start_fetch_and_store] Failed to create runtime: {} ({} ms)",
                    e,
                    elapsed_ms(runtime_start)
                );
                // Report every activity as failed so the host does not hang.
                crate::http::finish_download_progress();
                store_fetch_and_store_result(FetchAndStoreResult {
                    synced_ids: vec![],
                    failed_ids: activity_ids,
                    total: 0,
                    success_count: 0,
                    total_points: 0,
                    fetch_time_ms: 0,
                    storage_time_ms: 0,
                    total_time_ms: elapsed_ms(thread_start) as u32,
                });
                return;
            }
        };
        let client_start = Instant::now();
        let fetcher = match crate::http::ActivityFetcher::with_auth_header(auth_header) {
            Ok(f) => {
                info!(
                    "[RUST: start_fetch_and_store] Created HTTP client ({} ms)",
                    elapsed_ms(client_start)
                );
                f
            }
            Err(e) => {
                info!(
                    "[RUST: start_fetch_and_store] Failed to create HTTP client: {} ({} ms)",
                    e,
                    elapsed_ms(client_start)
                );
                crate::http::finish_download_progress();
                store_fetch_and_store_result(FetchAndStoreResult {
                    synced_ids: vec![],
                    failed_ids: activity_ids,
                    total: 0,
                    success_count: 0,
                    total_points: 0,
                    fetch_time_ms: 0,
                    storage_time_ms: 0,
                    total_time_ms: elapsed_ms(thread_start) as u32,
                });
                return;
            }
        };
        let fetch_start = Instant::now();
        // Both error arms above diverge with `return`, so `activity_ids`
        // is still owned here — no defensive clone is needed.
        let fetch_results = rt.block_on(fetcher.fetch_activity_maps(activity_ids, None));
        let fetch_success_count = fetch_results.iter().filter(|r| r.success).count();
        info!(
            "[RUST: start_fetch_and_store] Fetch complete: {}/{} successful ({} ms)",
            fetch_success_count,
            fetch_results.len(),
            elapsed_ms(fetch_start)
        );
        let storage_start = Instant::now();
        let mut synced_ids = Vec::new();
        let mut failed_ids = Vec::new();
        let mut total_points: usize = 0;
        let num_results = fetch_results.len();
        info!(
            "[RUST: PERF] Storage: processing {} activities SEQUENTIALLY (SQLite limitation)",
            num_results
        );
        for (idx, result) in fetch_results.into_iter().enumerate() {
            let activity_start = Instant::now();
            if result.success {
                if let Some(latlngs) = result.latlngs {
                    if latlngs.len() >= 2 {
                        // Drop non-finite or out-of-range coordinates
                        // before storing.
                        let coords: Vec<GpsPoint> = latlngs
                            .iter()
                            .filter_map(|p| {
                                let lat = p[0];
                                let lng = p[1];
                                if lat.is_finite()
                                    && lng.is_finite()
                                    && (-90.0..=90.0).contains(&lat)
                                    && (-180.0..=180.0).contains(&lng)
                                {
                                    Some(GpsPoint::new(lat, lng))
                                } else {
                                    None
                                }
                            })
                            .collect();
                        if coords.len() >= 2 {
                            total_points += coords.len();
                            // Default to "Ride" when no sport type was supplied.
                            let sport = sport_map
                                .get(&result.activity_id)
                                .cloned()
                                .unwrap_or_else(|| "Ride".to_string());
                            let point_count = coords.len();
                            let stored = crate::persistence::with_persistent_engine(|engine| {
                                engine
                                    .add_activity(result.activity_id.clone(), coords, sport)
                                    .is_ok()
                            })
                            .unwrap_or(false);
                            let activity_time = elapsed_ms(activity_start);
                            if stored {
                                // Log only the first, last, and unusually slow
                                // stores to keep log volume bounded.
                                if idx == 0 || idx == num_results - 1 || activity_time > 10 {
                                    info!(
                                        "[RUST: PERF] Storage[{}/{}]: {} ({} points) in {} ms",
                                        idx + 1,
                                        num_results,
                                        result.activity_id,
                                        point_count,
                                        activity_time
                                    );
                                }
                                synced_ids.push(result.activity_id);
                            } else {
                                failed_ids.push(result.activity_id);
                            }
                        } else {
                            // Too few valid points after filtering.
                            failed_ids.push(result.activity_id);
                        }
                    } else {
                        // Track too short to be meaningful.
                        failed_ids.push(result.activity_id);
                    }
                } else {
                    // Fetch succeeded but carried no track data.
                    failed_ids.push(result.activity_id);
                }
            } else {
                failed_ids.push(result.activity_id);
            }
        }
        let storage_time = elapsed_ms(storage_start);
        let avg_per_activity = if !synced_ids.is_empty() {
            storage_time / synced_ids.len() as u64
        } else {
            0
        };
        info!(
            "[RUST: PERF] Storage complete: {} activities, {} points in {} ms (avg {} ms/activity)",
            synced_ids.len(),
            total_points,
            storage_time,
            avg_per_activity
        );
        let success_count = synced_ids.len() as u32;
        let total = (synced_ids.len() + failed_ids.len()) as u32;
        info!(
            "[RUST: start_fetch_and_store] Storage complete: {} synced, {} failed, {} total points ({} ms)",
            success_count,
            failed_ids.len(),
            total_points,
            elapsed_ms(storage_start)
        );
        eprintln!(
            "[RUST: start_fetch_and_store] Storage complete: {} synced, {} failed, {} total points ({} ms)",
            success_count,
            failed_ids.len(),
            total_points,
            elapsed_ms(storage_start)
        );
        let fetch_time = elapsed_ms(fetch_start) as u32;
        let storage_time = elapsed_ms(storage_start) as u32;
        let total_time = elapsed_ms(thread_start) as u32;
        // Publish the summary before marking progress finished so a host
        // that polls progress-then-result never misses the record.
        store_fetch_and_store_result(FetchAndStoreResult {
            synced_ids,
            failed_ids,
            total,
            success_count,
            total_points: total_points as u32,
            fetch_time_ms: fetch_time,
            storage_time_ms: storage_time,
            total_time_ms: total_time,
        });
        crate::http::finish_download_progress();
        info!(
            "[RUST: start_fetch_and_store] Thread complete ({} ms)",
            total_time
        );
        eprintln!(
            "[RUST: start_fetch_and_store] Thread complete ({} ms)",
            total_time
        );
    });
}
/// Holds the most recent `start_fetch_and_store` outcome until the host
/// retrieves it via `take_fetch_and_store_result`.
#[cfg(all(feature = "http", feature = "persistence"))]
static FETCH_AND_STORE_RESULT: std::sync::Mutex<Option<FetchAndStoreResult>> =
    std::sync::Mutex::new(None);
/// Publishes the background fetch-and-store outcome for later pickup.
///
/// Recovers from a poisoned mutex instead of silently dropping the result
/// (the original `if let Ok` swallowed poison, which would leave the host
/// polling forever): the protected value is a plain data record, so it is
/// safe to overwrite even if another thread panicked while holding the lock.
#[cfg(all(feature = "http", feature = "persistence"))]
fn store_fetch_and_store_result(result: FetchAndStoreResult) {
    let mut guard = FETCH_AND_STORE_RESULT
        .lock()
        .unwrap_or_else(|poisoned| poisoned.into_inner());
    *guard = Some(result);
}
/// FFI entry point: take (and clear) the result of the last
/// `start_fetch_and_store` run, or `None` if none has completed.
///
/// Recovers from a poisoned mutex rather than returning `None` forever
/// (the original `if let Ok` swallowed poison): the protected value is a
/// plain data record, so reading it after another thread's panic is safe.
#[cfg(all(feature = "http", feature = "persistence"))]
#[uniffi::export]
pub fn take_fetch_and_store_result() -> Option<FetchAndStoreResult> {
    init_logging();
    let result = FETCH_AND_STORE_RESULT
        .lock()
        .unwrap_or_else(|poisoned| poisoned.into_inner())
        .take();
    if let Some(ref r) = result {
        info!(
            "[RUST: take_fetch_and_store_result] Returning result: {} synced, {} failed",
            r.success_count,
            r.failed_ids.len()
        );
    }
    result
}