use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use bevy_math::FloatExt;
use bevy_time::{Time, TimeSystems};
use core::sync::atomic::AtomicU64;
use firewheel::{
Volume,
clock::{DurationSeconds, InstantSeconds},
diff::{Diff, EventQueue, ParamPath, Patch, PatchError, PathBuilder},
event::{NodeEventType, ParamData},
nodes::volume::VolumeNode,
};
use crate::{error::SeedlingError, time::Audio};
/// Plugin that keeps every [`AudioEvents`] component's cached clock in sync
/// with the audio time source.
pub(crate) struct EventsPlugin;

impl Plugin for EventsPlugin {
    fn build(&self, app: &mut App) {
        // Refresh cached instants in `First`, right after the time systems
        // have advanced the clock for this frame.
        let refresh = update_events_instant.after(TimeSystems);
        app.add_systems(First, refresh);
    }
}
/// Per-entity audio event storage: an immediate event queue plus scheduled
/// parameter timelines, anchored to a cached audio-clock instant.
#[derive(Component)]
pub struct AudioEvents {
    // Immediate events accumulated via `EventQueue::push`; drained elsewhere.
    pub(super) queue: Vec<NodeEventType>,
    // Scheduled parameter timelines produced by `schedule`/`schedule_tween`.
    pub(super) timeline: Vec<EventTimeline>,
    // Audio-clock instant for this frame; refreshed by `update_events_instant`.
    now: InstantSeconds,
}
impl AudioEvents {
    /// Create an empty event set anchored to the current audio-clock instant.
    pub fn new(now: &Time<Audio>) -> Self {
        Self {
            queue: Default::default(),
            timeline: Default::default(),
            now: now.context().instant(),
        }
    }

    /// The audio-clock instant captured for this frame by `update_events_instant`.
    pub(crate) fn now(&self) -> InstantSeconds {
        self.now
    }

    /// Copy every timeline from `other` whose id is not already present here.
    pub fn merge_timelines(&mut self, other: &Self) {
        for event in &other.timeline {
            if !self.timeline.iter().any(|ev| ev.id() == event.id()) {
                self.timeline.push(event.clone());
            }
        }
    }

    /// Merge `other`'s timelines into `self` (deduplicated by id), then drop
    /// from `other` any timeline that has completely elapsed as of `now`.
    pub(crate) fn merge_timelines_and_clear(&mut self, other: &mut Self, now: InstantSeconds) {
        other.timeline.retain(|event| {
            if !self.timeline.iter().any(|ev| ev.id() == event.id()) {
                self.timeline.push(event.clone());
            }
            !event.completely_elapsed(now)
        });
    }

    /// Drop timelines that are both fully elapsed and fully rendered.
    pub(super) fn clear_elapsed_events(&mut self, now: InstantSeconds) {
        self.timeline
            .retain(|event| !event.completely_elapsed(now) || !event.render_progress.complete);
    }

    #[expect(unused)]
    fn timeline(&self) -> &[EventTimeline] {
        &self.timeline
    }

    /// Schedule a parameter change at `time`.
    ///
    /// `value` is first projected through everything already scheduled up to
    /// `time`; `change` is applied to that projection, and the diff between
    /// the two states is recorded as a new timeline.
    pub fn schedule<T, F>(&mut self, time: InstantSeconds, value: &T, change: F)
    where
        T: Diff + Patch + Send + Sync + Clone + 'static,
        F: FnOnce(&mut T),
    {
        let initial_value = self.get_value_at(time, value);
        let mut new_value = initial_value.clone();
        change(&mut new_value);

        let mut events = Vec::new();
        let mut func = |ev, time| match ev {
            NodeEventType::Param { data, path } => {
                events.push(TimelineParam { data, path, time });
            }
            // `Diff` implementations are expected to emit only `Param` events.
            _ => {
                bevy_log::warn!("failed to schedule audio event: invalid event type");
            }
        };
        let mut queue = TimelineQueue::new(time, &mut func);
        new_value.diff(&initial_value, PathBuilder::default(), &mut queue);

        // A no-op `change` yields no events; pushing an empty timeline would
        // panic inside `time_range`, so skip it.
        if !events.is_empty() {
            self.timeline.push(EventTimeline::new(events));
        }
    }

    /// Schedule a tween of `total_events` steps between `start` and `end`.
    ///
    /// Each step at proportion `i / total_events` records the diff of
    /// `interpolate(&start_value, &end_value, proportion)` against
    /// `start_value`, timestamped at the matching instant.
    pub fn schedule_tween<T, F>(
        &mut self,
        start: InstantSeconds,
        end: InstantSeconds,
        start_value: T,
        end_value: T,
        total_events: usize,
        interpolate: F,
    ) where
        T: Diff + Patch + Send + Sync + Clone + 'static,
        F: Fn(&T, &T, f32) -> T,
    {
        if total_events == 0 {
            return;
        }

        let mut events = Vec::new();
        let mut func = |ev, time| match ev {
            NodeEventType::Param { data, path } => {
                events.push(TimelineParam { data, path, time });
            }
            _ => {
                bevy_log::warn!("failed to schedule audio event: invalid event type");
            }
        };
        let mut queue = TimelineQueue::new(start, &mut func);
        let duration = (end - start).0;
        for i in 1..=total_events {
            let proportion = i as f64 / total_events as f64;
            let instant = start.0 + proportion * duration;
            // Re-stamp the queue so this step's events carry its instant.
            queue.instant = InstantSeconds(instant);
            let new_value = (interpolate)(&start_value, &end_value, proportion as f32);
            new_value.diff(&start_value, PathBuilder::default(), &mut queue);
        }

        // An interpolation that never diverges from `start_value` produces no
        // events; an empty timeline would panic inside `time_range`.
        if !events.is_empty() {
            self.timeline.push(EventTimeline::new(events));
        }
    }

    /// `true` when any timeline overlaps the inclusive `start..=end` window.
    pub(crate) fn active_within(&self, start: InstantSeconds, end: InstantSeconds) -> bool {
        self.timeline
            .iter()
            .any(|event| event.active_within(start..=end))
    }

    /// Apply every scheduled event in `start..=end` (from all timelines, in
    /// insertion order) to `value`.
    ///
    /// # Errors
    ///
    /// Returns [`SeedlingError::PatchError`] if any event fails to patch `T`.
    pub fn value_at<T>(
        &self,
        start: InstantSeconds,
        end: InstantSeconds,
        value: &mut T,
    ) -> Result<(), SeedlingError>
    where
        T: Diff + Patch + Clone,
    {
        for event in &self.timeline {
            event
                .apply(start..=end, value)
                .map_err(|e| SeedlingError::PatchError {
                    ty: core::any::type_name::<T>(),
                    error: e,
                })?;
        }
        Ok(())
    }

    /// Project `value` through every event from the start of time up to
    /// `instant`, returning the resulting state.
    ///
    /// NOTE(review): patch errors are silently discarded here (best-effort
    /// projection) — confirm callers do not depend on error reporting.
    pub fn get_value_at<T>(&self, instant: InstantSeconds, value: &T) -> T
    where
        T: Diff + Patch + Clone,
    {
        let mut new_value = value.clone();
        let _ = self.value_at(InstantSeconds(0.0), instant, &mut new_value);
        new_value
    }
}
/// Immediate (non-scheduled) events are appended to the queue; they are
/// drained and sent to the audio graph elsewhere.
impl EventQueue for AudioEvents {
    fn push(&mut self, data: firewheel::event::NodeEventType) {
        self.queue.push(data);
    }
}
impl core::fmt::Debug for AudioEvents {
    /// Manual `Debug`: the event queue's `NodeEventType` entries are not
    /// debug-printable here, so the queue field is elided as `()`.
    // Consistency fix: use `core::fmt` paths to match the trait path and the
    // file's other `core::` usage (`std::fmt` is a re-export of the same types).
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("AudioEvents")
            .field("queue", &())
            .field("timeline", &self.timeline)
            .field("now", &self.now)
            .finish()
    }
}
/// An [`EventQueue`] adapter that forwards each pushed event to a sink
/// closure together with a timestamp.
///
/// `instant` is public so a caller can re-stamp successive diffs (e.g. the
/// individual steps of a tween) without rebuilding the queue.
struct TimelineQueue<'a> {
    // Sink receiving each event paired with the instant it was stamped with.
    queue: &'a mut dyn FnMut(NodeEventType, InstantSeconds),
    pub instant: InstantSeconds,
}
impl core::fmt::Debug for TimelineQueue<'_> {
    /// Manual `Debug`: the sink closure cannot be printed, so only the
    /// current instant is shown and the output is marked non-exhaustive.
    // Consistency fix: `core::fmt` paths to match the trait path and the
    // file's other `core::` usage (same types; `std::fmt` re-exports them).
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("TimelineQueue")
            .field("instant", &self.instant)
            .finish_non_exhaustive()
    }
}
impl<'a> TimelineQueue<'a> {
    /// Build a queue that forwards events to `sink`, stamping each one with
    /// the queue's current instant (initially `instant`).
    pub fn new(
        instant: InstantSeconds,
        sink: &'a mut impl FnMut(NodeEventType, InstantSeconds),
    ) -> Self {
        Self {
            instant,
            queue: sink,
        }
    }
}
impl EventQueue for TimelineQueue<'_> {
    /// Forward `data` to the sink, stamped with the queue's current instant.
    fn push(&mut self, data: NodeEventType) {
        (self.queue)(data, self.instant);
    }
}
// Monotonic source of unique `EventTimeline` ids. Relaxed ordering suffices:
// only uniqueness matters, not cross-thread ordering.
static TIMELINE_ID: AtomicU64 = AtomicU64::new(0);
/// A recorded sequence of timestamped parameter events, plus a record of how
/// much of it has already been rendered to the audio graph.
#[derive(Clone, Debug)]
pub(super) struct EventTimeline {
    // Events assumed ordered by ascending `time` (see `time_range`, which
    // treats first/last as the span's endpoints).
    tween: Vec<TimelineParam>,
    pub render_progress: RenderProgress,
    // Globally unique id (from `TIMELINE_ID`) used to deduplicate merges.
    id: u64,
}
/// A single scheduled parameter change: the patch payload, the parameter
/// path it targets, and the instant at which it applies.
#[derive(Clone, Debug)]
struct TimelineParam {
    pub data: ParamData,
    pub path: ParamPath,
    pub time: InstantSeconds,
}
/// Tracks how much of a timeline has been rendered to the audio graph.
#[derive(Clone, Debug)]
pub struct RenderProgress {
    // Remaining unrendered span; `start` advances as `EventTimeline::render`
    // consumes events.
    pub range: core::ops::Range<InstantSeconds>,
    // Set once the remaining range collapses to empty.
    pub complete: bool,
}
impl RenderProgress {
    /// Start tracking render progress over `range`, initially unrendered.
    pub fn new(range: core::ops::Range<InstantSeconds>) -> Self {
        let complete = false;
        Self { range, complete }
    }
}
fn time_range(events: &[TimelineParam]) -> core::ops::Range<InstantSeconds> {
match events {
[] => panic!("an event timeline should never be empty"),
&[TimelineParam { time, .. }] => time..time,
&[
TimelineParam { time: start, .. },
..,
TimelineParam { time: end, .. },
] => start..end,
}
}
impl EventTimeline {
    /// Build a timeline from an ordered list of parameter events.
    ///
    /// Render progress initially covers the full event span, and the id is
    /// drawn from the global counter so merged copies can be deduplicated.
    fn new(tween: Vec<TimelineParam>) -> Self {
        let render_progress = RenderProgress::new(time_range(&tween));
        EventTimeline {
            tween,
            render_progress,
            // Relaxed: the counter only needs to hand out unique values.
            id: TIMELINE_ID.fetch_add(1, core::sync::atomic::Ordering::Relaxed),
        }
    }

    /// `true` once the last event lies strictly before `now`.
    pub fn completely_elapsed(&self, now: InstantSeconds) -> bool {
        self.time_range().end < now
    }

    /// Globally unique id used to deduplicate merged timelines.
    pub fn id(&self) -> u64 {
        self.id
    }

    /// The portion of `full_range` that still needs rendering, or `None`
    /// when rendering has already finished.
    ///
    /// The start resumes from the recorded render progress; the end is
    /// capped at the timeline's final event.
    // NOTE(review): if `full_range` ends before the recorded progress start,
    // the returned range is inverted (start > end). `params_in` then yields
    // nothing, but `render` will still move the progress start backwards —
    // presumably callers only ever advance the render window; confirm.
    fn render_range(
        &self,
        full_range: core::ops::Range<InstantSeconds>,
    ) -> Option<core::ops::Range<InstantSeconds>> {
        if self.render_progress.complete {
            return None;
        }
        let range = self.time_range();
        let new_start = self.render_progress.range.start.0.max(full_range.start.0);
        let new_end = range.end.0.min(full_range.end.0);
        Some(InstantSeconds(new_start)..InstantSeconds(new_end))
    }

    /// Span from the first event's instant to the last event's instant.
    pub fn time_range(&self) -> core::ops::Range<InstantSeconds> {
        time_range(&self.tween)
    }

    /// `true` when the timeline's span overlaps the inclusive `probe` window.
    fn active_within(&self, probe: core::ops::RangeInclusive<InstantSeconds>) -> bool {
        let range = self.time_range();
        range.start <= *probe.end() && range.end >= *probe.start()
    }

    /// All events whose timestamp falls within `range` (inclusive).
    fn params_in(
        &self,
        range: core::ops::RangeInclusive<InstantSeconds>,
    ) -> impl Iterator<Item = &TimelineParam> {
        self.tween.iter().filter(move |p| range.contains(&p.time))
    }

    /// Apply every event in `range` to `value`, in timeline order.
    ///
    /// Fails with the underlying `PatchError` if any event cannot patch `T`.
    pub fn apply<T: Patch>(
        &self,
        range: core::ops::RangeInclusive<InstantSeconds>,
        value: &mut T,
    ) -> Result<(), PatchError> {
        for TimelineParam { data, path, .. } in self.params_in(range) {
            let patch = T::patch(data, path)?;
            value.apply(patch);
        }
        Ok(())
    }

    /// Emit the not-yet-rendered events falling within `start..end` into
    /// `buffer`, then advance the render progress past them.
    ///
    /// Once the remaining progress range collapses to empty, the timeline is
    /// marked complete and subsequent calls are no-ops.
    pub fn render<F>(
        &mut self,
        start: InstantSeconds,
        end: InstantSeconds,
        mut buffer: F,
    ) -> Result<(), SeedlingError>
    where
        F: FnMut(NodeEventType, InstantSeconds),
    {
        let Some(render_range) = self.render_range(start..end) else {
            return Ok(());
        };
        for param in self.params_in(render_range.start..=render_range.end) {
            let event = NodeEventType::Param {
                data: param.data.clone(),
                path: param.path.clone(),
            };
            buffer(event, param.time);
        }
        // Consume the rendered span; rendering resumes from its end next call.
        self.render_progress.range.start = render_range.end;
        if self.render_progress.range.is_empty() {
            self.render_progress.complete = true;
        }
        Ok(())
    }
}
/// Refresh every entity's cached audio-clock instant from the audio time
/// resource; runs in `First` after the time systems.
fn update_events_instant(mut q: Query<&mut AudioEvents>, time: Res<Time<crate::time::Audio>>) {
    q.iter_mut().for_each(|mut event| {
        event.now = time.context().instant();
    });
}
/// Interpolation between two audio values with audio-appropriate handling of
/// mixed representations (see the `Volume` impl below).
trait AudioLerp: Default + Clone + Send + Sync + 'static {
    // `amount` is the interpolation factor, typically in [0, 1]:
    // 0 yields `self`, 1 yields `other`.
    fn audio_lerp(&self, other: Self, amount: f32) -> Self;
}
/// Clamp a decibel value to the -60 dB floor used for fades.
///
/// Values below -60 dB are treated as -60 dB so decibel spans (and the event
/// counts derived from them) stay bounded; everything else passes through
/// unchanged. The comparison form (rather than `f32::max`) lets NaN pass
/// through untouched, matching the original behavior.
fn clamp(db: f32) -> f32 {
    let below_floor = db < -60.0;
    if below_floor { -60.0 } else { db }
}
impl AudioLerp for Volume {
    /// Interpolate between two volumes.
    ///
    /// Matching representations lerp directly in their own units; mixed
    /// representations are lerped in decibels, with the non-decibel side
    /// clamped at the -60 dB floor first.
    fn audio_lerp(&self, other: Self, amount: f32) -> Self {
        match (self, other) {
            (Self::Linear(from), Self::Linear(to)) => Self::Linear(from.lerp(to, amount)),
            (Self::Decibels(from), Self::Decibels(to)) => Self::Decibels(from.lerp(to, amount)),
            // The remaining arms only ever see mixed representations: both
            // same-representation cases were consumed above.
            (Self::Decibels(from), to) => Self::Decibels(from.lerp(clamp(to.decibels()), amount)),
            (from, Self::Decibels(to)) => Self::Decibels(clamp(from.decibels()).lerp(to, amount)),
        }
    }
}
/// Scheduling of volume fades onto an entity's [`AudioEvents`] timelines.
pub trait VolumeFade {
    /// Fade to `volume` over `duration`, starting at the current audio instant.
    fn fade_to(&self, volume: Volume, duration: DurationSeconds, events: &mut AudioEvents);
    /// Fade to `volume` over the explicit `start..end` window.
    fn fade_at(
        &self,
        volume: Volume,
        start: InstantSeconds,
        end: InstantSeconds,
        events: &mut AudioEvents,
    );
}
/// Upper bound on tween event count for a span of `duration` seconds when
/// events are emitted at most once per `time_step` seconds (rounded up).
pub(crate) fn max_event_rate(duration: f64, time_step: f64) -> usize {
    let steps = duration / time_step;
    steps.ceil() as usize
}
impl VolumeFade for VolumeNode {
    /// Fade this node's volume to `target` over `duration`, starting now.
    ///
    /// Delegates to [`VolumeFade::fade_at`] anchored at the current
    /// audio-clock instant, so both entry points share one implementation.
    fn fade_to(&self, target: Volume, duration: DurationSeconds, events: &mut AudioEvents) {
        let start = events.now();
        self.fade_at(target, start, start + duration, events);
    }

    /// Fade this node's volume to `target` over the `start..end` window.
    fn fade_at(
        &self,
        target: Volume,
        start: InstantSeconds,
        end: InstantSeconds,
        events: &mut AudioEvents,
    ) {
        // Project the node's state forward to the fade's start so the tween
        // begins from whatever value is already scheduled at that instant.
        let start_value = events.get_value_at(start, self);
        let mut end_value = start_value;
        end_value.volume = target;
        // Event budget: ~1.25 events per decibel of travel (floored to the
        // -60 dB silence threshold), at least one event, capped at one event
        // per millisecond of fade duration.
        let db_span = (clamp(start_value.volume.decibels()) - clamp(target.decibels())).abs();
        let total_events = (db_span * 1.25).max(1.0) as usize;
        let total_events = max_event_rate(end.0 - start.0, 0.001).min(total_events);
        events.schedule_tween(
            start,
            end,
            start_value,
            end_value,
            total_events,
            |a, b, t| {
                let mut output = *a;
                output.volume = a.volume.audio_lerp(b.volume, t);
                output
            },
        );
    }
}