bevy_seedling 0.7.0

A sprouting integration of the Firewheel audio engine
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
//! Events that synchronize the ECS and audio thread.

use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use bevy_math::FloatExt;
use bevy_time::{Time, TimeSystems};
use core::sync::atomic::AtomicU64;
use firewheel::{
    Volume,
    clock::{DurationSeconds, InstantSeconds},
    diff::{Diff, EventQueue, ParamPath, Patch, PatchError, PathBuilder},
    event::{NodeEventType, ParamData},
    nodes::volume::VolumeNode,
};

use crate::{error::SeedlingError, time::Audio};

/// Registers the upkeep systems for [`AudioEvents`].
pub(crate) struct EventsPlugin;

impl Plugin for EventsPlugin {
    fn build(&self, app: &mut App) {
        // Refresh each `AudioEvents::now` right after Bevy's time update so
        // scheduling within the frame sees a fresh audio-clock instant.
        app.add_systems(First, update_events_instant.after(TimeSystems));
    }
}

/// The audio event queue.
///
/// When inserted into an entity that contains a [`FirewheelNode`],
/// these events will automatically be drained and sent
/// to the audio context in the [`SeedlingSystems::Flush`] set.
///
/// [`FirewheelNode`]: crate::prelude::FirewheelNode
/// [`SeedlingSystems::Flush`]: crate::prelude::SeedlingSystems::Flush
///
/// ## Scheduled events
///
/// This component can be used to schedule events with sample-accurate timing.
///
/// ```
/// # use bevy::prelude::*;
/// # use bevy_seedling::prelude::*;
/// fn scheduling(time: Res<Time<Audio>>, server: Res<AssetServer>, mut commands: Commands) {
///     let mut events = AudioEvents::new(&time);
///     let settings = PlaybackSettings::default().with_playback(false);
///
///     // Start playing exactly one second from now.
///     settings.play_at(None, time.delay(DurationSeconds(1.0)), &mut events);
///
///     commands.spawn((
///         events,
///         settings,
///         SamplePlayer::new(server.load("my_sample.wav")),
///     ));
/// }
/// ```
///
/// Scheduled events form a timeline, managing synchronization with both
/// the ECS and the audio thread. When an event in the timeline elapses,
/// the value is sent to the audio thread _and_ written to the value in the
/// ECS. The scheduled event is subsequently removed from the timeline.
///
/// Convenience methods like [`PlaybackSettings::play_at`] and [`VolumeFade::fade_to`]
/// don't have privileged access to scheduling; they simply wrap the fundamental
/// scheduling primitives [`AudioEvents::schedule`] and [`AudioEvents::schedule_tween`].
/// These primitives can be used to schedule _arbitrary_ events.
///
/// [`PlaybackSettings::play_at`]: crate::prelude::PlaybackSettings::play_at
///
/// ```
/// # use bevy::prelude::*;
/// # use bevy_seedling::prelude::*;
/// fn arbitrary(lpf: Single<(&LowPassNode, &mut AudioEvents)>, time: Res<Time<Audio>>) {
///     let (filter, mut events) = lpf.into_inner();
///
///     // In exactly 2.5 seconds from now, set the filter's cutoff frequency
///     // to 250 Hz.
///     events.schedule(time.delay(DurationSeconds(2.5)), filter, |filter| {
///         filter.frequency = 250.0;
///     });
/// }
/// ```
///
/// ## Automatic scheduling
///
/// By default, _all_ events, whether specifically scheduled or generated by
/// `bevy_seedling`'s diffing, are presented to Firewheel with a timestamp.
/// This ensures consistent and intuitive event timings, but may increase CPU
/// time. If you'd like to opt out of automatic scheduling, see the [`ScheduleDiffing`]
/// component.
///
/// [`ScheduleDiffing`]: super::ScheduleDiffing
#[derive(Component)]
pub struct AudioEvents {
    /// Immediate events, drained and sent to the audio context when flushed.
    pub(super) queue: Vec<NodeEventType>,
    /// We keep a timeline like this because a simple queue of rendered events is not sufficient.
    ///
    /// Since we'll send out the scheduled events a little bit in advance, there will be some
    /// amount of time in the ECS where we don't have access to the patches -- which is exactly
    /// when we need them! Keep in mind that the events are not `Clone`.
    ///
    /// If we can instead render the events on-demand, we can fetch them whenever we need.
    /// It's also much easier to detect overlapping events.
    pub(super) timeline: Vec<EventTimeline>,
    /// Snapshot of the audio clock, refreshed each frame by `update_events_instant`.
    now: InstantSeconds,
}

impl AudioEvents {
    /// Create a new instance of [`AudioEvents`], primed
    /// with the current audio context time.
    pub fn new(now: &Time<Audio>) -> Self {
        Self {
            queue: Default::default(),
            timeline: Default::default(),
            now: now.context().instant(),
        }
    }

    /// Essentially a duplicate of [`AudioTime::now`][crate::time::AudioTime::now].
    ///
    /// Given this duplicated information, this method is just an internal convenience
    /// and will remain private for now.
    pub(crate) fn now(&self) -> InstantSeconds {
        self.now
    }

    /// Clone any timeline events from `other` that aren't present in `self`.
    ///
    /// Presence is determined by [`EventTimeline::id`], so an event is
    /// copied at most once even across repeated merges.
    pub fn merge_timelines(&mut self, other: &Self) {
        for event in &other.timeline {
            if !self.timeline.iter().any(|ev| ev.id() == event.id()) {
                self.timeline.push(event.clone());
            }
        }
    }

    /// Like `merge_timelines`, but clear all the events in `other` that
    /// have elapsed.
    pub(crate) fn merge_timelines_and_clear(&mut self, other: &mut Self, now: InstantSeconds) {
        other.timeline.retain(|event| {
            // Copy anything we haven't seen before deciding whether `other`
            // keeps it, so elapsed events are still merged into `self`.
            if !self.timeline.iter().any(|ev| ev.id() == event.id()) {
                self.timeline.push(event.clone());
            }

            !event.completely_elapsed(now)
        });
    }

    /// Clear the timeline of any elapsed events.
    ///
    /// An elapsed event is retained while its audio-thread rendering
    /// hasn't yet been marked complete.
    pub(super) fn clear_elapsed_events(&mut self, now: InstantSeconds) {
        self.timeline
            .retain(|event| !event.completely_elapsed(now) || !event.render_progress.complete);
    }

    /// Get the full timeline of events.
    ///
    /// These events are used to provide scheduled events directly to
    /// the audio thread and animate values in the ECS. Events that
    /// have elapsed are automatically removed in the [`Last`] schedule.
    #[expect(unused)]
    fn timeline(&self) -> &[EventTimeline] {
        &self.timeline
    }

    /// Schedule an event at an absolute time in terms of the audio clock.
    ///
    /// This method will apply any patches to the value before passing it to the closure,
    /// ensuring any previous scheduled events are respected.
    pub fn schedule<T, F>(&mut self, time: InstantSeconds, value: &T, change: F)
    where
        T: Diff + Patch + Send + Sync + Clone + 'static,
        F: FnOnce(&mut T),
    {
        // let's make sure to apply any patches that may exist in this queue before the start time
        let initial_value = self.get_value_at(time, value);

        let mut new_value = initial_value.clone();
        change(&mut new_value);

        // Diff the mutated value against the baseline, capturing each
        // parameter change as a timeline entry stamped with `time`.
        let mut events = Vec::new();
        let mut func = |ev, time| match ev {
            NodeEventType::Param { data, path } => {
                events.push(TimelineParam { data, path, time });
            }
            _ => {
                bevy_log::warn!("failed to schedule audio event: invalid event type");
            }
        };
        let mut queue = TimelineQueue::new(time, &mut func);
        new_value.diff(&initial_value, Default::default(), &mut queue);

        // If the closure produced no observable change, there's nothing to schedule.
        if events.is_empty() {
            return;
        }

        self.timeline.push(EventTimeline::new(events));
    }

    /// Schedule a tween with a custom interpolator.
    ///
    /// `total_events` discrete steps are generated, evenly spaced over
    /// `start..=end`; each step diffs the interpolated value against
    /// `start_value` to produce its parameter events.
    pub fn schedule_tween<T, F>(
        &mut self,
        start: InstantSeconds,
        end: InstantSeconds,
        start_value: T,
        end_value: T,
        total_events: usize,
        interpolate: F,
    ) where
        T: Diff + Patch + Send + Sync + Clone + 'static,
        F: Fn(&T, &T, f32) -> T,
    {
        let mut events = Vec::new();
        let mut func = |ev, time| match ev {
            NodeEventType::Param { data, path } => {
                events.push(TimelineParam { data, path, time });
            }
            _ => {
                bevy_log::warn!("failed to schedule audio event: invalid event type");
            }
        };
        let mut queue = TimelineQueue::new(start, &mut func);

        let duration = (end - start).0;
        for i in 1..=total_events {
            let proportion = i as f64 / total_events as f64;
            let instant = start.0 + proportion * duration;

            queue.instant = InstantSeconds(instant);
            let new_value = (interpolate)(&start_value, &end_value, proportion as f32);
            new_value.diff(&start_value, PathBuilder::default(), &mut queue);
        }

        // A valid tween should never be empty.
        if events.is_empty() {
            return;
        }

        self.timeline.push(EventTimeline::new(events));
    }

    /// Returns whether any timeline event overlaps the span `start..=end`.
    pub(crate) fn active_within(&self, start: InstantSeconds, end: InstantSeconds) -> bool {
        for event in &self.timeline {
            if event.active_within(start..=end) {
                return true;
            }
        }

        false
    }

    /// Apply all scheduled events within `start..=end` in this event queue to `value`.
    ///
    /// # Errors
    ///
    /// Returns [`SeedlingError::PatchError`] if any event's patch fails
    /// to apply to `T`.
    pub fn value_at<T>(
        &self,
        start: InstantSeconds,
        end: InstantSeconds,
        value: &mut T,
    ) -> Result<(), SeedlingError>
    where
        T: Diff + Patch + Clone,
    {
        for event in &self.timeline {
            event
                .apply(start..=end, value)
                .map_err(|e| SeedlingError::PatchError {
                    ty: core::any::type_name::<T>(),
                    error: e,
                })?;
        }

        Ok(())
    }

    /// Return a copy of `value` with every event scheduled up to `instant` applied.
    ///
    /// Patch failures are silently ignored.
    pub fn get_value_at<T>(&self, instant: InstantSeconds, value: &T) -> T
    where
        T: Diff + Patch + Clone,
    {
        let mut new_value = value.clone();
        // TODO: consider handling
        let _ = self.value_at(InstantSeconds(0.0), instant, &mut new_value);
        new_value
    }
}

impl EventQueue for AudioEvents {
    /// Queue a raw node event for immediate (unscheduled) delivery.
    fn push(&mut self, data: firewheel::event::NodeEventType) {
        self.queue.push(data);
    }
}

impl core::fmt::Debug for AudioEvents {
    // Use `core::fmt` paths for consistency with the rest of this file
    // (`core::fmt::Debug`, `core::sync::atomic`); `std::fmt` re-exports
    // the same types, so the output is unchanged.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // The queue contents are intentionally elided and shown as `()`.
        f.debug_struct("AudioEvents")
            .field("queue", &())
            .field("timeline", &self.timeline)
            .field("now", &self.now)
            .finish()
    }
}

/// A queue providing an easily modifiable `instant` for
/// scheduled patches.
struct TimelineQueue<'a> {
    /// Callback receiving each pushed event along with the current stamp.
    queue: &'a mut dyn FnMut(NodeEventType, InstantSeconds),
    /// The timestamp applied to subsequently pushed events.
    pub instant: InstantSeconds,
}

impl core::fmt::Debug for TimelineQueue<'_> {
    // Use `core::fmt` paths for consistency with the rest of this file;
    // `std::fmt` re-exports the same types, so the output is unchanged.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // The callback can't be printed, so finish non-exhaustively.
        f.debug_struct("TimelineQueue")
            .field("instant", &self.instant)
            .finish_non_exhaustive()
    }
}

impl<'a> TimelineQueue<'a> {
    /// Create a queue whose pushed events are stamped with `initial_instant`
    /// until [`TimelineQueue::instant`] is reassigned.
    pub fn new(
        initial_instant: InstantSeconds,
        f: &'a mut impl FnMut(NodeEventType, InstantSeconds),
    ) -> Self {
        Self {
            instant: initial_instant,
            queue: f,
        }
    }
}

impl EventQueue for TimelineQueue<'_> {
    /// Forward the event to the callback, stamped with the current `instant`.
    fn push(&mut self, data: NodeEventType) {
        (self.queue)(data, self.instant);
    }
}

static TIMELINE_ID: AtomicU64 = AtomicU64::new(0);

/// A distinct timeline event, composed of
/// one or more [`TimelineParam`]s.
#[derive(Clone, Debug)]
pub(super) struct EventTimeline {
    /// The individual parameter changes.
    // NOTE(review): `time_range` takes the first and last elements as the
    // span's bounds — presumably params are pushed in ascending time order;
    // confirm against the producers in `schedule`/`schedule_tween`.
    tween: Vec<TimelineParam>,
    /// The current render progress.
    pub render_progress: RenderProgress,
    /// Unique ID from [`TIMELINE_ID`], used to deduplicate merged timelines.
    id: u64,
}

/// A single scheduled parameter change: the patch data, the path
/// identifying the parameter, and the instant it takes effect.
#[derive(Clone, Debug)]
struct TimelineParam {
    pub data: ParamData,
    pub path: ParamPath,
    pub time: InstantSeconds,
}

/// An event's audio rendering progress.
///
/// This tracks what's been sent to the audio thread
/// independently of the ECS changes, since we generally
/// want to send the events a bit in advance. This helps
/// bridge the gap between the likely different update rates
/// of the ECS and audio thread, and can smooth over frame rate
/// hitches.
#[derive(Clone, Debug)]
pub struct RenderProgress {
    /// The rendered range.
    ///
    /// As the rendering advances, the beginning of this range
    /// marches forward, until the range is empty.
    pub range: core::ops::Range<InstantSeconds>,
    /// Tracks whether the rendering is complete.
    ///
    /// This is distinct from simply tracking an empty range because a single
    /// event will start with an empty range.
    pub complete: bool,
}

impl RenderProgress {
    /// Construct a new, incomplete [`RenderProgress`] spanning `range`.
    pub fn new(range: core::ops::Range<InstantSeconds>) -> Self {
        Self {
            complete: false,
            range,
        }
    }
}

fn time_range(events: &[TimelineParam]) -> core::ops::Range<InstantSeconds> {
    match events {
        [] => panic!("an event timeline should never be empty"),
        &[TimelineParam { time, .. }] => time..time,
        &[
            TimelineParam { time: start, .. },
            ..,
            TimelineParam { time: end, .. },
        ] => start..end,
    }
}

impl EventTimeline {
    /// Construct a new [`EventTimeline`] from a collection of params.
    ///
    /// # Panics
    ///
    /// Panics if `tween` is empty.
    fn new(tween: Vec<TimelineParam>) -> Self {
        assert!(!tween.is_empty(), "an event timeline should never be empty");

        let render_progress = RenderProgress::new(time_range(&tween));

        EventTimeline {
            tween,
            render_progress,
            // Relaxed is sufficient: the ID only needs to be unique.
            id: TIMELINE_ID.fetch_add(1, core::sync::atomic::Ordering::Relaxed),
        }
    }

    /// Report whether this event has completely elapsed by `now`.
    pub fn completely_elapsed(&self, now: InstantSeconds) -> bool {
        self.time_range().end < now
    }

    /// Get this event's unique ID.
    ///
    /// This accelerates event merging for [`FollowerOf`] relationships.
    ///
    /// [`FollowerOf`]: crate::node::follower::FollowerOf
    pub fn id(&self) -> u64 {
        self.id
    }

    /// Provides the subset of `full_range` that has not yet been rendered.
    ///
    /// Returns `None` once rendering has been marked complete.
    fn render_range(
        &self,
        full_range: core::ops::Range<InstantSeconds>,
    ) -> Option<core::ops::Range<InstantSeconds>> {
        if self.render_progress.complete {
            return None;
        }

        let range = self.time_range();
        // NOTE(review): if `full_range` ends before the already-rendered
        // start, this yields an inverted (empty) range, and `render` would
        // then move `render_progress.range.start` backwards — presumably
        // callers only advance ranges monotonically; confirm before calling
        // out of order.
        let new_start = self.render_progress.range.start.0.max(full_range.start.0);
        let new_end = range.end.0.min(full_range.end.0);

        Some(InstantSeconds(new_start)..InstantSeconds(new_end))
    }

    /// Get the full span of this event's timeline.
    pub fn time_range(&self) -> core::ops::Range<InstantSeconds> {
        time_range(&self.tween)
    }

    /// Indicates whether the timeline contains any events within `probe`.
    fn active_within(&self, probe: core::ops::RangeInclusive<InstantSeconds>) -> bool {
        let range = self.time_range();

        // Inclusive overlap test between the timeline span and the probe.
        range.start <= *probe.end() && range.end >= *probe.start()
    }

    /// Returns an iterator over the parameters contained within `range`.
    fn params_in(
        &self,
        range: core::ops::RangeInclusive<InstantSeconds>,
    ) -> impl Iterator<Item = &TimelineParam> {
        self.tween.iter().filter(move |p| range.contains(&p.time))
    }

    /// Apply the events within `range` to `value`.
    ///
    /// # Errors
    ///
    /// Returns a [`PatchError`] if any contained patch doesn't apply to `T`.
    pub fn apply<T: Patch>(
        &self,
        range: core::ops::RangeInclusive<InstantSeconds>,
        value: &mut T,
    ) -> Result<(), PatchError> {
        for TimelineParam { data, path, .. } in self.params_in(range) {
            let patch = T::patch(data, path)?;
            value.apply(patch);
        }

        Ok(())
    }

    /// Render out this event's steps, advancing the progress.
    ///
    /// Each parameter in the unrendered portion of `start..end` is passed
    /// to `buffer` with its timestamp; progress is marked complete once
    /// the rendered range is exhausted.
    pub fn render<F>(
        &mut self,
        start: InstantSeconds,
        end: InstantSeconds,
        mut buffer: F,
    ) -> Result<(), SeedlingError>
    where
        F: FnMut(NodeEventType, InstantSeconds),
    {
        let Some(render_range) = self.render_range(start..end) else {
            return Ok(());
        };

        for param in self.params_in(render_range.start..=render_range.end) {
            let event = NodeEventType::Param {
                data: param.data.clone(),
                path: param.path.clone(),
            };

            buffer(event, param.time);
        }

        // Advance the rendered cursor; an empty range means we're done.
        self.render_progress.range.start = render_range.end;
        if self.render_progress.range.is_empty() {
            self.render_progress.complete = true;
        }

        Ok(())
    }
}

/// Refresh every [`AudioEvents`] component's cached audio-clock instant.
fn update_events_instant(mut q: Query<&mut AudioEvents>, time: Res<Time<crate::time::Audio>>) {
    q.iter_mut().for_each(|mut events| {
        events.now = time.context().instant();
    });
}

/// Internal interpolation helper for audio parameter types.
trait AudioLerp: Default + Clone + Send + Sync + 'static {
    /// Interpolate from `self` toward `other` by `amount`.
    fn audio_lerp(&self, other: Self, amount: f32) -> Self;
}

/// Floor a decibel value at -60 dB, the bottom of the fade range.
///
/// Values at or above -60 dB pass through unchanged; NaN also passes
/// through, matching a plain `<` comparison.
fn clamp(db: f32) -> f32 {
    match db < -60.0 {
        true => -60.0,
        false => db,
    }
}

impl AudioLerp for Volume {
    /// Lerp between volumes, favoring the decibel representation: if either
    /// endpoint is in decibels, the other is converted (floored at -60 dB)
    /// before interpolating.
    fn audio_lerp(&self, other: Self, amount: f32) -> Self {
        match (self, other) {
            (Self::Linear(a), Self::Linear(b)) => Self::Linear(a.lerp(b, amount)),
            (Self::Decibels(a), Self::Decibels(b)) => Self::Decibels(a.lerp(b, amount)),
            // Mixed representations: the decibel side wins. Arm order matters —
            // the all-decibels case is already handled above.
            (Self::Decibels(a), b) => Self::Decibels(a.lerp(clamp(b.decibels()), amount)),
            (a, Self::Decibels(b)) => Self::Decibels(clamp(a.decibels()).lerp(b, amount)),
        }
    }
}

/// An extension trait that provides convenience methods for volume animation.
pub trait VolumeFade {
    /// Linearly interpolate a [`VolumeNode`]'s volume from its current value to `volume`.
    ///
    /// The interpolation will favor the [`Volume::Decibels`] variant, meaning if either
    /// the start or end value is in decibels, the opposite value will be converted to
    /// decibels before interpolating. Linear interpolation in each space will produce
    /// distinct perceptual changes.
    ///
    /// The interpolation uses an approximation of the average just noticeable
    /// difference (JND) for amplitude to calculate how many events are required to
    /// sound perfectly smooth. Since we are not especially sensitive to changes in
    /// amplitude, this generates relatively few events.
    ///
    /// ```
    /// # use bevy::prelude::*;
    /// # use bevy_seedling::prelude::*;
    /// fn fade_to(main: Single<(&VolumeNode, &mut AudioEvents), With<MainBus>>) {
    ///     let (volume, mut events) = main.into_inner();
    ///
    ///     // Fade the main bus to zero, silencing all sound.
    ///     volume.fade_to(Volume::SILENT, DurationSeconds(2.5), &mut events);
    /// }
    /// ```
    fn fade_to(&self, volume: Volume, duration: DurationSeconds, events: &mut AudioEvents);

    /// Linearly interpolate a [`VolumeNode`]'s volume from its value at `start` to `volume`.
    ///
    /// The interpolation will favor the [`Volume::Decibels`] variant, meaning if either
    /// the start or end value is in decibels, the opposite value will be converted to
    /// decibels before interpolating. Linear interpolation in each space will produce
    /// distinct perceptual changes.
    ///
    /// The interpolation uses an approximation of the average just noticeable
    /// difference (JND) for amplitude to calculate how many events are required to
    /// sound perfectly smooth. Since we are not especially sensitive to changes in
    /// amplitude, this generates relatively few events.
    ///
    /// ```
    /// # use bevy::prelude::*;
    /// # use bevy_seedling::prelude::*;
    /// fn fade_to(
    ///     main: Single<(&VolumeNode, &mut AudioEvents), With<MainBus>>,
    ///     time: Res<Time<Audio>>,
    /// ) {
    ///     let (volume, mut events) = main.into_inner();
    ///
    ///     // Fade the main bus to zero starting exactly one
    ///     // second from now.
    ///     volume.fade_at(
    ///         Volume::SILENT,
    ///         time.now() + DurationSeconds(1.0),
    ///         time.now() + DurationSeconds(3.5),
    ///         &mut events,
    ///     );
    /// }
    /// ```
    fn fade_at(
        &self,
        volume: Volume,
        start: InstantSeconds,
        end: InstantSeconds,
        events: &mut AudioEvents,
    );
}

/// Cap the event count so that at most one event falls within each
/// `time_step` seconds of `duration`.
pub(crate) fn max_event_rate(duration: f64, time_step: f64) -> usize {
    let steps = duration / time_step;
    steps.ceil() as usize
}

impl VolumeFade for VolumeNode {
    /// Fade from the node's current (scheduled) volume to `target` over `duration`.
    ///
    /// This is `fade_at` anchored at the current audio instant; delegating
    /// keeps the JND-based event-count logic in a single place instead of
    /// duplicating it across both methods.
    fn fade_to(&self, target: Volume, duration: DurationSeconds, events: &mut AudioEvents) {
        let start = events.now;
        self.fade_at(target, start, start + duration, events);
    }

    /// Fade from the node's volume at `start` to `target`, finishing at `end`.
    fn fade_at(
        &self,
        target: Volume,
        start: InstantSeconds,
        end: InstantSeconds,
        events: &mut AudioEvents,
    ) {
        // Respect any events already scheduled before `start`.
        let start_value = events.get_value_at(start, self);
        let mut end_value = start_value;
        end_value.volume = target;

        // Here, we use the just noticeable difference, around 1 dB, to roughly calculate
        // how many total steps we need. We give a bit of margin just in case.
        let db_span = (clamp(start_value.volume.decibels()) - clamp(target.decibels())).abs();
        let total_events = (db_span * 1.25).max(1.0) as usize;
        // Never emit more than one event per millisecond.
        let total_events = max_event_rate(end.0 - start.0, 0.001).min(total_events);

        events.schedule_tween(
            start,
            end,
            start_value,
            end_value,
            total_events,
            |a, b, t| {
                let mut output = *a;
                output.volume = a.volume.audio_lerp(b.volume, t);
                output
            },
        );
    }
}