Trait web_audio_api::context::AsBaseAudioContext [−][src]
pub trait AsBaseAudioContext {}
Show methods
fn base(&self) -> &BaseAudioContext; fn create_oscillator(&self) -> OscillatorNode<'_> { ... } fn create_gain(&self) -> GainNode<'_> { ... } fn create_constant_source(&self) -> ConstantSourceNode<'_> { ... } fn create_delay(&self) -> DelayNode<'_> { ... } fn create_channel_splitter(
&self,
number_of_outputs: u32
) -> ChannelSplitterNode<'_> { ... } fn create_channel_merger(
&self,
number_of_inputs: u32
) -> ChannelMergerNode<'_> { ... } fn create_media_stream_source<M: MediaStream>(
&self,
media: M
) -> MediaStreamAudioSourceNode<'_> { ... } fn create_media_element_source<M: MediaStream>(
&self,
media: MediaElement<M>
) -> MediaElementAudioSourceNode<'_> { ... } fn create_buffer_source(&self) -> AudioBufferSourceNode<'_> { ... } fn create_panner(&self) -> PannerNode<'_> { ... } fn create_analyser(&self) -> AnalyserNode<'_> { ... } fn create_audio_param(
&self,
opts: AudioParamOptions,
dest: &AudioNodeId
) -> (AudioParam<'_>, AudioParamId) { ... } fn destination(&self) -> DestinationNode<'_> { ... } fn listener(&self) -> AudioListener<'_> { ... } fn sample_rate(&self) -> SampleRate { ... } fn current_time(&self) -> f64 { ... }
Retrieve the BaseAudioContext from the concrete AudioContext
Required methods
fn base(&self) -> &BaseAudioContext
[src]
Provided methods
fn create_oscillator(&self) -> OscillatorNode<'_>
[src]
Creates an OscillatorNode, a source representing a periodic waveform. It basically generates a tone.
fn create_gain(&self) -> GainNode<'_>
[src]
Creates a GainNode, to control audio volume
fn create_constant_source(&self) -> ConstantSourceNode<'_>
[src]
Creates a ConstantSourceNode, a source representing a constant value
fn create_delay(&self) -> DelayNode<'_>
[src]
Creates a DelayNode, delaying the audio signal
fn create_channel_splitter(
    &self,
    number_of_outputs: u32
) -> ChannelSplitterNode<'_>
[src]
Creates a ChannelSplitterNode
fn create_channel_merger(&self, number_of_inputs: u32) -> ChannelMergerNode<'_>
[src]
Creates a ChannelMergerNode
fn create_media_stream_source<M: MediaStream>(
    &self,
    media: M
) -> MediaStreamAudioSourceNode<'_>
[src]
Creates a MediaStreamAudioSourceNode from a MediaStream
fn create_media_element_source<M: MediaStream>(
    &self,
    media: MediaElement<M>
) -> MediaElementAudioSourceNode<'_>
[src]
Creates a MediaElementAudioSourceNode from a MediaElement
fn create_buffer_source(&self) -> AudioBufferSourceNode<'_>
[src]
Creates an AudioBufferSourceNode
fn create_panner(&self) -> PannerNode<'_>
[src]
Creates a PannerNode
fn create_analyser(&self) -> AnalyserNode<'_>
[src]
Creates an AnalyserNode
fn create_audio_param(
    &self,
    opts: AudioParamOptions,
    dest: &AudioNodeId
) -> (AudioParam<'_>, AudioParamId)
[src]
Create an AudioParam.
Call this inside the register
closure when setting up your AudioNode
fn destination(&self) -> DestinationNode<'_>
[src]
Returns an AudioDestinationNode representing the final destination of all audio in the context. It can be thought of as the audio-rendering device.
fn listener(&self) -> AudioListener<'_>
[src]
Returns the AudioListener which is used for 3D spatialization
fn sample_rate(&self) -> SampleRate
[src]
The sample rate (in sample-frames per second) at which the AudioContext handles audio.
fn current_time(&self) -> f64
[src]
This is the time in seconds of the sample frame immediately following the last sample-frame in the block of audio most recently processed by the context’s rendering graph.