diff --git a/examples/script_processor.rs b/examples/script_processor.rs
index f37c525c..e23e6ea6 100644
--- a/examples/script_processor.rs
+++ b/examples/script_processor.rs
@@ -28,7 +28,7 @@ fn main() {
     });
 
     let node = context.create_script_processor(512, 1, 1);
-    node.set_onaudioprocess(|e| {
+    node.set_onaudioprocess(|mut e| {
         let mut rng = rand::thread_rng();
         e.output_buffer
             .get_channel_data_mut(0)
diff --git a/src/context/mod.rs b/src/context/mod.rs
index d96477bb..621b632b 100644
--- a/src/context/mod.rs
+++ b/src/context/mod.rs
@@ -88,9 +88,8 @@ impl From<u8> for AudioContextState {
 /// Only when implementing the AudioNode trait manually, this struct is of any concern.
 ///
 /// This object allows for communication with the render thread and dynamic lifetime management.
-//
-// The only way to construct this object is by calling [`BaseAudioContext::register`]
-#[derive(Clone)]
+// The only way to construct this object is by calling [`BaseAudioContext::register`].
+// This struct should not derive Clone because of the Drop handler.
 pub struct AudioContextRegistration {
     /// the audio context in which nodes and connections lives
     context: ConcreteBaseAudioContext,
diff --git a/src/events.rs b/src/events.rs
index 42beb282..a1009c31 100644
--- a/src/events.rs
+++ b/src/events.rs
@@ -1,3 +1,4 @@
+use crate::context::ConcreteBaseAudioContext;
 use crate::context::{AudioContextState, AudioNodeId};
 use crate::{AudioBuffer, AudioRenderCapacityEvent};
 
@@ -52,6 +53,19 @@ pub struct AudioProcessingEvent {
     /// The time when the audio will be played in the same time coordinate system as the
     /// AudioContext's currentTime.
     pub playback_time: f64,
+    pub(crate) registration: Option<(ConcreteBaseAudioContext, AudioNodeId)>,
+}
+
+impl Drop for AudioProcessingEvent {
+    fn drop(&mut self) {
+        if let Some((context, id)) = self.registration.take() {
+            let wrapped = crate::message::ControlMessage::NodeMessage {
+                id,
+                msg: llq::Node::new(Box::new(self.output_buffer.clone())),
+            };
+            context.send_control_msg(wrapped);
+        }
+    }
 }
 
 /// The OfflineAudioCompletionEvent Event interface
diff --git a/src/node/script_processor.rs b/src/node/script_processor.rs
index 77a97969..be993a3a 100644
--- a/src/node/script_processor.rs
+++ b/src/node/script_processor.rs
@@ -137,20 +137,26 @@
     /// the inputBuffer attribute. The audio data which is the result of the processing (or the
     /// synthesized data if there are no inputs) is then placed into the outputBuffer.
     ///
+    /// The output buffer is shipped back to the render thread when the AudioProcessingEvent goes
+    /// out of scope, so be sure not to store it somewhere.
+    ///
     /// Only a single event handler is active at any time. Calling this method multiple times will
     /// override the previous event handler.
-    pub fn set_onaudioprocess<F: FnMut(&mut AudioProcessingEvent) + Send + 'static>(
+    pub fn set_onaudioprocess<F: FnMut(AudioProcessingEvent) + Send + 'static>(
         &self,
         mut callback: F,
     ) {
-        let registration = self.registration.clone();
+        // We need these fields to ship the output buffer to the render thread
+        let base = self.registration().context().clone();
+        let id = self.registration().id();
+
         let callback = move |v| {
             let mut payload = match v {
                 EventPayload::AudioProcessing(v) => v,
                 _ => unreachable!(),
             };
-            callback(&mut payload);
-            registration.post_message(payload.output_buffer);
+            payload.registration = Some((base.clone(), id));
+            callback(payload);
         };
 
         self.context().set_event_handler(
@@ -305,7 +311,7 @@ mod tests {
         let node = context.create_script_processor(BUFFER_SIZE, 0, 1);
         node.connect(&context.destination());
 
-        node.set_onaudioprocess(|e| {
+        node.set_onaudioprocess(|mut e| {
             e.output_buffer.get_channel_data_mut(0).fill(1.); // set all samples to 1.
         });
 
@@ -336,7 +342,7 @@
         // 2 input channels, 2 output channels
         let node = context.create_script_processor(BUFFER_SIZE, 2, 2);
        node.connect(&context.destination());
-        node.set_onaudioprocess(|e| {
+        node.set_onaudioprocess(|mut e| {
            // left output buffer is left input * 2
             e.output_buffer
                 .get_channel_data_mut(0)
diff --git a/src/render/processor.rs b/src/render/processor.rs
index 3376dea6..a543f9f2 100644
--- a/src/render/processor.rs
+++ b/src/render/processor.rs
@@ -68,6 +68,7 @@ impl AudioWorkletGlobalScope {
             input_buffer,
             output_buffer,
             playback_time,
+            registration: None,
         };
         let dispatch = EventDispatch::audio_processing(self.node_id.get(), event);
         let _ = self.event_sender.try_send(dispatch);
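For context, here is a minimal usage sketch of the API after this change (not part of the patch). The imports and the `AudioContext::default()` constructor are assumed from the crate's public API rather than from this diff, and the pass-through processing plus the final `sleep` are illustrative only. The point is that the callback now receives the `AudioProcessingEvent` by value (`|mut e|`), and the new Drop impl ships the output buffer back to the render thread as soon as the closure returns:

```rust
use web_audio_api::context::{AudioContext, BaseAudioContext};
use web_audio_api::node::AudioNode;

fn main() {
    let context = AudioContext::default();

    // 512-frame buffer, 1 input channel, 1 output channel (mirrors the example above)
    let node = context.create_script_processor(512, 1, 1);

    node.set_onaudioprocess(|mut e| {
        // Illustrative pass-through: copy the input channel into the output channel.
        let input: Vec<f32> = e.input_buffer.get_channel_data(0).to_vec();
        e.output_buffer
            .get_channel_data_mut(0)
            .copy_from_slice(&input);
        // Do not stash `e` anywhere: dropping it here is what returns `output_buffer`
        // to the render thread via the new Drop impl on AudioProcessingEvent.
    });
    node.connect(&context.destination());

    // Keep the process alive for a moment so some audio is rendered (illustrative).
    std::thread::sleep(std::time::Duration::from_secs(1));
}
```

Moving the hand-off into the event's Drop impl removes the need for the wrapper closure to clone the node's `AudioContextRegistration` and call `post_message` on it, which in turn is what allows `#[derive(Clone)]` to be dropped from that struct.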