diff --git a/.idea/deployment.xml b/.idea/deployment.xml
new file mode 100644
index 0000000..fa53927
--- /dev/null
+++ b/.idea/deployment.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..64d456c
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "rust-analyzer.cargo.features": [
+ "pipewire"
+ ]
+}
\ No newline at end of file
diff --git a/crates/interflow-core/Cargo.toml b/crates/interflow-core/Cargo.toml
new file mode 100644
index 0000000..a1a4e23
--- /dev/null
+++ b/crates/interflow-core/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "interflow-core"
+version.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+
+[dependencies]
+bitflags.workspace = true
+duplicate.workspace = true
+thiserror.workspace = true
+zerocopy = { version = "0.8.39", features = ["alloc"] }
\ No newline at end of file
diff --git a/crates/interflow-core/src/buffer.rs b/crates/interflow-core/src/buffer.rs
new file mode 100644
index 0000000..bca0248
--- /dev/null
+++ b/crates/interflow-core/src/buffer.rs
@@ -0,0 +1,411 @@
+use std::num::NonZeroUsize;
+use std::ops;
+use zerocopy::FromZeros;
+
/// Audio buffer type. Data is stored in a contiguous array, in non-interleaved
/// (channel-major) format: all frames of channel 0, then all frames of channel 1, etc.
///
/// Indexing with a `usize` yields a whole channel as a slice; indexing with a
/// `(frame, channel)` pair yields a single sample.
#[derive(Clone)]
pub struct AudioBuffer<T> {
    /// Sample storage, `channels * frames` elements, channel-major.
    data: Box<[T]>,
    /// Number of frames per channel; `data.len()` is always a multiple of this.
    frames: NonZeroUsize,
}

impl<T> ops::Index<usize> for AudioBuffer<T> {
    type Output = [T];

    /// `buffer[channel]` returns the whole channel as a slice.
    fn index(&self, index: usize) -> &Self::Output {
        // Inlined slicing, symmetric with `index_mut` below.
        &self.data[index * self.frames.get()..(index + 1) * self.frames.get()]
    }
}

impl<T> ops::IndexMut<usize> for AudioBuffer<T> {
    /// `buffer[channel]` returns the whole channel as a mutable slice.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.data[index * self.frames.get()..(index + 1) * self.frames.get()]
    }
}

impl<T> ops::Index<(usize, usize)> for AudioBuffer<T> {
    type Output = T;

    /// `buffer[(frame, channel)]` returns a single sample.
    fn index(&self, (index, channel): (usize, usize)) -> &Self::Output {
        &self.data[channel * self.frames.get() + index]
    }
}

impl<T> ops::IndexMut<(usize, usize)> for AudioBuffer<T> {
    /// `buffer[(frame, channel)]` returns a single sample, mutably.
    fn index_mut(&mut self, (index, channel): (usize, usize)) -> &mut Self::Output {
        &mut self.data[channel * self.frames.get() + index]
    }
}
+
+impl AudioBuffer
+where
+ [T]: FromZeros,
+{
+ /// Creates a new buffer with the given number of frames and channels. The audio buffer will be zeroed out.
+ pub fn zeroed(frames: NonZeroUsize, channels: NonZeroUsize) -> Self {
+ let len = frames.get() * channels.get();
+ AudioBuffer {
+ data: <[T] as FromZeros>::new_box_zeroed_with_elems(len).unwrap(),
+ frames,
+ }
+ }
+
+ pub fn resize_channels(&mut self, channels: NonZeroUsize) {
+ let mut data =
+ <[T] as FromZeros>::new_box_zeroed_with_elems(channels.get() * self.frames.get())
+ .unwrap();
+ for channel in 0..channels.get() {
+ let old_channel = channel % self.channels();
+ let old_data =
+ &self.data[old_channel * self.frames.get()..(old_channel + 1) * self.frames.get()];
+ data[channel * self.frames.get()..(channel + 1) * self.frames.get()]
+ .copy_from_slice(old_data);
+ }
+ self.data = data;
+ }
+
+ pub fn resize_frames(&mut self, frames: NonZeroUsize) {
+ let mut data =
+ <[T] as FromZeros>::new_box_zeroed_with_elems(frames.get() * self.channels()).unwrap();
+ let min_frames = self.frames.min(frames).get();
+ data[..min_frames * self.channels()]
+ .copy_from_slice(&self.data[..min_frames * self.channels()]);
+ self.frames = frames;
+ self.data = data;
+ }
+
+ pub fn copy_to_interleaved(&self, out: &mut [T]) {
+ debug_assert!(out.len() >= self.channels() * self.frames());
+ for (i, sample) in out.iter_mut().enumerate() {
+ let frame = i / self.channels();
+ let channel = i % self.channels();
+ let i = channel * self.frames.get() + frame;
+ *sample = self.data[i];
+ }
+ }
+
+ pub fn copy_from_interleaved(&mut self, data: &[T]) {
+ debug_assert!(data.len() <= self.channels() * self.frames());
+ for (i, sample) in data.iter().enumerate() {
+ let frame = i / self.channels();
+ let channel = i % self.channels();
+ let i = channel * self.frames.get() + frame;
+ self.data[i] = *sample;
+ }
+ }
+
+ pub fn as_ref(&self) -> AudioRef<'_, T> {
+ AudioRef {
+ buffer: self,
+ frame_slice: (0, self.frames.get()),
+ }
+ }
+
+ pub fn as_mut(&mut self) -> AudioMut<'_, T> {
+ let end = self.frames.get();
+ AudioMut {
+ buffer: self,
+ frame_slice: (0, end),
+ }
+ }
+}
+
/// Errors returned when constructing an [`AudioBuffer`] from an existing data box.
#[derive(Debug, thiserror::Error)]
pub enum FromDataError {
    /// The provided data contained no samples.
    #[error("Empty buffer")]
    Empty,
    /// The data length is not an exact multiple of the requested channel count.
    #[error("Invalid number of channels: {channels} for buffer length {len} (len % channels) == {}", len % channels)]
    InvalidChannelCount { len: usize, channels: usize },
    /// The data length is not an exact multiple of the requested frame count.
    #[error("Invalid number of frames: {frames} for buffer length {len} (len % frames) == {}", len % frames)]
    InvalidFrameCount { len: usize, frames: usize },
}
+
+impl AudioBuffer {
+ pub fn from_fn(
+ channels: NonZeroUsize,
+ frames: NonZeroUsize,
+ f: impl Fn(usize, usize) -> T,
+ ) -> Self {
+ let mut data = Vec::with_capacity(channels.get() * frames.get());
+ for channel in 0..channels.get() {
+ for frame in 0..frames.get() {
+ data.push(f(channel, frame));
+ }
+ }
+ AudioBuffer {
+ data: data.into_boxed_slice(),
+ frames,
+ }
+ }
+
+ pub fn from_data_channels(
+ data: Box<[T]>,
+ channels: NonZeroUsize,
+ ) -> Result {
+ if data.is_empty() {
+ return Err(FromDataError::Empty);
+ }
+ if data.len() % channels.get() != 0 {
+ return Err(FromDataError::InvalidChannelCount {
+ len: data.len(),
+ channels: channels.get(),
+ });
+ }
+
+ let frames = NonZeroUsize::new(data.len() / channels.get()).unwrap();
+ Ok(AudioBuffer { data, frames })
+ }
+
+ pub fn from_data_frames(data: Box<[T]>, frames: NonZeroUsize) -> Result {
+ if data.is_empty() {
+ return Err(FromDataError::Empty);
+ }
+ if data.len() % frames.get() != 0 {
+ return Err(FromDataError::InvalidFrameCount {
+ len: data.len(),
+ frames: frames.get(),
+ });
+ }
+
+ Ok(AudioBuffer { data, frames })
+ }
+
+ pub fn frames(&self) -> usize {
+ self.frames.get()
+ }
+
+ pub fn channels(&self) -> usize {
+ self.data.len() / self.frames.get()
+ }
+
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.data.is_empty()
+ }
+
+ //noinspection ALL
+ #[duplicate::duplicate_item(
+ name reference(type) out;
+ [frame] [&'_ type] [FrameRef::<'_, T>];
+ [frame_mut] [&'_ mut type] [FrameMut::<'_, T>];
+ )]
+ pub fn name(self: reference([Self]), frame: usize) -> out {
+ debug_assert!(frame < self.frames());
+ out {
+ buffer: self,
+ frame,
+ }
+ }
+
+ #[duplicate::duplicate_item(
+ name reference(type);
+ [channel] [& type];
+ [channel_mut] [&mut type];
+ )]
+ pub fn name(self: reference([Self]), channel: usize) -> reference([[T]]) {
+ debug_assert!(channel < self.channels());
+ reference([self.data[channel * self.frames.get()..(channel + 1) * self.frames.get()]])
+ }
+
+ // noinspection ALL
+ #[duplicate::duplicate_item(
+ name reference(type) out;
+ [slice] [& type] [AudioRef::<'_, T>];
+ [slice_mut] [&mut type] [AudioMut::<'_, T>];
+ )]
+ pub fn name(self: reference([Self]), index: impl ops::RangeBounds) -> out {
+ let begin = match index.start_bound() {
+ ops::Bound::Included(i) => *i,
+ ops::Bound::Excluded(i) => *i + 1,
+ ops::Bound::Unbounded => 0,
+ };
+ let end = match index.end_bound() {
+ ops::Bound::Included(i) => *i - 1,
+ ops::Bound::Excluded(i) => *i,
+ ops::Bound::Unbounded => self.frames(),
+ };
+ debug_assert!(begin <= end);
+ debug_assert!(end <= self.frames());
+ out {
+ buffer: self,
+ frame_slice: (begin, end + 1),
+ }
+ }
+
+ pub fn chunks(&self, size: usize) -> impl Iterator- > {
+ (0..self.frames())
+ .step_by(size)
+ .map(move |frame| self.slice(frame..(frame + size).min(self.frames())))
+ }
+
+ pub fn chunks_exact(&self, size: usize) -> impl Iterator
- > {
+ (0..self.frames()).step_by(size).filter_map(move |frame| {
+ let end = frame + size;
+ if end > self.frames() {
+ return None;
+ }
+ Some(self.slice(frame..end))
+ })
+ }
+
+ pub fn windows(&self, size: usize) -> impl Iterator
- > {
+ (0..self.frames() - size).map(move |frame| self.slice(frame..(frame + size)))
+ }
+
+ pub fn iter_frames(&self) -> impl Iterator
- > {
+ (0..self.frames()).map(move |frame| self.frame(frame))
+ }
+
+ pub fn iter_frames_mut(&mut self) -> impl Iterator
- > {
+ IterFramesMut {
+ buffer: self,
+ frame: 0,
+ }
+ }
+
+ pub fn iter_channels(&self) -> impl Iterator
- {
+ self.data.chunks(self.frames.get())
+ }
+
+ pub fn iter_channels_mut(&mut self) -> impl Iterator
- {
+ self.data.chunks_mut(self.frames.get())
+ }
+
+ pub fn get_channels(&self, indices: [usize; N]) -> [&[T]; N] {
+ indices.map(|i| self.channel(i))
+ }
+
+ pub fn get_channels_mut(&mut self, indices: [usize; N]) -> [&mut [T]; N] {
+ self.data.get_disjoint_mut(indices.map(|i| i * self.frames.get()..(i + 1) * self.frames.get())).unwrap()
+ }
+}
+
+#[duplicate::duplicate_item(
+name reference(lifetime, type) derive;
+[FrameRef] [&'lifetime type] [derive(Clone, Copy)];
+[FrameMut] [&'lifetime mut type] [derive()]
+)]
+#[derive]
+pub struct name<'a, T> {
+ buffer: reference([a], [AudioBuffer]),
+ frame: usize,
+}
+
+#[duplicate::duplicate_item(
+name;
+[FrameRef];
+[FrameMut];
+)]
+impl name<'_, T> {
+ pub fn get(&self, channel: usize) -> T {
+ debug_assert!(channel < self.buffer.channels());
+ self.buffer[channel][self.frame]
+ }
+
+ pub fn get_frame(&self, out: &mut [T]) {
+ debug_assert!(out.len() >= self.buffer.channels());
+ for (channel, value) in out.iter_mut().enumerate() {
+ *value = self.get(channel);
+ }
+ }
+}
+
+impl FrameMut<'_, T> {
+ pub fn set(&mut self, channel: usize, value: T) {
+ debug_assert!(channel < self.buffer.channels());
+ self.buffer[channel][self.frame] = value;
+ }
+
+ pub fn set_frame(&mut self, data: &[T]) {
+ debug_assert!(data.len() >= self.buffer.channels());
+ for (channel, value) in data.iter().enumerate() {
+ self.set(channel, *value);
+ }
+ }
+}
+
+struct IterFramesMut<'a, T> {
+ buffer: &'a mut AudioBuffer,
+ frame: usize,
+}
+
+impl<'a, T> Iterator for IterFramesMut<'a, T> {
+ type Item = FrameMut<'a, T>;
+ fn next(&mut self) -> Option {
+ if self.frame < self.buffer.frames() {
+ let frame = self.frame;
+ self.frame += 1;
+ // SAFETY:
+ // Lifetime of the frame is actually 'a, but the compiler cannot see that
+ unsafe {
+ let buffer_ptr = self.buffer as *mut AudioBuffer;
+ Some((*buffer_ptr).frame_mut(frame))
+ }
+ } else {
+ None
+ }
+ }
+}
+
+#[duplicate::duplicate_item(
+name reference(lifetime, type) derive;
+[AudioRef] [&'lifetime type] [derive(Clone, Copy)];
+[AudioMut] [&'lifetime mut type] [derive()]
+)]
+#[derive]
+pub struct name<'a, T> {
+ buffer: reference([a], [AudioBuffer]),
+ frame_slice: (usize, usize),
+}
+
+#[duplicate::duplicate_item(
+name;
+[AudioRef];
+[AudioMut];
+)]
+impl ops::Index for name<'_, T> {
+ type Output = [T];
+
+ fn index(&self, index: usize) -> &Self::Output {
+ &self.buffer[self.frame_slice.0 + index]
+ }
+}
+
+impl ops::IndexMut for AudioMut<'_, T> {
+ fn index_mut(&mut self, index: usize) -> &mut Self::Output {
+ &mut self.buffer[self.frame_slice.0 + index]
+ }
+}
+
+#[duplicate::duplicate_item(
+name;
+[AudioRef];
+[AudioMut];
+)]
+impl name<'_, T> {
+ pub fn frame(&self, index: usize) -> FrameRef<'_, T> {
+ self.buffer.frame(self.frame_slice.0 + index)
+ }
+
+ pub fn channel(&self, channel: usize) -> &[T] {
+ let slice = self.buffer.channel(channel);
+ &slice[self.frame_slice.0..self.frame_slice.1]
+ }
+}
+
+impl AudioMut<'_, T> {
+ pub fn frame_mut(&mut self, index: usize) -> FrameMut<'_, T> {
+ let frame = index + self.frame_slice.0;
+ debug_assert!(frame < self.frame_slice.1);
+ FrameMut {
+ buffer: self.buffer,
+ frame,
+ }
+ }
+
+ pub fn channel_mut(&mut self, channel: usize) -> &mut [T] {
+ let slice = self.buffer.channel_mut(channel);
+ &mut slice[self.frame_slice.0..self.frame_slice.1]
+ }
+}
diff --git a/crates/interflow-core/src/device.rs b/crates/interflow-core/src/device.rs
new file mode 100644
index 0000000..aaa1079
--- /dev/null
+++ b/crates/interflow-core/src/device.rs
@@ -0,0 +1,139 @@
+use std::borrow::Cow;
+use crate::DeviceType;
+use crate::stream::{self, StreamHandle};
+use crate::traits::ExtensionProvider;
+
/// Configuration for an audio stream.
#[derive(Debug, Clone, PartialEq)]
pub struct StreamConfig {
    /// Configured sample rate of the requested stream. The opened stream can have a different
    /// sample rate, so don't rely on this parameter being correct at runtime.
    pub sample_rate: f64,
    /// Number of input channels requested
    pub input_channels: usize,
    /// Number of output channels requested
    pub output_channels: usize,
    /// Range of preferential buffer sizes, in units of audio samples per channel, as
    /// `(min, max)` where `None` means "no preference" on that end.
    /// The library will make a best-effort attempt at honoring this setting, and in future versions
    /// may provide additional buffering to ensure it, but for now you should not make assumptions
    /// on buffer sizes based on this setting.
    pub buffer_size_range: (Option<usize>, Option<usize>),
    /// Whether the device should be exclusively held (meaning no other application can open the
    /// same device).
    pub exclusive: bool,
}
+
+impl StreamConfig {
+ /// Returns a [`DeviceType`] that describes this [`StreamConfig`]. Only [`DeviceType::INPUT`] and
+ /// [`DeviceType::OUTPUT`] are set.
+ pub fn requested_device_type(&self) -> DeviceType {
+ let mut ret = DeviceType::empty();
+ ret.set(DeviceType::INPUT, self.input_channels > 0);
+ ret.set(DeviceType::OUTPUT, self.output_channels > 0);
+ ret
+ }
+
+ /// Changes the [`StreamConfig`] such that it matches the configuration of a stream created with a device with
+ /// the given [`DeviceType`].
+ ///
+ /// This method returns a copy of the input [`StreamConfig`].
+ pub fn restrict(mut self, requested_type: DeviceType) -> Self {
+ if !requested_type.is_input() {
+ self.input_channels = 0;
+ }
+ if !requested_type.is_output() {
+ self.output_channels = 0;
+ }
+ self
+ }
+}
+
/// Resolved configuration of an audio stream, as reported once the stream has been opened.
/// Unlike [`StreamConfig`], these values describe the stream that was actually created.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct ResolvedStreamConfig {
    /// Sample rate of the opened stream.
    pub sample_rate: f64,
    /// Number of input channels of the opened stream
    pub input_channels: usize,
    /// Number of output channels of the opened stream
    pub output_channels: usize,
    /// Maximum number of frames the audio callback will receive
    pub max_frame_count: usize,
}
+
/// Trait for types describing audio devices. Audio devices have zero or more inputs and outputs,
/// and depending on the driver, can be duplex devices which can provide both of them at the same
/// time natively.
///
/// NOTE(review): generic parameters in this trait appear to have been stripped during
/// extraction (e.g. `StreamHandle>` and the `Result,` return types below are not valid
/// syntax); restore them from the original source — TODO confirm.
pub trait Device: ExtensionProvider {
    /// Error type produced by this device's operations.
    type Error: Send + Sync + std::error::Error;
    /// Handle type returned for streams created on this device.
    type StreamHandle: StreamHandle>;

    /// Human-readable device name.
    fn name(&self) -> Cow<'_, str>;

    /// Capabilities of this device (input/output/physical/…).
    fn device_type(&self) -> DeviceType;

    /// Default configuration for this device. If [`Ok`], should return a [`StreamConfig`] that is supported (i.e.,
    /// returns `true` when passed to [`Self::is_config_supported`]).
    fn default_config(&self) -> Result;

    /// Returns the supported I/O buffer size range for the device.
    /// The default implementation reports no constraint on either end.
    fn buffer_size_range(&self) -> Result<(Option, Option), Self::Error> {
        Ok((None, None))
    }

    /// Not all configuration values make sense for a particular device, and this method tests a
    /// configuration to see if it can be used in an audio stream.
    fn is_config_supported(&self, config: &StreamConfig) -> bool;

    /// Creates a stream with the provided stream configuration. For this call to be
    /// valid, [`Self::is_config_supported`] should have returned `true` on the provided
    /// configuration.
    ///
    /// A callback is required to process the audio, whose ownership will be transferred
    /// to the audio stream.
    fn create_stream(
        &self,
        stream_config: StreamConfig,
        callback: Callback,
    ) -> Result, Self::Error>;

    /// Create a stream using the default configuration as returned by [`Self::default_config`],
    /// restricted to the requested device type.
    ///
    /// # Arguments
    ///
    /// - `requested_type`: directions (input/output) the stream should use.
    /// - `callback`: Callback to process audio data with.
    fn default_stream(
        &self,
        requested_type: DeviceType,
        callback: Callback,
    ) -> Result, Self::Error> {
        let config = self.default_config()?.restrict(requested_type);
        debug_assert!(
            self.is_config_supported(&config),
            "Default configuration is not supported"
        );
        self.create_stream(config, callback)
    }
}
+
/// Audio channel description.
#[derive(Debug, Clone)]
pub struct Channel<'a> {
    /// Index of the channel in the device
    pub index: usize,
    /// Display name for the channel, if available, else a generic name like "Channel 1"
    pub name: Cow<'a, str>,
}

/// Optional extension for devices that can name their individual channels.
/// NOTE(review): the iterator item type was stripped here (presumably
/// `Item = Channel<'_>`) — TODO confirm against the original source.
pub trait NamedChannels {
    fn channel_map(&self) -> impl Iterator
- >;
}

/// Optional extension for devices that can enumerate their supported configurations.
/// NOTE(review): the iterator item type was stripped here (presumably
/// `Item = StreamConfig`) — TODO confirm against the original source.
pub trait ConfigurationList {
    fn enumerate_configurations(&self) -> impl Iterator
- ;
}

/// Optional extension for devices that can report their connection state.
pub trait DeviceState {
    /// Returns `true` while the device is still connected/available.
    fn connected(&self) -> bool;
}
\ No newline at end of file
diff --git a/crates/interflow-core/src/lib.rs b/crates/interflow-core/src/lib.rs
new file mode 100644
index 0000000..9beb874
--- /dev/null
+++ b/crates/interflow-core/src/lib.rs
@@ -0,0 +1,65 @@
+pub mod platform;
+pub mod traits;
+pub mod device;
+pub mod stream;
+pub mod proxies;
+pub mod timing;
+pub mod buffer;
+
+use bitflags::bitflags;
+
bitflags! {
    /// Represents the types/capabilities of an audio device.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct DeviceType: u32 {
        /// Device supports audio input.
        const INPUT = 1 << 0;

        /// Device supports audio output.
        const OUTPUT = 1 << 1;

        /// Physical audio device (hardware).
        const PHYSICAL = 1 << 2;

        /// Virtual/software application device.
        const APPLICATION = 1 << 3;

        /// This device is set as default
        const DEFAULT = 1 << 4;

        /// Device that supports both input and output (union of `INPUT` and `OUTPUT`).
        const DUPLEX = Self::INPUT.bits() | Self::OUTPUT.bits();
    }
}

impl DeviceType {
    /// Returns true if this device type has the input capability.
    pub fn is_input(&self) -> bool {
        self.contains(Self::INPUT)
    }

    /// Returns true if this device type has the output capability.
    pub fn is_output(&self) -> bool {
        self.contains(Self::OUTPUT)
    }

    /// Returns true if this device type is a physical device.
    pub fn is_physical(&self) -> bool {
        self.contains(Self::PHYSICAL)
    }

    /// Returns true if this device type is an application/virtual device.
    pub fn is_application(&self) -> bool {
        self.contains(Self::APPLICATION)
    }

    /// Returns true if this device is set as default
    pub fn is_default(&self) -> bool {
        self.contains(Self::DEFAULT)
    }

    /// Returns true if this device type supports both input and output.
    /// Note: requires *both* flags, since `DUPLEX` is a multi-bit value and
    /// `contains` checks all of its bits.
    pub fn is_duplex(&self) -> bool {
        self.contains(Self::DUPLEX)
    }
}
diff --git a/crates/interflow-core/src/platform.rs b/crates/interflow-core/src/platform.rs
new file mode 100644
index 0000000..a1b5f90
--- /dev/null
+++ b/crates/interflow-core/src/platform.rs
@@ -0,0 +1,19 @@
+use std::borrow::Cow;
+use crate::device::Device;
+use crate::DeviceType;
+use crate::traits::ExtensionProvider;
+
/// Trait for platforms which provide audio devices.
///
/// NOTE(review): generic/associated-type parameters look stripped here (e.g. `Device>`,
/// and the `default_device`/`list_devices` return types are not valid syntax); restore
/// them from the original source — TODO confirm.
pub trait Platform: ExtensionProvider {
    /// Error type for platform-level operations.
    type Error: Send + Sync + std::error::Error;
    /// Device type exposed by this platform.
    type Device: Device>;
    /// Human-readable name of this platform/backend.
    const NAME: &'static str;

    /// Returns the default device matching the given device type.
    fn default_device(device_type: DeviceType) -> Result;

    /// Enumerates the devices currently available on this platform.
    fn list_devices(&self) -> Result, Self::Error>;
}

/// Optional extension exposing information about the underlying audio server.
pub trait ServerInfo {
    /// Version string of the audio server, as reported by the backend.
    fn version(&self) -> Cow<'_, str>;
}
\ No newline at end of file
diff --git a/crates/interflow-core/src/proxies.rs b/crates/interflow-core/src/proxies.rs
new file mode 100644
index 0000000..3c9f5dc
--- /dev/null
+++ b/crates/interflow-core/src/proxies.rs
@@ -0,0 +1,48 @@
+use std::borrow::Cow;
+use crate::device::{Device, StreamConfig};
+use crate::DeviceType;
+use crate::traits::ExtensionProvider;
+
/// Boxed, type-erased error used by the proxy traits so heterogeneous backends can share a
/// single object-safe interface. `Send + Sync` matches the bounds on `Device::Error`.
pub type Error = Box<dyn std::error::Error + Send + Sync>;
+
+pub trait DeviceProxy: ExtensionProvider {
+ fn name(&self) -> Cow<'_, str>;
+ fn device_type(&self) -> DeviceType;
+ fn default_config(&self) -> Result;
+ fn is_config_supported(&self, config: &StreamConfig) -> bool;
+ fn buffer_size_range(&self) -> Result<(Option, Option), Error>;
+}
+
+impl DeviceProxy for D {
+ #[inline]
+ fn name(&self) -> Cow<'_, str> {
+ Device::name(self)
+ }
+
+ fn device_type(&self) -> DeviceType {
+ Device::device_type(self)
+ }
+
+ fn default_config(&self) -> Result {
+ Ok(Device::default_config(self)?)
+ }
+
+ fn is_config_supported(&self, config: &StreamConfig) -> bool {
+ Device::is_config_supported(self, config)
+ }
+
+ fn buffer_size_range(&self) -> Result<(Option, Option), Error> {
+ Ok(Device::buffer_size_range(self)?)
+ }
+}
+
+pub trait IntoDeviceProxy {
+ fn into_device_proxy(self) -> Box;
+}
+
+impl IntoDeviceProxy for D {
+ #[inline]
+ fn into_device_proxy(self) -> Box {
+ Box::new(self)
+ }
+}
diff --git a/crates/interflow-core/src/stream.rs b/crates/interflow-core/src/stream.rs
new file mode 100644
index 0000000..29b9fe0
--- /dev/null
+++ b/crates/interflow-core/src/stream.rs
@@ -0,0 +1,78 @@
+use bitflags::bitflags;
+use crate::buffer::{AudioMut, AudioRef};
+use crate::device::ResolvedStreamConfig;
+use crate::timing::Timestamp;
+use crate::traits::ExtensionProvider;
+
/// Marker trait for objects that give callback code access to stream-level extensions.
pub trait StreamProxy: Send + Sync + ExtensionProvider {}

bitflags! {
    /// Per-channel status flags passed alongside the audio buffers.
    pub struct ChannelFlags: u32 {
        /// The channel is currently inactive.
        const INACTIVE = 0x0000_0001;
        /// The channel underflowed.
        const UNDERFLOW = 0x0000_0002;
        /// The channel overflowed.
        const OVERFLOW = 0x0000_0004;
    }
}

/// Optional extension reporting per-channel stream latencies.
/// NOTE(review): the `Option` payload type was stripped here (samples? seconds?) — TODO
/// confirm against the original source.
pub trait StreamLatency {
    fn input_latency(&self, channel: usize) -> Option;
    fn output_latency(&self, channel: usize) -> Option;
}
+
+#[duplicate::duplicate_item(
+ name bufty;
+ [AudioInput] [AudioRef < 'a, T >];
+ [AudioOutput] [AudioMut < 'a, T >];
+)]
+/// Plain-old-data object holding references to the audio buffer and the associated time-keeping
+/// [`Timestamp`]. This timestamp is associated with the stream, and in the cases where the
+/// driver provides timing information, it is used instead of relying on sample-counting.
+pub struct name<'a, T> {
+ /// Associated time stamp for this callback. The time represents the duration for which the
+ /// stream has been opened, and is either provided by the driver if available, or is kept up
+ /// manually by the library.
+ pub timestamp: Timestamp,
+ /// Audio buffer data.
+ pub buffer: bufty,
+ pub channel_flags: &'a [ChannelFlags],
+}
+
/// Plain-old-data object holding the passed-in stream configuration, as well as a general
/// callback timestamp, which can be different from the input and output streams in case of
/// cross-stream latencies; differences in timing can indicate desync.
pub struct CallbackContext<'a> {
    /// Passed-in stream configuration. Values have been updated where necessary to correspond to
    /// the actual stream properties.
    pub stream_config: &'a ResolvedStreamConfig,
    /// Callback-wide timestamp.
    pub timestamp: Timestamp,
    /// Access to the owning stream's extensions (see [`StreamProxy`]).
    pub stream_proxy: &'a dyn StreamProxy,
}
+
/// Trait for types which handle an audio stream (input or output). `Callback` is the
/// user-provided callback type whose ownership the stream holds while running.
pub trait StreamHandle<Callback> {
    /// Type of errors which have caused the stream to fail.
    type Error: Send + std::error::Error;

    /// Eject the stream, returning ownership of the callback.
    ///
    /// # Errors
    ///
    /// An error can occur when an irrecoverable error has occurred and ownership has been lost
    /// already.
    fn eject(self) -> Result<Callback, Self::Error>;
}
+
/// Trait of types which process audio data. This is the trait that users will want to
/// implement when processing audio from a device.
///
/// NOTE(review): generic parameters were stripped from this trait's argument types (the
/// sample type / lifetime of `AudioInput`/`AudioOutput`) — restore them from the original
/// source; TODO confirm.
pub trait Callback: Send {
    /// Prepare the audio callback to process audio. This function is *not* real-time safe (i.e., allocations can be
    /// performed), in preparation for processing the stream with [`Self::process_audio`].
    fn prepare(&mut self, context: CallbackContext);

    /// Callback called when audio data can be processed.
    fn process_audio(
        &mut self,
        context: CallbackContext,
        input: AudioInput,
        output: AudioOutput,
    );
}
diff --git a/crates/interflow-core/src/timing.rs b/crates/interflow-core/src/timing.rs
new file mode 100644
index 0000000..5b01269
--- /dev/null
+++ b/crates/interflow-core/src/timing.rs
@@ -0,0 +1,175 @@
+use std::ops;
+use std::ops::AddAssign;
+use std::sync::atomic::AtomicU64;
+use std::time::Duration;
+
/// Timestamp value, which computes duration information from a provided samplerate and a running
/// sample counter.
///
/// You can update the timestamp by add-assigning sample counts to it:
///
/// ```rust
/// use std::time::Duration;
/// use interflow_core::timing::Timestamp;
/// let mut ts = Timestamp::new(48000.);
/// assert_eq!(ts.as_duration(), Duration::from_nanos(0));
/// ts += 48;
/// assert_eq!(ts.as_duration(), Duration::from_millis(1));
/// ```
///
/// Adding also works, returning a new timestamp:
///
/// ```rust
/// use std::time::Duration;
/// use interflow_core::timing::Timestamp;
/// let mut ts = Timestamp::new(48000.);
/// assert_eq!(ts.as_duration(), Duration::from_nanos(0));
/// let ts2 = ts + 48;
/// assert_eq!(ts.as_duration(), Duration::from_millis(0));
/// assert_eq!(ts2.as_duration(), Duration::from_millis(1));
/// ```
///
/// Similarly, you can compute sample offsets by adding a [`Duration`] to it:
///
/// ```rust
/// use std::time::Duration;
/// use interflow_core::timing::Timestamp;
/// let ts = Timestamp::from_count(48000., 48);
/// let ts_off = ts + Duration::from_millis(100);
/// assert_eq!(ts_off.as_duration(), Duration::from_millis(101));
/// assert_eq!(ts_off.counter, 4848);
/// ```
///
/// Or simply construct a [`Timestamp`] from a specified duration:
///
/// ```rust
/// use std::time::Duration;
/// use interflow_core::timing::Timestamp;
/// let ts = Timestamp::from_duration(44100., Duration::from_millis(1));
/// assert_eq!(ts.counter, 44); // Note that the conversion is lossy, as only whole samples are
/// // stored in the timestamp.
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Timestamp {
    /// Number of samples counted in this timestamp.
    pub counter: u64,
    /// Samplerate of the audio stream associated with the counter.
    pub samplerate: f64,
}

impl AddAssign<Duration> for Timestamp {
    /// Advances the counter by the number of whole samples spanned by `rhs`
    /// (fractional samples are truncated).
    fn add_assign(&mut self, rhs: Duration) {
        let samples = rhs.as_secs_f64() * self.samplerate;
        self.counter += samples as u64;
    }
}

impl AddAssign<u64> for Timestamp {
    /// Advances the counter by `rhs` samples.
    fn add_assign(&mut self, rhs: u64) {
        self.counter += rhs;
    }
}

impl<T> ops::Add<T> for Timestamp
where
    Self: AddAssign<T>,
{
    type Output = Self;

    /// Returns a new timestamp advanced by `rhs` (a sample count or a [`Duration`]).
    fn add(mut self, rhs: T) -> Self {
        self.add_assign(rhs);
        self
    }
}

impl Timestamp {
    /// Create a zeroed timestamp with the provided sample rate.
    pub fn new(samplerate: f64) -> Self {
        Self {
            counter: 0,
            samplerate,
        }
    }

    /// Create a timestamp from the given sample rate and existing sample count.
    pub fn from_count(samplerate: f64, counter: u64) -> Self {
        Self {
            samplerate,
            counter,
        }
    }

    /// Compute the sample offset that most closely matches the provided duration for the given
    /// sample rate.
    pub fn from_duration(samplerate: f64, duration: Duration) -> Self {
        Self::from_seconds(samplerate, duration.as_secs_f64())
    }

    /// Compute the sample offset that most closely matches the provided duration in seconds for
    /// the given sample rate. Fractional samples are truncated.
    pub fn from_seconds(samplerate: f64, seconds: f64) -> Self {
        let samples = samplerate * seconds;
        Self {
            samplerate,
            counter: samples as _,
        }
    }

    /// Compute the number of seconds represented in this [`Timestamp`].
    pub fn as_seconds(&self) -> f64 {
        self.counter as f64 / self.samplerate
    }

    /// Compute the duration represented by this [`Timestamp`].
    pub fn as_duration(&self) -> Duration {
        Duration::from_secs_f64(self.as_seconds())
    }
}

/// Atomic version of [`Timestamp`] to be shared between threads (e.g. by duplex stream
/// implementations), but may be useful in user code as well.
pub struct AtomicTimestamp {
    /// Bit pattern of the `f64` samplerate (stored via [`f64::to_bits`]).
    samplerate: AtomicU64,
    /// Running sample counter.
    counter: AtomicU64,
}

impl AtomicTimestamp {
    /// Update the contents with the provided [`Timestamp`].
    pub fn update(&self, ts: Timestamp) {
        self.samplerate.store(
            ts.samplerate.to_bits(),
            std::sync::atomic::Ordering::Relaxed,
        );
        self.counter
            .store(ts.counter, std::sync::atomic::Ordering::Relaxed);
    }

    /// Load values and return them as a [`Timestamp`].
    pub fn as_timestamp(&self) -> Timestamp {
        Timestamp {
            samplerate: f64::from_bits(self.samplerate.load(std::sync::atomic::Ordering::Relaxed)),
            counter: self.counter.load(std::sync::atomic::Ordering::Relaxed),
        }
    }

    /// Add the provided number of frames to this timestamp's counter.
    pub fn add_frames(&self, frames: u64) {
        self.counter
            .fetch_add(frames, std::sync::atomic::Ordering::Relaxed);
    }
}

impl From<Timestamp> for AtomicTimestamp {
    fn from(value: Timestamp) -> Self {
        Self {
            samplerate: AtomicU64::new(value.samplerate.to_bits()),
            counter: AtomicU64::new(value.counter),
        }
    }
}

impl From<AtomicTimestamp> for Timestamp {
    fn from(value: AtomicTimestamp) -> Self {
        value.as_timestamp()
    }
}
diff --git a/crates/interflow-core/src/traits.rs b/crates/interflow-core/src/traits.rs
new file mode 100644
index 0000000..35beb5f
--- /dev/null
+++ b/crates/interflow-core/src/traits.rs
@@ -0,0 +1,100 @@
+use std::any::TypeId;
+use std::marker::PhantomData;
+use std::mem::MaybeUninit;
+
/// A fully type-erased pointer, that can work with both thin and fat pointers.
/// Copied from an external snippet (the source URL was stripped during extraction —
/// TODO restore the link).
#[derive(Copy, Clone)]
struct ErasedPtr {
    /// Raw bytes of the erased pointer; sized to hold a fat pointer (data + metadata).
    value: MaybeUninit<[usize; 2]>,
}

impl ErasedPtr {
    /// Erase `ptr`.
    fn new<T: ?Sized>(ptr: *const T) -> Self {
        let mut res = ErasedPtr {
            value: MaybeUninit::zeroed(),
        };

        let len = size_of::<*const T>();
        assert!(len <= size_of::<[usize; 2]>());

        let ptr_val = (&ptr) as *const *const T as *const u8;
        let target = res.value.as_mut_ptr() as *mut u8;
        // SAFETY: The target is valid for at least `len` bytes, and has no
        // requirements on the value.
        unsafe {
            core::ptr::copy_nonoverlapping(ptr_val, target, len);
        }

        res
    }

    /// Convert the type erased pointer back into a pointer.
    ///
    /// # Safety
    ///
    /// The type `T` must be the same type as the one used with `new`.
    unsafe fn as_ptr<T: ?Sized>(&self) -> *const T {
        // SAFETY: The constructor ensures that the first `size_of::<*const T>()`
        // bytes of `&self.value` are a valid `*const T` pointer.
        unsafe { core::mem::transmute_copy(&self.value) }
    }
}

/// Type that can dynamically retrieve a type registered by an [`ExtensionProvider`] object.
/// Types are queried on-demand, every time an extension is requested.
///
/// Consumers of [`ExtensionProvider`] should instead use [`ExtensionProviderExt::lookup`].
pub struct Selector<'a> {
    __lifetime: PhantomData<&'a ()>,
    /// `TypeId` of the extension type being looked up.
    target: TypeId,
    /// Erased pointer to the matching registered value, if any.
    found: Option<ErasedPtr>,
}

impl<'a> Selector<'a> {
    // Not `const`: `TypeId::of` is not callable in stable const contexts.
    pub(crate) fn new<I: ?Sized + 'static>() -> Self {
        Self {
            __lifetime: PhantomData,
            target: TypeId::of::<I>(),
            found: None,
        }
    }

    /// Offers `value` as the implementation of extension type `T`. The value is only
    /// captured when `T` matches the requested target type; otherwise this is a no-op.
    pub fn register<T: ?Sized + 'static>(&mut self, value: &'a T) -> &mut Self {
        if self.target == TypeId::of::<T>() {
            self.found = Some(ErasedPtr::new(value));
        }
        self
    }

    /// Finishes the query, returning the captured extension reference, if any.
    pub(crate) fn finish<I: ?Sized + 'static>(self) -> Option<&'a I> {
        assert_eq!(self.target, TypeId::of::<I>());
        // SAFETY: `found` is only set by `register` when the registered type's `TypeId`
        // equals `target`, and we just asserted that `target == TypeId::of::<I>()`, so
        // the erased pointer was created from an `&'a I`.
        Some(unsafe { &*self.found?.as_ptr::<I>() })
    }
}

/// Trait for types that have optional data available on-demand.
pub trait ExtensionProvider: 'static {
    /// Register on-demand types. Note that the implementation details around registering extensions mean that this
    /// function will be called for every request. Runtime checks are expected, but this function should remain as fast
    /// as possible.
    fn register<'a, 'sel>(&'a self, selector: &'sel mut Selector<'a>) -> &'sel mut Selector<'a>;
}

// Compile-time check that `ExtensionProvider` is usable as a trait object.
const _EXTENSION_TRAIT_ASSERTS: () = {
    const fn typeable<T: ?Sized>() {}
    typeable::<dyn ExtensionProvider>();
};

/// Additional extensions for [`ExtensionProvider`] objects.
pub trait ExtensionProviderExt: ExtensionProvider {
    /// Look up `T` from the extension if it is registered.
    fn lookup<T: ?Sized + 'static>(&self) -> Option<&T> {
        let mut selector = Selector::new::<T>();
        {
            self.register(&mut selector);
        }
        selector.finish::<T>()
    }
}

/// Blanket impl so every provider gets [`ExtensionProviderExt::lookup`] for free.
/// NOTE(review): this impl was missing from the file as received — confirm against the
/// original source.
impl<P: ExtensionProvider + ?Sized> ExtensionProviderExt for P {}
\ No newline at end of file
diff --git a/crates/interflow-wasapi/Cargo.toml b/crates/interflow-wasapi/Cargo.toml
new file mode 100644
index 0000000..8b8f599
--- /dev/null
+++ b/crates/interflow-wasapi/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "interflow-wasapi"
+version.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+
+[dependencies]
+bitflags.workspace = true
+duplicate.workspace = true
+interflow-core.workspace = true
+thiserror.workspace = true
+windows = { version = "0.62.2", features = [
+ "Win32_Media_Audio",
+ "Win32_Foundation",
+ "Win32_Devices_Properties",
+ "Win32_Media_KernelStreaming",
+ "Win32_System_Com_StructuredStorage",
+ "Win32_System_Threading",
+ "Win32_Security",
+ "Win32_System_SystemServices",
+ "Win32_System_Variant",
+ "Win32_Media_Multimedia",
+ "Win32_UI_Shell_PropertiesSystem",
+] }
+zerocopy = { version = "0.8.39", features = ["std"] }
diff --git a/crates/interflow-wasapi/src/device.rs b/crates/interflow-wasapi/src/device.rs
new file mode 100644
index 0000000..f5f3a37
--- /dev/null
+++ b/crates/interflow-wasapi/src/device.rs
@@ -0,0 +1,151 @@
use std::borrow::Cow;

use interflow_core::device::{ResolvedStreamConfig, StreamConfig};
use interflow_core::stream;
use interflow_core::traits::{ExtensionProvider, Selector};
use interflow_core::{device, DeviceType};
use windows::Win32::Media::Audio;
use windows::Win32::Media::Audio::{IAudioClient, IAudioClient3};
use windows::Win32::System::Com;

use crate::util::MMDevice;
+
/// A single WASAPI endpoint, as produced by device enumeration.
#[derive(Debug, Clone)]
pub struct Device {
    // Owning wrapper around the COM `IMMDevice` handle.
    pub(crate) handle: MMDevice,
    // Direction (input/output) this endpoint was enumerated as.
    pub(crate) device_type: DeviceType,
}
+
impl ExtensionProvider for Device {
    // No WASAPI-specific device extensions are registered yet; the selector
    // is returned unchanged so every lookup resolves to `None`.
    fn register<'a, 'sel>(&'a self, selector: &'sel mut Selector<'a>) -> &'sel mut Selector<'a> {
        selector
    }
}
+
// NOTE(review): generic parameters were stripped from this diff (e.g. the
// `Result<..>` return types below lost their arguments) — restore from the
// original source before building.
impl device::Device for Device {
    type Error = crate::Error;
    type StreamHandle = ();

    /// Human-readable endpoint name, delegated to the underlying `MMDevice`.
    fn name(&self) -> Cow<'_, str> {
        Cow::Owned(self.handle.name())
    }

    fn device_type(&self) -> DeviceType {
        self.device_type
    }

    /// Preferred shared-mode configuration. Tries the `IAudioClient3` path
    /// first (which also reports the engine-period range) and falls back to
    /// the plain `IAudioClient` mix format on error.
    fn default_config(&self) -> Result {
        self.get_mix_format_iac3()
            .or_else(|_| self.get_mix_format())
    }

    // TODO: not implemented yet.
    fn is_config_supported(&self, config: &StreamConfig) -> bool {
        todo!()
    }

    // TODO: not implemented yet.
    fn create_stream(
        &self,
        stream_config: StreamConfig,
        callback: Callback,
    ) -> Result, Self::Error> {
        todo!()
    }
}
+
+impl Device {
+ fn get_mix_format(&self) -> Result {
+ let client = self.handle.activate::()?;
+ let mix_format = unsafe { client.GetMixFormat() }?;
+ let format = unsafe { mix_format.read_unaligned() };
+ let channels = format.nChannels as usize;
+ let input_channels = if self.device_type.is_input() {
+ channels
+ } else {
+ 0
+ };
+ let output_channels = if self.device_type.is_output() {
+ channels
+ } else {
+ 0
+ };
+ Ok(StreamConfig {
+ sample_rate: format.nSamplesPerSec as _,
+ input_channels,
+ output_channels,
+ buffer_size_range: (None, None),
+ exclusive: false,
+ })
+ }
+
+ fn get_mix_format_iac3(&self) -> Result {
+ let client = self.handle.activate::()?;
+ let mut period_default = 0u32;
+ let mut period_min = 0u32;
+ let mut period_max = 0u32;
+ let format = unsafe { client.GetMixFormat() }?;
+ unsafe {
+ let mut _fundamental_period = 0u32;
+ client.GetSharedModeEnginePeriod(
+ format.cast_const(),
+ &mut period_default,
+ &mut _fundamental_period,
+ &mut period_min,
+ &mut period_max,
+ )?;
+ }
+ let format = unsafe { format.read_unaligned() };
+ let channels = format.nChannels as usize;
+ let input_channels = if self.device_type.is_input() {
+ channels
+ } else {
+ 0
+ };
+ let output_channels = if self.device_type.is_output() {
+ channels
+ } else {
+ 0
+ };
+ Ok(StreamConfig {
+ sample_rate: format.nSamplesPerSec as _,
+ input_channels,
+ output_channels,
+ buffer_size_range: (Some(period_min as usize), Some(period_max as usize)),
+ exclusive: false,
+ })
+ }
+}
+
+/// An iterable collection WASAPI devices.
+pub(crate) struct DeviceList {
+ pub(crate) collection: Audio::IMMDeviceCollection,
+ pub(crate) total_count: u32,
+ pub(crate) next_item: u32,
+ pub(crate) device_type: DeviceType,
+}
+
+unsafe impl Send for DeviceList {}
+
+unsafe impl Sync for DeviceList {}
+
+impl Iterator for DeviceList {
+ type Item = Device;
+
+ fn next(&mut self) -> Option {
+ if self.next_item >= self.total_count {
+ return None;
+ }
+
+ unsafe {
+ let device = self.collection.Item(self.next_item).unwrap();
+ self.next_item += 1;
+ Some(Device {
+ handle: MMDevice::new(device),
+ device_type: self.device_type,
+ })
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option) {
+ let rest = (self.total_count - self.next_item) as usize;
+ (rest, Some(rest))
+ }
+}
+
+impl ExactSizeIterator for DeviceList {}
diff --git a/crates/interflow-wasapi/src/lib.rs b/crates/interflow-wasapi/src/lib.rs
new file mode 100644
index 0000000..9357430
--- /dev/null
+++ b/crates/interflow-wasapi/src/lib.rs
@@ -0,0 +1,162 @@
+pub mod device;
+mod util;
+mod stream;
+
+use std::sync::OnceLock;
+use bitflags::bitflags_match;
+use windows::Win32::Media::Audio;
+use windows::Win32::System::Com;
+use device::Device;
+use interflow_core::{platform, DeviceType};
+use interflow_core::traits::{ExtensionProvider, Selector};
+use crate::util::MMDevice;
+
/// Errors produced by the WASAPI backend.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Error originating from WASAPI.
    #[error("{} (code {})", .0.message(), .0.code())]
    BackendError(#[from] windows::core::Error),
    /// Requested WASAPI device configuration is not available
    #[error("Configuration not available")]
    ConfigurationNotAvailable,
    /// Windows Foundation error
    #[error("Win32 error: {0}")]
    FoundationError(String),
    /// Duplex stream requested, unsupported by WASAPI
    #[error("Unsupported duplex stream requested")]
    DuplexStreamRequested,
}
+
+#[derive(Debug, Copy, Clone)]
+pub struct Platform;
+
+impl ExtensionProvider for Platform {
+ fn register<'a, 'sel>(&'a self, selector: &'sel mut Selector<'a>) -> &'sel mut Selector<'a> {
+ selector.register::(self)
+ }
+}
+
+impl platform::Platform for Platform {
+ type Error = Error;
+ type Device = Device;
+ const NAME: &'static str = "";
+
+ fn default_device(device_type: DeviceType) -> Result {
+ let Some(device) = audio_device_enumerator().get_default_device(device_type)? else {
+ return Err(Error::ConfigurationNotAvailable);
+ };
+ Ok(device)
+ }
+
+ fn list_devices(&self) -> Result, Self::Error> {
+ audio_device_enumerator().get_device_list()
+ }
+}
+
+pub trait DefaultByRole {
+ fn default_by_role(&self, flow: Audio::EDataFlow, role: Audio::ERole) -> Result;
+}
+
+impl DefaultByRole for Platform {
+ fn default_by_role(&self, flow: Audio::EDataFlow, role: Audio::ERole) -> Result {
+ audio_device_enumerator().get_default_device_with_role(flow, role)
+ }
+}
+
+fn audio_device_enumerator() -> &'static AudioDeviceEnumerator {
+ static ENUMERATOR: OnceLock = OnceLock::new();
+ ENUMERATOR.get_or_init(|| {
+ // Make sure COM is initialised.
+ let com = util::com().unwrap();
+
+ unsafe {
+ let enumerator = com.create_instance::<_, Audio::IMMDeviceEnumerator>(
+ &Audio::MMDeviceEnumerator,
+ None,
+ Com::CLSCTX_ALL,
+ ).unwrap();
+
+ AudioDeviceEnumerator(enumerator)
+ }
+ })
+}
+
/// Send/Sync wrapper around `IMMDeviceEnumerator`.
// NOTE(review): the `unsafe impl Send/Sync` backing this claim are not
// visible in this chunk — confirm they exist elsewhere in the file.
pub struct AudioDeviceEnumerator(Audio::IMMDeviceEnumerator);
+
+impl AudioDeviceEnumerator {
+ // Returns the default output device.
+ fn get_default_device(
+ &self,
+ device_type: DeviceType,
+ ) -> Result