using SoundIOSharp;
using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.SoundIo
{
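/// <summary>
/// Represents a single audio track backed by a libsoundio output stream.
/// </summary>
/// <example>
/// A minimal usage sketch of the track lifecycle, assuming an already-connected
/// <see cref="SoundIO"/> context and a suitable <see cref="SoundIODevice"/>; the
/// sample rate, channel counts, buffer size and tag value are illustrative only:
/// <code>
/// var track = new SoundIoAudioTrack(0, audioContext, audioDevice);
///
/// // Open a stereo PCM16 stream; the callback fires when queued buffers are consumed
/// track.Open(48000, 2, 2, () => { /* poll track.ReleasedBuffers here */ });
///
/// // Queue 50 ms of stereo PCM16 samples under tag 1, then start playback
/// track.AppendBuffer(1, new short[4800]);
/// track.Start();
///
/// // ... later ...
/// track.Stop();
/// track.Close();
/// </code>
/// </example>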
internal class SoundIoAudioTrack : IDisposable
{
/// <summary>
/// The audio track ring buffer
/// </summary>
private SoundIoRingBuffer m_Buffer;
/// <summary>
/// A list of buffers currently pending writeback to the audio backend
/// </summary>
private ConcurrentQueue<SoundIoBuffer> m_ReservedBuffers;
/// <summary>
/// Occurs when a buffer has been released by the audio backend
/// </summary>
private event ReleaseCallback BufferReleased;
/// <summary>
/// The track ID of this <see cref="SoundIoAudioTrack"/>
/// </summary>
public int TrackID { get; private set; }
/// <summary>
/// The current playback state
/// </summary>
public PlaybackState State { get; private set; }
/// <summary>
/// The audio context this track belongs to
/// </summary>
public SoundIO AudioContext { get; private set; }
/// <summary>
/// The <see cref="SoundIODevice"/> this track belongs to
/// </summary>
public SoundIODevice AudioDevice { get; private set; }
/// <summary>
/// The audio output stream of this track
/// </summary>
public SoundIOOutStream AudioStream { get; private set; }
/// <summary>
/// Released buffers the track is no longer holding
/// </summary>
public ConcurrentQueue<long> ReleasedBuffers { get; private set; }
/// <summary>
/// Buffer count of the track
/// </summary>
public uint BufferCount => (uint)m_ReservedBuffers.Count;
/// <summary>
/// Played sample count of the track
/// </summary>
public ulong PlayedSampleCount { get; private set; }
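/// <summary>
/// The channel count the output device stream is opened with
/// </summary>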
private int _hardwareChannels;
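/// <summary>
/// The channel count of the audio data passed to <see cref="AppendBuffer{T}(long, T[])"/>
/// </summary>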
private int _virtualChannels;
/// <summary>
/// Constructs a new instance of a <see cref="SoundIoAudioTrack"/>
/// </summary>
/// <param name="trackId">The track ID</param>
/// <param name="audioContext">The SoundIO audio context</param>
/// <param name="audioDevice">The SoundIO audio device</param>
public SoundIoAudioTrack(int trackId, SoundIO audioContext, SoundIODevice audioDevice)
{
TrackID = trackId;
AudioContext = audioContext;
AudioDevice = audioDevice;
State = PlaybackState.Stopped;
ReleasedBuffers = new ConcurrentQueue<long>();
m_Buffer = new SoundIoRingBuffer();
m_ReservedBuffers = new ConcurrentQueue<SoundIoBuffer>();
}
/// <summary>
/// Opens the audio track with the specified parameters
/// </summary>
/// <param name="sampleRate">The requested sample rate of the track</param>
/// <param name="hardwareChannels">The requested hardware channels</param>
/// <param name="virtualChannels">The requested virtual channels</param>
/// <param name="callback">A <see cref="ReleaseCallback"/> that represents the delegate to invoke when a buffer has been released by the audio track</param>
/// <param name="format">The requested sample format of the track</param>
public void Open(
int sampleRate,
int hardwareChannels,
int virtualChannels,
ReleaseCallback callback,
SoundIOFormat format = SoundIOFormat.S16LE)
{
// Close any existing audio streams
if (AudioStream != null)
{
Close();
}
if (!AudioDevice.SupportsSampleRate(sampleRate))
{
throw new InvalidOperationException($"This sound device does not support a sample rate of {sampleRate}Hz");
}
if (!AudioDevice.SupportsFormat(format))
{
throw new InvalidOperationException($"This sound device does not support SoundIOFormat.{Enum.GetName(typeof(SoundIOFormat), format)}");
}
if (!AudioDevice.SupportsChannelCount(hardwareChannels))
{
throw new InvalidOperationException($"This sound device does not support channel count {hardwareChannels}");
}
_hardwareChannels = hardwareChannels;
_virtualChannels = virtualChannels;
AudioStream = AudioDevice.CreateOutStream();
AudioStream.Name = $"SwitchAudioTrack_{TrackID}";
AudioStream.Layout = SoundIOChannelLayout.GetDefault(hardwareChannels);
AudioStream.Format = format;
AudioStream.SampleRate = sampleRate;
AudioStream.WriteCallback = WriteCallback;
BufferReleased += callback;
AudioStream.Open();
}
/// <summary>
/// This callback occurs when the sound device is ready to buffer more frames
/// </summary>
/// <param name="minFrameCount">The minimum amount of frames expected by the audio backend</param>
/// <param name="maxFrameCount">The maximum amount of frames that can be written to the audio backend</param>
private unsafe void WriteCallback(int minFrameCount, int maxFrameCount)
{
int bytesPerFrame = AudioStream.BytesPerFrame;
uint bytesPerSample = (uint)AudioStream.BytesPerSample;
int bufferedFrames = m_Buffer.Length / bytesPerFrame;
long bufferedSamples = m_Buffer.Length / bytesPerSample;
int frameCount = Math.Min(bufferedFrames, maxFrameCount);
if (frameCount == 0)
{
return;
}
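// BeginWrite exposes one destination area per channel and may adjust frameCount
// to the number of frames the backend can actually accept. Each area is planar
// with its own byte stride (Step), while the ring buffer holds interleaved
// frames, so the copies below de-interleave the data sample by sample.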
SoundIOChannelAreas areas = AudioStream.BeginWrite(ref frameCount);
int channelCount = areas.ChannelCount;
byte[] samples = new byte[frameCount * bytesPerFrame];
m_Buffer.Read(samples, 0, samples.Length);
// This is a huge ugly block of code, but we save
// a significant amount of time over the generic
// loop that handles other channel counts.
// Mono
if (channelCount == 1)
{
SoundIOChannelArea area = areas.GetArea(0);
fixed (byte* srcptr = samples)
{
if (bytesPerSample == 1)
{
for (int frame = 0; frame < frameCount; frame++)
{
((byte*)area.Pointer)[0] = srcptr[frame * bytesPerFrame];
area.Pointer += area.Step;
}
}
else if (bytesPerSample == 2)
{
for (int frame = 0; frame < frameCount; frame++)
{
((short*)area.Pointer)[0] = ((short*)srcptr)[frame * bytesPerFrame >> 1];
area.Pointer += area.Step;
}
}
else if (bytesPerSample == 4)
{
for (int frame = 0; frame < frameCount; frame++)
{
((int*)area.Pointer)[0] = ((int*)srcptr)[frame * bytesPerFrame >> 2];
area.Pointer += area.Step;
}
}
else
{
for (int frame = 0; frame < frameCount; frame++)
{
Unsafe.CopyBlockUnaligned((byte*)area.Pointer, srcptr + (frame * bytesPerFrame), bytesPerSample);
area.Pointer += area.Step;
}
}
}
}
// Stereo
else if (channelCount == 2)
{
SoundIOChannelArea area1 = areas.GetArea(0);
SoundIOChannelArea area2 = areas.GetArea(1);
fixed (byte* srcptr = samples)
{
if (bytesPerSample == 1)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((byte*)area1.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 0];
// Channel 2
((byte*)area2.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 1];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
}
}
else if (bytesPerSample == 2)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((short*)area1.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 0];
// Channel 2
((short*)area2.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 1];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
}
}
else if (bytesPerSample == 4)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((int*)area1.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 0];
// Channel 2
((int*)area2.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 1];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
}
}
else
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
Unsafe.CopyBlockUnaligned((byte*)area1.Pointer, srcptr + (frame * bytesPerFrame) + (0 * bytesPerSample), bytesPerSample);
// Channel 2
Unsafe.CopyBlockUnaligned((byte*)area2.Pointer, srcptr + (frame * bytesPerFrame) + (1 * bytesPerSample), bytesPerSample);
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
}
}
}
}
// Surround
else if (channelCount == 6)
{
SoundIOChannelArea area1 = areas.GetArea(0);
SoundIOChannelArea area2 = areas.GetArea(1);
SoundIOChannelArea area3 = areas.GetArea(2);
SoundIOChannelArea area4 = areas.GetArea(3);
SoundIOChannelArea area5 = areas.GetArea(4);
SoundIOChannelArea area6 = areas.GetArea(5);
fixed (byte* srcptr = samples)
{
if (bytesPerSample == 1)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((byte*)area1.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 0];
// Channel 2
((byte*)area2.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 1];
// Channel 3
((byte*)area3.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 2];
// Channel 4
((byte*)area4.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 3];
// Channel 5
((byte*)area5.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 4];
// Channel 6
((byte*)area6.Pointer)[0] = srcptr[(frame * bytesPerFrame) + 5];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
area3.Pointer += area3.Step;
area4.Pointer += area4.Step;
area5.Pointer += area5.Step;
area6.Pointer += area6.Step;
}
}
else if (bytesPerSample == 2)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((short*)area1.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 0];
// Channel 2
((short*)area2.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 1];
// Channel 3
((short*)area3.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 2];
// Channel 4
((short*)area4.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 3];
// Channel 5
((short*)area5.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 4];
// Channel 6
((short*)area6.Pointer)[0] = ((short*)srcptr)[(frame * bytesPerFrame >> 1) + 5];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
area3.Pointer += area3.Step;
area4.Pointer += area4.Step;
area5.Pointer += area5.Step;
area6.Pointer += area6.Step;
}
}
else if (bytesPerSample == 4)
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
((int*)area1.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 0];
// Channel 2
((int*)area2.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 1];
// Channel 3
((int*)area3.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 2];
// Channel 4
((int*)area4.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 3];
// Channel 5
((int*)area5.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 4];
// Channel 6
((int*)area6.Pointer)[0] = ((int*)srcptr)[(frame * bytesPerFrame >> 2) + 5];
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
area3.Pointer += area3.Step;
area4.Pointer += area4.Step;
area5.Pointer += area5.Step;
area6.Pointer += area6.Step;
}
}
else
{
for (int frame = 0; frame < frameCount; frame++)
{
// Channel 1
Unsafe.CopyBlockUnaligned((byte*)area1.Pointer, srcptr + (frame * bytesPerFrame) + (0 * bytesPerSample), bytesPerSample);
// Channel 2
Unsafe.CopyBlockUnaligned((byte*)area2.Pointer, srcptr + (frame * bytesPerFrame) + (1 * bytesPerSample), bytesPerSample);
// Channel 3
Unsafe.CopyBlockUnaligned((byte*)area3.Pointer, srcptr + (frame * bytesPerFrame) + (2 * bytesPerSample), bytesPerSample);
// Channel 4
Unsafe.CopyBlockUnaligned((byte*)area4.Pointer, srcptr + (frame * bytesPerFrame) + (3 * bytesPerSample), bytesPerSample);
// Channel 5
Unsafe.CopyBlockUnaligned((byte*)area5.Pointer, srcptr + (frame * bytesPerFrame) + (4 * bytesPerSample), bytesPerSample);
// Channel 6
Unsafe.CopyBlockUnaligned((byte*)area6.Pointer, srcptr + (frame * bytesPerFrame) + (5 * bytesPerSample), bytesPerSample);
area1.Pointer += area1.Step;
area2.Pointer += area2.Step;
area3.Pointer += area3.Step;
area4.Pointer += area4.Step;
area5.Pointer += area5.Step;
area6.Pointer += area6.Step;
}
}
}
}
// Every other channel count
else
{
SoundIOChannelArea[] channels = new SoundIOChannelArea[channelCount];
// Obtain the channel area for each channel
for (int i = 0; i < channelCount; i++)
{
channels[i] = areas.GetArea(i);
}
fixed (byte* srcptr = samples)
{
for (int frame = 0; frame < frameCount; frame++)
{
for (int channel = 0; channel < channelCount; channel++)
{
// Copy channel by channel, frame by frame. This is slow!
Unsafe.CopyBlockUnaligned((byte*)channels[channel].Pointer, srcptr + (frame * bytesPerFrame) + (channel * bytesPerSample), bytesPerSample);
channels[channel].Pointer += channels[channel].Step;
}
}
}
}
AudioStream.EndWrite();
PlayedSampleCount += (ulong)samples.Length;
UpdateReleasedBuffers(samples.Length);
}
/// <summary>
/// Releases any buffers that have been fully written to the output device
/// </summary>
/// <param name="bytesRead">The amount of bytes written in the last device write</param>
private void UpdateReleasedBuffers(int bytesRead)
{
bool bufferReleased = false;
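// buffer.Length tracks the remaining unplayed bytes of each queued buffer, so a
// partially consumed buffer stays at the head of the queue until fully written.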
while (bytesRead > 0)
{
if (!m_ReservedBuffers.TryPeek(out SoundIoBuffer buffer))
{
// Nothing is queued, so there is nothing left to account for
break;
}
if (buffer.Length > bytesRead)
{
// Only part of this buffer was written; keep it queued with the remainder
buffer.Length -= bytesRead;
bytesRead = 0;
}
else
{
// This buffer has been fully written to the device and can be released
bufferReleased = true;
bytesRead -= buffer.Length;
m_ReservedBuffers.TryDequeue(out buffer);
ReleasedBuffers.Enqueue(buffer.Tag);
}
}
if (bufferReleased)
{
OnBufferReleased();
}
}
/// <summary>
/// Starts audio playback
/// </summary>
public void Start()
{
if (AudioStream == null)
{
return;
}
AudioStream.Start();
AudioStream.Pause(false);
AudioContext.FlushEvents();
State = PlaybackState.Playing;
}
/// <summary>
/// Stops audio playback
/// </summary>
public void Stop()
{
if (AudioStream == null)
{
return;
}
AudioStream.Pause(true);
AudioContext.FlushEvents();
State = PlaybackState.Stopped;
}
/// <summary>
/// Appends an audio buffer to the track's internal ring buffer
/// </summary>
/// <typeparam name="T">The audio sample type</typeparam>
/// <param name="bufferTag">The unique tag of the buffer being appended</param>
/// <param name="buffer">The buffer to append</param>
public void AppendBuffer<T>(long bufferTag, T[] buffer) where T : struct
{
if (AudioStream == null)
{
return;
}
int sampleSize = Unsafe.SizeOf<T>();
int targetSize = sampleSize * buffer.Length;
// Do we need to downmix?
if (_hardwareChannels != _virtualChannels)
{
if (sampleSize != sizeof(short))
{
throw new NotImplementedException("Downmixing formats other than PCM16 is not supported!");
}
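// Downmix chain: 5.1 surround content is folded down to stereo first, and then
// to mono if the output device was opened with a single channel.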
short[] downmixedBuffer;
ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
if (_virtualChannels == 6)
{
downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
if (_hardwareChannels == 1)
{
downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
}
}
else if (_virtualChannels == 2)
{
downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
}
else
{
throw new NotImplementedException($"Downmixing from {_virtualChannels} to {_hardwareChannels} not implemented!");
}
targetSize = sampleSize * downmixedBuffer.Length;
// Copy the memory to our ring buffer
m_Buffer.Write(downmixedBuffer, 0, targetSize);
// Keep track of "buffered" buffers
m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
}
else
{
// Copy the memory to our ring buffer
m_Buffer.Write(buffer, 0, targetSize);
// Keep track of "buffered" buffers
m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
}
}
/// <summary>
/// Returns a value indicating whether the specified buffer is currently reserved by the track
/// </summary>
/// <param name="bufferTag">The buffer tag to check</param>
public bool ContainsBuffer(long bufferTag)
{
return m_ReservedBuffers.Any(x => x.Tag == bufferTag);
}
/// <summary>
/// Flush all track buffers
/// </summary>
public bool FlushBuffers()
{
m_Buffer.Clear();
if (m_ReservedBuffers.Count > 0)
{
foreach (var buffer in m_ReservedBuffers)
{
ReleasedBuffers.Enqueue(buffer.Tag);
}
OnBufferReleased();
return true;
}
return false;
}
/// <summary>
/// Closes the <see cref="SoundIoAudioTrack"/>
/// </summary>
public void Close()
{
if (AudioStream != null)
{
AudioStream.Pause(true);
AudioStream.Dispose();
}
m_Buffer.Clear();
OnBufferReleased();
ReleasedBuffers.Clear();
State = PlaybackState.Stopped;
AudioStream = null;
BufferReleased = null;
}
private void OnBufferReleased()
{
BufferReleased?.Invoke();
}
/// <summary>
/// Releases the unmanaged resources used by the <see cref="SoundIoAudioTrack"/>
/// </summary>
public void Dispose()
{
Close();
}
~SoundIoAudioTrack()
{
Dispose();
}
}
}