Move solution and projects to src

TSR Berry 2023-04-08 01:22:00 +02:00 committed by Mary
parent cd124bda58
commit cee7121058
3466 changed files with 55 additions and 55 deletions


@@ -0,0 +1,132 @@
using System;
using System.Threading;
namespace Ryujinx.Audio
{
/// <summary>
/// Manage the audio input and output systems.
/// </summary>
public class AudioManager : IDisposable
{
/// <summary>
/// Lock used to control waiter registration.
/// </summary>
private object _lock = new object();
/// <summary>
/// Events signaled when the driver played audio buffers.
/// </summary>
private ManualResetEvent[] _updateRequiredEvents;
/// <summary>
/// Actions to execute when the driver has played audio buffers.
/// </summary>
private Action[] _actions;
/// <summary>
/// The worker thread in charge of handling session updates.
/// </summary>
private Thread _workerThread;
private bool _isRunning;
/// <summary>
/// Create a new <see cref="AudioManager"/>.
/// </summary>
public AudioManager()
{
_updateRequiredEvents = new ManualResetEvent[2];
_actions = new Action[2];
_isRunning = false;
// Termination event.
_updateRequiredEvents[1] = new ManualResetEvent(false);
_workerThread = new Thread(Update)
{
Name = "AudioManager.Worker"
};
}
/// <summary>
/// Start the <see cref="AudioManager"/>.
/// </summary>
public void Start()
{
if (_workerThread.IsAlive)
{
throw new InvalidOperationException();
}
_isRunning = true;
_workerThread.Start();
}
/// <summary>
/// Initialize update handlers.
/// </summary>
/// <param name="updatedRequiredEvent ">The driver event that will get signaled by the device driver when an audio buffer finished playing/being captured</param>
/// <param name="outputCallback">The callback to call when an audio buffer finished playing</param>
/// <param name="inputCallback">The callback to call when an audio buffer was captured</param>
public void Initialize(ManualResetEvent updatedRequiredEvent, Action outputCallback, Action inputCallback)
{
lock (_lock)
{
_updateRequiredEvents[0] = updatedRequiredEvent;
_actions[0] = outputCallback;
_actions[1] = inputCallback;
}
}
/// <summary>
/// Entrypoint of the <see cref="_workerThread"/> in charge of updating the <see cref="AudioManager"/>.
/// </summary>
private void Update()
{
while (_isRunning)
{
int index = WaitHandle.WaitAny(_updateRequiredEvents);
// Last index is here to indicate thread termination.
if (index + 1 == _updateRequiredEvents.Length)
{
break;
}
lock (_lock)
{
foreach (Action action in _actions)
{
action?.Invoke();
}
_updateRequiredEvents[0].Reset();
}
}
}
/// <summary>
/// Stop updating the <see cref="AudioManager"/> without stopping the worker thread.
/// </summary>
public void StopUpdates()
{
_isRunning = false;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_updateRequiredEvents[1].Set();
_workerThread.Join();
_updateRequiredEvents[1].Dispose();
}
}
}
}
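
The snippet below is an illustrative sketch (not part of this commit) of how a caller could wire the manager above to a driver's update event; the callbacks are hypothetical placeholders, and the event would normally come from IHardwareDeviceDriver.GetUpdateRequiredEvent().

using System.Threading;
using Ryujinx.Audio;

// Event normally obtained from the backend driver.
ManualResetEvent driverUpdateEvent = new ManualResetEvent(false);

AudioManager manager = new AudioManager();

// Register the driver event and the per-direction callbacks, then start the worker thread.
manager.Initialize(driverUpdateEvent,
    outputCallback: () => { /* release finished output buffers */ },
    inputCallback: () => { /* release captured input buffers */ });
manager.Start();

// On shutdown: stop processing updates and terminate the worker thread.
manager.StopUpdates();
manager.Dispose();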


@@ -0,0 +1,26 @@
using Ryujinx.Audio.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
public static class BackendHelper
{
public static int GetSampleSize(SampleFormat format)
{
return format switch
{
SampleFormat.PcmInt8 => sizeof(byte),
SampleFormat.PcmInt16 => sizeof(ushort),
SampleFormat.PcmInt24 => 3,
SampleFormat.PcmInt32 => sizeof(int),
SampleFormat.PcmFloat => sizeof(float),
_ => throw new ArgumentException($"{format}"),
};
}
public static int GetSampleCount(SampleFormat format, int channelCount, int bufferSize)
{
return bufferSize / GetSampleSize(format) / channelCount;
}
}
}
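
As a quick, illustrative check of the helper above: a 960-byte PCM16 stereo buffer holds 960 / sizeof(short) / 2 = 240 samples per channel.

using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;

// 960 bytes of interleaved PCM16 stereo.
int sampleCount = BackendHelper.GetSampleCount(SampleFormat.PcmInt16, channelCount: 2, bufferSize: 960);
System.Console.WriteLine(sampleCount); // Prints 240.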


@@ -0,0 +1,166 @@
using Ryujinx.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
/// <summary>
/// A ring buffer that grows if the data written to it is too big to fit.
/// </summary>
public class DynamicRingBuffer
{
private const int RingBufferAlignment = 2048;
private object _lock = new object();
private byte[] _buffer;
private int _size;
private int _headOffset;
private int _tailOffset;
public int Length => _size;
public DynamicRingBuffer(int initialCapacity = RingBufferAlignment)
{
_buffer = new byte[initialCapacity];
}
public void Clear()
{
_size = 0;
_headOffset = 0;
_tailOffset = 0;
}
public void Clear(int size)
{
lock (_lock)
{
if (size > _size)
{
size = _size;
}
if (size == 0)
{
return;
}
_headOffset = (_headOffset + size) % _buffer.Length;
_size -= size;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
}
}
private void SetCapacityLocked(int capacity)
{
byte[] buffer = new byte[capacity];
if (_size > 0)
{
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _size);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _buffer.Length - _headOffset);
Buffer.BlockCopy(_buffer, 0, buffer, _buffer.Length - _headOffset, _tailOffset);
}
}
_buffer = buffer;
_headOffset = 0;
_tailOffset = _size;
}
public void Write<T>(T[] buffer, int index, int count)
{
if (count == 0)
{
return;
}
lock (_lock)
{
if ((_size + count) > _buffer.Length)
{
SetCapacityLocked(BitUtils.AlignUp(_size + count, RingBufferAlignment));
}
if (_headOffset < _tailOffset)
{
int tailLength = _buffer.Length - _tailOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, tailLength);
Buffer.BlockCopy(buffer, index + tailLength, _buffer, 0, count - tailLength);
}
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
_size += count;
_tailOffset = (_tailOffset + count) % _buffer.Length;
}
}
public int Read<T>(T[] buffer, int index, int count)
{
lock (_lock)
{
if (count > _size)
{
count = _size;
}
if (count == 0)
{
return 0;
}
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
int tailLength = _buffer.Length - _headOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, tailLength);
Buffer.BlockCopy(_buffer, 0, buffer, index + tailLength, count - tailLength);
}
}
_size -= count;
_headOffset = (_headOffset + count) % _buffer.Length;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
return count;
}
}
}
}
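
A minimal, illustrative round trip with the ring buffer above (byte data keeps the element count equal to the byte count passed to Buffer.BlockCopy, which is the simplest case):

using Ryujinx.Audio.Backends.Common;

DynamicRingBuffer ring = new DynamicRingBuffer();

byte[] input = { 1, 2, 3, 4 };
ring.Write(input, 0, input.Length);

byte[] output = new byte[input.Length];
int read = ring.Read(output, 0, output.Length); // read == 4, output == { 1, 2, 3, 4 }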


@@ -0,0 +1,79 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Backends.Common
{
public abstract class HardwareDeviceSessionOutputBase : IHardwareDeviceSession
{
public IVirtualMemoryManager MemoryManager { get; }
public SampleFormat RequestedSampleFormat { get; }
public uint RequestedSampleRate { get; }
public uint RequestedChannelCount { get; }
public HardwareDeviceSessionOutputBase(IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
MemoryManager = memoryManager;
RequestedSampleFormat = requestedSampleFormat;
RequestedSampleRate = requestedSampleRate;
RequestedChannelCount = requestedChannelCount;
}
private byte[] GetBufferSamples(AudioBuffer buffer)
{
if (buffer.DataPointer == 0)
{
return null;
}
byte[] data = new byte[buffer.DataSize];
MemoryManager.Read(buffer.DataPointer, data);
return data;
}
protected ulong GetSampleCount(AudioBuffer buffer)
{
return GetSampleCount((int)buffer.DataSize);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected ulong GetSampleCount(int dataSize)
{
return (ulong)BackendHelper.GetSampleCount(RequestedSampleFormat, (int)RequestedChannelCount, dataSize);
}
public abstract void Dispose();
public abstract void PrepareToClose();
public abstract void QueueBuffer(AudioBuffer buffer);
public abstract void SetVolume(float volume);
public abstract float GetVolume();
public abstract void Start();
public abstract void Stop();
public abstract ulong GetPlayedSampleCount();
public abstract bool WasBufferFullyConsumed(AudioBuffer buffer);
public virtual bool RegisterBuffer(AudioBuffer buffer)
{
return RegisterBuffer(buffer, GetBufferSamples(buffer));
}
public virtual bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (samples == null)
{
return false;
}
if (buffer.Data == null)
{
buffer.Data = samples;
}
return true;
}
public virtual void UnregisterBuffer(AudioBuffer buffer) { }
}
}


@@ -0,0 +1,186 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Backends.Dummy;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.CompatLayer
{
public class CompatLayerHardwareDeviceDriver : IHardwareDeviceDriver
{
private IHardwareDeviceDriver _realDriver;
public static bool IsSupported => true;
public CompatLayerHardwareDeviceDriver(IHardwareDeviceDriver realDevice)
{
_realDriver = realDevice;
}
public void Dispose()
{
_realDriver.Dispose();
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _realDriver.GetUpdateRequiredEvent();
}
public ManualResetEvent GetPauseEvent()
{
return _realDriver.GetPauseEvent();
}
private uint SelectHardwareChannelCount(uint targetChannelCount)
{
if (_realDriver.SupportsChannelCount(targetChannelCount))
{
return targetChannelCount;
}
return targetChannelCount switch
{
6 => SelectHardwareChannelCount(2),
2 => SelectHardwareChannelCount(1),
1 => throw new ArgumentException("No valid channel configuration found!"),
_ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}")
};
}
private SampleFormat SelectHardwareSampleFormat(SampleFormat targetSampleFormat)
{
if (_realDriver.SupportsSampleFormat(targetSampleFormat))
{
return targetSampleFormat;
}
// Attempt conversion from PCM16.
if (targetSampleFormat == SampleFormat.PcmInt16)
{
// Prefer PCM32 if we need to convert.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt32))
{
return SampleFormat.PcmInt32;
}
// If not supported, PCM float provides the best quality with a cost lower than PCM24.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmFloat))
{
return SampleFormat.PcmFloat;
}
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt24))
{
return SampleFormat.PcmInt24;
}
// If nothing is truly supported, attempt PCM8 at the cost of losing quality.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt8))
{
return SampleFormat.PcmInt8;
}
}
throw new ArgumentException("No valid sample format configuration found!");
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
{
if (channelCount == 0)
{
channelCount = 2;
}
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
volume = Math.Clamp(volume, 0, 1);
if (!_realDriver.SupportsDirection(direction))
{
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, "The selected audio backend doesn't support audio input, fallback to dummy...");
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
throw new NotImplementedException();
}
SampleFormat hardwareSampleFormat = SelectHardwareSampleFormat(sampleFormat);
uint hardwareChannelCount = SelectHardwareChannelCount(channelCount);
IHardwareDeviceSession realSession = _realDriver.OpenDeviceSession(direction, memoryManager, hardwareSampleFormat, sampleRate, hardwareChannelCount, volume);
if (hardwareChannelCount == channelCount && hardwareSampleFormat == sampleFormat)
{
return realSession;
}
if (hardwareSampleFormat != sampleFormat)
{
Logger.Warning?.Print(LogClass.Audio, $"{sampleFormat} isn't supported by the audio device, conversion to {hardwareSampleFormat} will happen.");
if (hardwareSampleFormat < sampleFormat)
{
Logger.Warning?.Print(LogClass.Audio, $"{hardwareSampleFormat} has lower quality than {sampleFormat}, expect some loss in audio fidelity.");
}
}
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, $"The selected audio backend doesn't support the requested audio input configuration, fallback to dummy...");
// TODO: We currently don't support audio input upsampling/downsampling, implement this.
realSession.Dispose();
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
// It must be a HardwareDeviceSessionOutputBase.
if (realSession is not HardwareDeviceSessionOutputBase realSessionOutputBase)
{
throw new InvalidOperationException($"Real driver session class type isn't based on {typeof(HardwareDeviceSessionOutputBase).Name}.");
}
// If we need to do post processing before sending to the hardware device, wrap around it.
return new CompatLayerHardwareDeviceSession(realSessionOutputBase, sampleFormat, channelCount);
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
// TODO: More formats.
return sampleFormat == SampleFormat.PcmInt16;
}
public bool SupportsSampleRate(uint sampleRate)
{
// TODO: More sample rates.
return sampleRate == Constants.TargetSampleRate;
}
public IHardwareDeviceDriver GetRealDeviceDriver()
{
return _realDriver;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Input || direction == Direction.Output;
}
}
}
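
An illustrative way to put the compat layer in front of a concrete backend; the dummy driver from this commit is used here only because it needs no native dependencies.

using Ryujinx.Audio.Backends.CompatLayer;
using Ryujinx.Audio.Backends.Dummy;
using Ryujinx.Audio.Integration;

// The wrapper transparently downmixes channels and converts sample formats that the
// real backend does not support before forwarding sessions to it.
IHardwareDeviceDriver realDriver = new DummyHardwareDeviceDriver();
IHardwareDeviceDriver driver = new CompatLayerHardwareDeviceDriver(realDriver);

// Sessions are then opened through the wrapper; OpenDeviceSession additionally needs an
// IVirtualMemoryManager instance, which the emulator supplies elsewhere.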


@@ -0,0 +1,162 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Dsp;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Backends.CompatLayer
{
class CompatLayerHardwareDeviceSession : HardwareDeviceSessionOutputBase
{
private HardwareDeviceSessionOutputBase _realSession;
private SampleFormat _userSampleFormat;
private uint _userChannelCount;
public CompatLayerHardwareDeviceSession(HardwareDeviceSessionOutputBase realSession, SampleFormat userSampleFormat, uint userChannelCount) : base(realSession.MemoryManager, realSession.RequestedSampleFormat, realSession.RequestedSampleRate, userChannelCount)
{
_realSession = realSession;
_userSampleFormat = userSampleFormat;
_userChannelCount = userChannelCount;
}
public override void Dispose()
{
_realSession.Dispose();
}
public override ulong GetPlayedSampleCount()
{
return _realSession.GetPlayedSampleCount();
}
public override float GetVolume()
{
return _realSession.GetVolume();
}
public override void PrepareToClose()
{
_realSession.PrepareToClose();
}
public override void QueueBuffer(AudioBuffer buffer)
{
SampleFormat realSampleFormat = _realSession.RequestedSampleFormat;
if (_userSampleFormat != realSampleFormat)
{
if (_userSampleFormat != SampleFormat.PcmInt16)
{
throw new NotImplementedException("Converting formats other than PCM16 is not supported.");
}
int userSampleCount = buffer.Data.Length / BackendHelper.GetSampleSize(_userSampleFormat);
ReadOnlySpan<short> samples = MemoryMarshal.Cast<byte, short>(buffer.Data);
byte[] convertedSamples = new byte[BackendHelper.GetSampleSize(realSampleFormat) * userSampleCount];
switch (realSampleFormat)
{
case SampleFormat.PcmInt8:
PcmHelper.ConvertSampleToPcm8(MemoryMarshal.Cast<byte, sbyte>(convertedSamples), samples);
break;
case SampleFormat.PcmInt24:
PcmHelper.ConvertSampleToPcm24(convertedSamples, samples);
break;
case SampleFormat.PcmInt32:
PcmHelper.ConvertSampleToPcm32(MemoryMarshal.Cast<byte, int>(convertedSamples), samples);
break;
case SampleFormat.PcmFloat:
PcmHelper.ConvertSampleToPcmFloat(MemoryMarshal.Cast<byte, float>(convertedSamples), samples);
break;
default:
throw new NotImplementedException($"Sample format conversion from {_userSampleFormat} to {realSampleFormat} not implemented.");
}
buffer.Data = convertedSamples;
}
_realSession.QueueBuffer(buffer);
}
public override bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (samples == null)
{
return false;
}
if (_userChannelCount != _realSession.RequestedChannelCount)
{
if (_userSampleFormat != SampleFormat.PcmInt16)
{
throw new NotImplementedException("Downmixing formats other than PCM16 is not supported.");
}
ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);
if (_userChannelCount == 6)
{
samplesPCM16 = Downmixing.DownMixSurroundToStereo(samplesPCM16);
if (_realSession.RequestedChannelCount == 1)
{
samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
}
}
else if (_userChannelCount == 2 && _realSession.RequestedChannelCount == 1)
{
samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
}
else
{
throw new NotImplementedException($"Downmixing from {_userChannelCount} to {_realSession.RequestedChannelCount} not implemented.");
}
samples = MemoryMarshal.Cast<short, byte>(samplesPCM16).ToArray();
}
AudioBuffer fakeBuffer = new AudioBuffer
{
BufferTag = buffer.BufferTag,
DataPointer = buffer.DataPointer,
DataSize = (ulong)samples.Length
};
bool result = _realSession.RegisterBuffer(fakeBuffer, samples);
if (result)
{
buffer.Data = fakeBuffer.Data;
buffer.DataSize = fakeBuffer.DataSize;
}
return result;
}
public override void SetVolume(float volume)
{
_realSession.SetVolume(volume);
}
public override void Start()
{
_realSession.Start();
}
public override void Stop()
{
_realSession.Stop();
}
public override void UnregisterBuffer(AudioBuffer buffer)
{
_realSession.UnregisterBuffer(buffer);
}
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return _realSession.WasBufferFullyConsumed(buffer);
}
}
}


@@ -0,0 +1,125 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Backends.CompatLayer
{
public static class Downmixing
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
private struct Channel51FormatPCM16
{
public short FrontLeft;
public short FrontRight;
public short FrontCenter;
public short LowFrequency;
public short BackLeft;
public short BackRight;
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
private struct ChannelStereoFormatPCM16
{
public short Left;
public short Right;
}
private const int Q15Bits = 16;
private const int RawQ15One = 1 << Q15Bits;
private const int RawQ15HalfOne = (int)(0.5f * RawQ15One);
private const int Minus3dBInQ15 = (int)(0.707f * RawQ15One);
private const int Minus6dBInQ15 = (int)(0.501f * RawQ15One);
private const int Minus12dBInQ15 = (int)(0.251f * RawQ15One);
private static readonly int[] DefaultSurroundToStereoCoefficients = new int[4]
{
RawQ15One,
Minus3dBInQ15,
Minus12dBInQ15,
Minus3dBInQ15
};
private static readonly int[] DefaultStereoToMonoCoefficients = new int[2]
{
Minus6dBInQ15,
Minus6dBInQ15
};
private const int SurroundChannelCount = 6;
private const int StereoChannelCount = 2;
private const int MonoChannelCount = 1;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<Channel51FormatPCM16> GetSurroundBuffer(ReadOnlySpan<short> data)
{
return MemoryMarshal.Cast<short, Channel51FormatPCM16>(data);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<ChannelStereoFormatPCM16> GetStereoBuffer(ReadOnlySpan<short> data)
{
return MemoryMarshal.Cast<short, ChannelStereoFormatPCM16>(data);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short DownMixStereoToMono(ReadOnlySpan<int> coefficients, short left, short right)
{
return (short)((left * coefficients[0] + right * coefficients[1]) >> Q15Bits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, short back, short lfe, short center, short front)
{
return (short)((coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front + RawQ15HalfOne) >> Q15Bits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short[] DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
{
int samplePerChannelCount = data.Length / SurroundChannelCount;
short[] downmixedBuffer = new short[samplePerChannelCount * StereoChannelCount];
ReadOnlySpan<Channel51FormatPCM16> channels = GetSurroundBuffer(data);
for (int i = 0; i < samplePerChannelCount; i++)
{
Channel51FormatPCM16 channel = channels[i];
downmixedBuffer[i * 2] = DownMixSurroundToStereo(coefficients, channel.BackLeft, channel.LowFrequency, channel.FrontCenter, channel.FrontLeft);
downmixedBuffer[i * 2 + 1] = DownMixSurroundToStereo(coefficients, channel.BackRight, channel.LowFrequency, channel.FrontCenter, channel.FrontRight);
}
return downmixedBuffer;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short[] DownMixStereoToMono(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
{
int samplePerChannelCount = data.Length / StereoChannelCount;
short[] downmixedBuffer = new short[samplePerChannelCount * MonoChannelCount];
ReadOnlySpan<ChannelStereoFormatPCM16> channels = GetStereoBuffer(data);
for (int i = 0; i < samplePerChannelCount; i++)
{
ChannelStereoFormatPCM16 channel = channels[i];
downmixedBuffer[i] = DownMixStereoToMono(coefficients, channel.Left, channel.Right);
}
return downmixedBuffer;
}
public static short[] DownMixStereoToMono(ReadOnlySpan<short> data)
{
return DownMixStereoToMono(DefaultStereoToMonoCoefficients, data);
}
public static short[] DownMixSurroundToStereo(ReadOnlySpan<short> data)
{
return DownMixSurroundToStereo(DefaultSurroundToStereoCoefficients, data);
}
}
}
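
To make the fixed-point math above concrete (illustrative only): with the default -6 dB stereo-to-mono coefficients (0.501 scaled to 32833 in the Q15 representation used here), two interleaved stereo frames downmix as shown in the comments below.

using Ryujinx.Audio.Backends.CompatLayer;

// Interleaved PCM16 stereo: (L, R) pairs.
short[] stereo = { 1000, 1000, -2000, 2000 };

// Frame 0: (1000 * 32833 + 1000 * 32833) >> 16 == 1001 (each side attenuated ~6 dB, then summed).
// Frame 1: (-2000 * 32833 + 2000 * 32833) >> 16 == 0 (opposite phases cancel).
short[] mono = Downmixing.DownMixStereoToMono(stereo);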


@@ -0,0 +1,89 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.Dummy
{
public class DummyHardwareDeviceDriver : IHardwareDeviceDriver
{
private ManualResetEvent _updateRequiredEvent;
private ManualResetEvent _pauseEvent;
public static bool IsSupported => true;
public DummyHardwareDeviceDriver()
{
_updateRequiredEvent = new ManualResetEvent(false);
_pauseEvent = new ManualResetEvent(true);
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
{
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
if (channelCount == 0)
{
channelCount = 2;
}
if (direction == Direction.Output)
{
return new DummyHardwareDeviceSessionOutput(this, memoryManager, sampleFormat, sampleRate, channelCount, volume);
}
else
{
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _updateRequiredEvent;
}
public ManualResetEvent GetPauseEvent()
{
return _pauseEvent;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// NOTE: The _updateRequiredEvent will be disposed somewhere else.
_pauseEvent.Dispose();
}
}
public bool SupportsSampleRate(uint sampleRate)
{
return true;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
return true;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Output || direction == Direction.Input;
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
}
}


@@ -0,0 +1,67 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System;
namespace Ryujinx.Audio.Backends.Dummy
{
class DummyHardwareDeviceSessionInput : IHardwareDeviceSession
{
private float _volume;
private IHardwareDeviceDriver _manager;
private IVirtualMemoryManager _memoryManager;
public DummyHardwareDeviceSessionInput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
_volume = 1.0f;
_manager = manager;
_memoryManager = memoryManager;
}
public void Dispose()
{
// Nothing to do.
}
public ulong GetPlayedSampleCount()
{
// Not implemented for input.
throw new NotSupportedException();
}
public float GetVolume()
{
return _volume;
}
public void PrepareToClose() { }
public void QueueBuffer(AudioBuffer buffer)
{
_memoryManager.Fill(buffer.DataPointer, buffer.DataSize, 0);
_manager.GetUpdateRequiredEvent().Set();
}
public bool RegisterBuffer(AudioBuffer buffer)
{
return buffer.DataPointer != 0;
}
public void SetVolume(float volume)
{
_volume = volume;
}
public void Start() { }
public void Stop() { }
public void UnregisterBuffer(AudioBuffer buffer) { }
public bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}


@@ -0,0 +1,62 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
namespace Ryujinx.Audio.Backends.Dummy
{
internal class DummyHardwareDeviceSessionOutput : HardwareDeviceSessionOutputBase
{
private float _volume;
private IHardwareDeviceDriver _manager;
private ulong _playedSampleCount;
public DummyHardwareDeviceSessionOutput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount, float requestedVolume) : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
{
_volume = requestedVolume;
_manager = manager;
}
public override void Dispose()
{
// Nothing to do.
}
public override ulong GetPlayedSampleCount()
{
return Interlocked.Read(ref _playedSampleCount);
}
public override float GetVolume()
{
return _volume;
}
public override void PrepareToClose() { }
public override void QueueBuffer(AudioBuffer buffer)
{
Interlocked.Add(ref _playedSampleCount, GetSampleCount(buffer));
_manager.GetUpdateRequiredEvent().Set();
}
public override void SetVolume(float volume)
{
_volume = volume;
}
public override void Start() { }
public override void Stop() { }
public override void UnregisterBuffer(AudioBuffer buffer) { }
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}


@@ -0,0 +1,37 @@
using Ryujinx.Audio.Integration;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Represent an audio buffer that will be used by an <see cref="IHardwareDeviceSession"/>.
/// </summary>
public class AudioBuffer
{
/// <summary>
/// Unique tag of this buffer.
/// </summary>
/// <remarks>Unique per session</remarks>
public ulong BufferTag;
/// <summary>
/// Pointer to the user samples.
/// </summary>
public ulong DataPointer;
/// <summary>
/// Size of the user samples region.
/// </summary>
public ulong DataSize;
/// <summary>
/// The timestamp at which the buffer was played.
/// </summary>
/// <remarks>Not used but useful for debugging</remarks>
public ulong PlayedTimestamp;
/// <summary>
/// The user samples.
/// </summary>
public byte[] Data;
}
}


@@ -0,0 +1,518 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Common;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// An audio device session.
/// </summary>
class AudioDeviceSession : IDisposable
{
/// <summary>
/// The volume of the <see cref="AudioDeviceSession"/>.
/// </summary>
private float _volume;
/// <summary>
/// The state of the <see cref="AudioDeviceSession"/>.
/// </summary>
private AudioDeviceState _state;
/// <summary>
/// Array of all buffers currently used or released.
/// </summary>
private AudioBuffer[] _buffers;
/// <summary>
/// The server index inside <see cref="_buffers"/> (appended but not queued to device driver).
/// </summary>
private uint _serverBufferIndex;
/// <summary>
/// The hardware index inside <see cref="_buffers"/> (queued to device driver).
/// </summary>
private uint _hardwareBufferIndex;
/// <summary>
/// The released index inside <see cref="_buffers"/> (released by the device driver).
/// </summary>
private uint _releasedBufferIndex;
/// <summary>
/// The count of buffers appended (server side).
/// </summary>
private uint _bufferAppendedCount;
/// <summary>
/// The count of buffers registered (driver side).
/// </summary>
private uint _bufferRegisteredCount;
/// <summary>
/// The count of buffers released (by the driver side).
/// </summary>
private uint _bufferReleasedCount;
/// <summary>
/// The released buffer event.
/// </summary>
private IWritableEvent _bufferEvent;
/// <summary>
/// The session on the device driver.
/// </summary>
private IHardwareDeviceSession _hardwareDeviceSession;
/// <summary>
/// Max number of buffers that can be registered to the device driver at a time.
/// </summary>
private uint _bufferRegisteredLimit;
/// <summary>
/// Create a new <see cref="AudioDeviceSession"/>.
/// </summary>
/// <param name="deviceSession">The device driver session associated</param>
/// <param name="bufferEvent">The release buffer event</param>
/// <param name="bufferRegisteredLimit">The max number of buffers that can be registered to the device driver at a time</param>
public AudioDeviceSession(IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent, uint bufferRegisteredLimit = 4)
{
_bufferEvent = bufferEvent;
_hardwareDeviceSession = deviceSession;
_bufferRegisteredLimit = bufferRegisteredLimit;
_buffers = new AudioBuffer[Constants.AudioDeviceBufferCountMax];
_serverBufferIndex = 0;
_hardwareBufferIndex = 0;
_releasedBufferIndex = 0;
_bufferAppendedCount = 0;
_bufferRegisteredCount = 0;
_bufferReleasedCount = 0;
_volume = deviceSession.GetVolume();
_state = AudioDeviceState.Stopped;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent GetBufferEvent()
{
return _bufferEvent;
}
/// <summary>
/// Get the state of the session.
/// </summary>
/// <returns>The state of the session</returns>
public AudioDeviceState GetState()
{
Debug.Assert(_state == AudioDeviceState.Started || _state == AudioDeviceState.Stopped);
return _state;
}
/// <summary>
/// Get the total buffer count (server + driver + released).
/// </summary>
/// <returns>Return the total buffer count</returns>
private uint GetTotalBufferCount()
{
uint bufferCount = _bufferAppendedCount + _bufferRegisteredCount + _bufferReleasedCount;
Debug.Assert(bufferCount <= Constants.AudioDeviceBufferCountMax);
return bufferCount;
}
/// <summary>
/// Register a new <see cref="AudioBuffer"/> on the server side.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to register</param>
/// <returns>True if the operation succeeded</returns>
private bool RegisterBuffer(AudioBuffer buffer)
{
if (GetTotalBufferCount() == Constants.AudioDeviceBufferCountMax)
{
return false;
}
_buffers[_serverBufferIndex] = buffer;
_serverBufferIndex = (_serverBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
_bufferAppendedCount++;
return true;
}
/// <summary>
/// Flush server buffers to hardware.
/// </summary>
private void FlushToHardware()
{
uint bufferToFlushCount = Math.Min(Math.Min(_bufferAppendedCount, 4), _bufferRegisteredLimit - _bufferRegisteredCount);
AudioBuffer[] buffersToFlush = new AudioBuffer[bufferToFlushCount];
uint hardwareBufferIndex = _hardwareBufferIndex;
for (int i = 0; i < buffersToFlush.Length; i++)
{
buffersToFlush[i] = _buffers[hardwareBufferIndex];
_bufferAppendedCount--;
_bufferRegisteredCount++;
hardwareBufferIndex = (hardwareBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
_hardwareBufferIndex = hardwareBufferIndex;
for (int i = 0; i < buffersToFlush.Length; i++)
{
_hardwareDeviceSession.QueueBuffer(buffersToFlush[i]);
}
}
/// <summary>
/// Get the current index of the <see cref="AudioBuffer"/> playing on the driver side.
/// </summary>
/// <param name="playingIndex">The output index of the <see cref="AudioBuffer"/> playing on the driver side</param>
/// <returns>True if any buffer is playing</returns>
private bool TryGetPlayingBufferIndex(out uint playingIndex)
{
if (_bufferRegisteredCount > 0)
{
playingIndex = (_hardwareBufferIndex - _bufferRegisteredCount) % Constants.AudioDeviceBufferCountMax;
return true;
}
playingIndex = 0;
return false;
}
/// <summary>
/// Try to pop the <see cref="AudioBuffer"/> playing on the driver side.
/// </summary>
/// <param name="buffer">The output <see cref="AudioBuffer"/> playing on the driver side</param>
/// <returns>True if any buffer is playing</returns>
private bool TryPopPlayingBuffer(out AudioBuffer buffer)
{
if (_bufferRegisteredCount > 0)
{
uint bufferIndex = (_hardwareBufferIndex - _bufferRegisteredCount) % Constants.AudioDeviceBufferCountMax;
buffer = _buffers[bufferIndex];
_buffers[bufferIndex] = null;
_bufferRegisteredCount--;
return true;
}
buffer = null;
return false;
}
/// <summary>
/// Try to pop a <see cref="AudioBuffer"/> released by the driver side.
/// </summary>
/// <param name="buffer">The output <see cref="AudioBuffer"/> released by the driver side</param>
/// <returns>True if any buffer has been released</returns>
public bool TryPopReleasedBuffer(out AudioBuffer buffer)
{
if (_bufferReleasedCount > 0)
{
uint bufferIndex = (_releasedBufferIndex - _bufferReleasedCount) % Constants.AudioDeviceBufferCountMax;
buffer = _buffers[bufferIndex];
_buffers[bufferIndex] = null;
_bufferReleasedCount--;
return true;
}
buffer = null;
return false;
}
/// <summary>
/// Release a <see cref="AudioBuffer"/>.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to release</param>
private void ReleaseBuffer(AudioBuffer buffer)
{
buffer.PlayedTimestamp = (ulong)PerformanceCounter.ElapsedNanoseconds;
_bufferRegisteredCount--;
_bufferReleasedCount++;
_releasedBufferIndex = (_releasedBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
/// <summary>
/// Update the released buffers.
/// </summary>
/// <param name="updateForStop">True if the session is currently stopping</param>
private void UpdateReleaseBuffers(bool updateForStop = false)
{
bool wasAnyBuffersReleased = false;
while (TryGetPlayingBufferIndex(out uint playingIndex))
{
if (!updateForStop && !_hardwareDeviceSession.WasBufferFullyConsumed(_buffers[playingIndex]))
{
break;
}
if (updateForStop)
{
_hardwareDeviceSession.UnregisterBuffer(_buffers[playingIndex]);
}
ReleaseBuffer(_buffers[playingIndex]);
wasAnyBuffersReleased = true;
}
if (wasAnyBuffersReleased)
{
_bufferEvent.Signal();
}
}
/// <summary>
/// Append a new <see cref="AudioBuffer"/>.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to append</param>
/// <returns>True if the buffer was appended</returns>
public bool AppendBuffer(AudioBuffer buffer)
{
if (_hardwareDeviceSession.RegisterBuffer(buffer))
{
if (RegisterBuffer(buffer))
{
FlushToHardware();
return true;
}
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
return false;
}
public bool AppendUacBuffer(AudioBuffer buffer, uint handle)
{
// NOTE: On hardware, there is another RegisterBuffer method taking a handle.
// This variant of the call always returns false (stubbed?); as a result, this logic will never succeed.
return false;
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
if (_state == AudioDeviceState.Started)
{
return ResultCode.OperationFailed;
}
_hardwareDeviceSession.Start();
_state = AudioDeviceState.Started;
FlushToHardware();
_hardwareDeviceSession.SetVolume(_volume);
return ResultCode.Success;
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
if (_state == AudioDeviceState.Started)
{
_hardwareDeviceSession.Stop();
UpdateReleaseBuffers(true);
_state = AudioDeviceState.Stopped;
}
return ResultCode.Success;
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
return _hardwareDeviceSession.GetVolume();
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
_volume = volume;
if (_state == AudioDeviceState.Started)
{
_hardwareDeviceSession.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffers currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffers currently in use</returns>
public uint GetBufferCount()
{
return _bufferAppendedCount + _bufferRegisteredCount;
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
uint bufferIndex = (_releasedBufferIndex - _bufferReleasedCount) % Constants.AudioDeviceBufferCountMax;
uint totalBufferCount = GetTotalBufferCount();
for (int i = 0; i < totalBufferCount; i++)
{
if (_buffers[bufferIndex].BufferTag == bufferTag)
{
return true;
}
bufferIndex = (bufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
return false;
}
/// <summary>
/// Get the count of samples played in this session.
/// </summary>
/// <returns>The count of samples played in this session</returns>
public ulong GetPlayedSampleCount()
{
if (_state == AudioDeviceState.Stopped)
{
return 0;
}
else
{
return _hardwareDeviceSession.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffer was flushed</returns>
public bool FlushBuffers()
{
if (_state == AudioDeviceState.Stopped)
{
return false;
}
uint bufferCount = GetBufferCount();
while (TryPopReleasedBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
while (TryPopPlayingBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
if (_bufferRegisteredCount == 0 || (_bufferReleasedCount + _bufferAppendedCount) > Constants.AudioDeviceBufferCountMax)
{
return false;
}
_bufferReleasedCount += _bufferAppendedCount;
_releasedBufferIndex = (_releasedBufferIndex + _bufferAppendedCount) % Constants.AudioDeviceBufferCountMax;
_bufferAppendedCount = 0;
_hardwareBufferIndex = _serverBufferIndex;
if (bufferCount > 0)
{
_bufferEvent.Signal();
}
return true;
}
/// <summary>
/// Update the session.
/// </summary>
public void Update()
{
if (_state == AudioDeviceState.Started)
{
UpdateReleaseBuffers();
FlushToHardware();
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Tell the hardware session that we are ending.
_hardwareDeviceSession.PrepareToClose();
// Unregister all buffers
while (TryPopReleasedBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
while (TryPopPlayingBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
// Finally dispose hardware session.
_hardwareDeviceSession.Dispose();
_bufferEvent.Signal();
}
}
}
}
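
An illustrative sketch of the buffer lifecycle managed by the session above; since AudioDeviceSession is internal, code like this would live inside Ryujinx.Audio, and the driver session and event are assumed to be supplied by the backend layer.

using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;

internal static class AudioDeviceSessionExample
{
    public static void RunOneCycle(IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent)
    {
        AudioDeviceSession session = new AudioDeviceSession(deviceSession, bufferEvent);

        // Append a guest buffer; it is registered with the driver and flushed to hardware.
        session.AppendBuffer(new AudioBuffer { BufferTag = 1, DataPointer = 0x1000, DataSize = 0x400 });
        session.Start();

        // Normally called when the driver signals its update event (see AudioManager).
        session.Update();

        // Hand fully consumed buffers back to the guest by tag.
        while (session.TryPopReleasedBuffer(out AudioBuffer released))
        {
            _ = released.BufferTag;
        }

        session.Stop();
        session.Dispose();
    }
}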


@@ -0,0 +1,18 @@
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio device state.
/// </summary>
public enum AudioDeviceState : uint
{
/// <summary>
/// The audio device is started.
/// </summary>
Started,
/// <summary>
/// The audio device is stopped.
/// </summary>
Stopped
}
}


@@ -0,0 +1,29 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio user input configuration.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioInputConfiguration
{
/// <summary>
/// The target sample rate of the user.
/// </summary>
/// <remarks>Only 48000 Hz is considered valid; other sample rates will be refused.</remarks>
public uint SampleRate;
/// <summary>
/// The target channel count of the user.
/// </summary>
/// <remarks>Only Stereo and Surround are considered valid, other configurations will be refused.</remarks>
/// <remarks>Not used in audin.</remarks>
public ushort ChannelCount;
/// <summary>
/// Reserved/unused.
/// </summary>
private ushort _reserved;
}
}


@@ -0,0 +1,37 @@
using Ryujinx.Common.Memory;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio system output configuration.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioOutputConfiguration
{
/// <summary>
/// The target sample rate of the system.
/// </summary>
public uint SampleRate;
/// <summary>
/// The target channel count of the system.
/// </summary>
public uint ChannelCount;
/// <summary>
/// The target sample format of the system.
/// </summary>
public SampleFormat SampleFormat;
/// <summary>
/// Reserved/unused.
/// </summary>
private Array3<byte> _padding;
/// <summary>
/// The initial audio system state.
/// </summary>
public AudioDeviceState AudioOutState;
}
}


@@ -0,0 +1,36 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio user buffer.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioUserBuffer
{
/// <summary>
/// Pointer to the next buffer (ignored).
/// </summary>
public ulong NextBuffer;
/// <summary>
/// Pointer to the user samples.
/// </summary>
public ulong Data;
/// <summary>
/// Capacity of the buffer (unused).
/// </summary>
public ulong Capacity;
/// <summary>
/// Size of the user samples region.
/// </summary>
public ulong DataSize;
/// <summary>
/// Offset in the user samples region (unused).
/// </summary>
public ulong DataOffset;
}
}


@@ -0,0 +1,43 @@
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Sample format definition.
/// </summary>
public enum SampleFormat : byte
{
/// <summary>
/// Invalid sample format.
/// </summary>
Invalid = 0,
/// <summary>
/// PCM8 sample format. (unsupported)
/// </summary>
PcmInt8 = 1,
/// <summary>
/// PCM16 sample format.
/// </summary>
PcmInt16 = 2,
/// <summary>
/// PCM24 sample format. (unsupported)
/// </summary>
PcmInt24 = 3,
/// <summary>
/// PCM32 sample format.
/// </summary>
PcmInt32 = 4,
/// <summary>
/// PCM Float sample format.
/// </summary>
PcmFloat = 5,
/// <summary>
/// ADPCM sample format. (Also known as GC-ADPCM)
/// </summary>
Adpcm = 6
}
}


@@ -0,0 +1,175 @@
namespace Ryujinx.Audio
{
/// <summary>
/// Define constants used by the audio system.
/// </summary>
public static class Constants
{
/// <summary>
/// The default device output name.
/// </summary>
public const string DefaultDeviceOutputName = "DeviceOut";
/// <summary>
/// The default device input name.
/// </summary>
public const string DefaultDeviceInputName = "BuiltInHeadset";
/// <summary>
/// The maximum number of channels supported. (6 channels for 5.1 surround)
/// </summary>
public const int ChannelCountMax = 6;
/// <summary>
/// The maximum number of channels supported per voice.
/// </summary>
public const int VoiceChannelCountMax = ChannelCountMax;
/// <summary>
/// The maximum count of mix buffers supported per operation (volumes, mix effects, ...)
/// </summary>
public const int MixBufferCountMax = 24;
/// <summary>
/// The maximum count of wavebuffers per voice.
/// </summary>
public const int VoiceWaveBufferCount = 4;
/// <summary>
/// The maximum count of biquad filters per voice.
/// </summary>
public const int VoiceBiquadFilterCount = 2;
/// <summary>
/// The lowest priority that a voice can have.
/// </summary>
public const int VoiceLowestPriority = 0xFF;
/// <summary>
/// The highest priority that a voice can have.
/// </summary>
/// <remarks>Voices with the highest priority will not be dropped if a voice drop needs to occur.</remarks>
public const int VoiceHighestPriority = 0;
/// <summary>
/// Maximum <see cref="Common.BehaviourParameter.ErrorInfo"/> that can be returned by <see cref="Parameter.BehaviourErrorInfoOutStatus"/>.
/// </summary>
public const int MaxErrorInfos = 10;
/// <summary>
/// Default alignment for buffers.
/// </summary>
public const int BufferAlignment = 0x40;
/// <summary>
/// Alignment required for the work buffer.
/// </summary>
public const int WorkBufferAlignment = 0x1000;
/// <summary>
/// Alignment required for every performance metrics frame.
/// </summary>
public const int PerformanceMetricsPerFramesSizeAlignment = 0x100;
/// <summary>
/// The id of the final mix.
/// </summary>
public const int FinalMixId = 0;
/// <summary>
/// The id defining an unused mix id.
/// </summary>
public const int UnusedMixId = int.MaxValue;
/// <summary>
/// The id defining an unused splitter id as a signed integer.
/// </summary>
public const int UnusedSplitterIdInt = -1;
/// <summary>
/// The id defining an unused splitter id.
/// </summary>
public const uint UnusedSplitterId = uint.MaxValue;
/// <summary>
/// The id of an invalid/unused node.
/// </summary>
public const int InvalidNodeId = -268435456;
/// <summary>
/// The index considered invalid for processing order.
/// </summary>
public const int InvalidProcessingOrder = -1;
/// <summary>
/// The maximum number of audio renderer sessions allowed to be created system wide.
/// </summary>
public const int AudioRendererSessionCountMax = 2;
/// <summary>
/// The maximum number of audio output sessions allowed to be created system wide.
/// </summary>
public const int AudioOutSessionCountMax = 12;
/// <summary>
/// The maximum number of audio input sessions allowed to be created system wide.
/// </summary>
public const int AudioInSessionCountMax = 4;
/// <summary>
/// Maximum buffers supported by one audio device session.
/// </summary>
public const int AudioDeviceBufferCountMax = 32;
/// <summary>
/// The target sample rate of the audio renderer. (48kHz)
/// </summary>
public const uint TargetSampleRate = 48000;
/// <summary>
/// The target sample size of the audio renderer. (PCM16)
/// </summary>
public const int TargetSampleSize = sizeof(ushort);
/// <summary>
/// The target sample count per audio renderer update.
/// </summary>
public const int TargetSampleCount = 240;
/// <summary>
/// The size of an upsampler entry to process upsampling to <see cref="TargetSampleRate"/>.
/// </summary>
public const int UpSampleEntrySize = TargetSampleCount * VoiceChannelCountMax;
/// <summary>
/// The target audio latency computed from <see cref="TargetSampleRate"/> and <see cref="TargetSampleCount"/>.
/// </summary>
public const int AudioProcessorMaxUpdateTimeTarget = 1000000000 / ((int)TargetSampleRate / TargetSampleCount); // 5.00 ms
/// <summary>
/// The maximum update time of the DSP on original hardware.
/// </summary>
public const int AudioProcessorMaxUpdateTime = 5760000; // 5.76 ms
/// <summary>
/// The maximum update time per audio renderer session.
/// </summary>
public const int AudioProcessorMaxUpdateTimePerSessions = AudioProcessorMaxUpdateTime / AudioRendererSessionCountMax;
/// <summary>
/// Guest timer frequency used for system ticks.
/// </summary>
public const int TargetTimerFrequency = 19200000;
/// <summary>
/// The default coefficients used for standard 5.1 surround to stereo downmixing.
/// </summary>
public static float[] DefaultSurroundToStereoCoefficients = new float[4]
{
1.0f,
0.707f,
0.251f,
0.707f,
};
}
}
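
A quick check of the derived timing constant above: 48000 / 240 = 200 renderer updates per second, so AudioProcessorMaxUpdateTimeTarget = 1 000 000 000 / 200 = 5 000 000 ns, which is the 5.00 ms noted in the comment and sits comfortably below the 5.76 ms hardware budget.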


@@ -0,0 +1,266 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading;
namespace Ryujinx.Audio.Input
{
/// <summary>
/// The audio input manager.
/// </summary>
public class AudioInputManager : IDisposable
{
private object _lock = new object();
/// <summary>
/// Lock used for session allocation.
/// </summary>
private object _sessionLock = new object();
/// <summary>
/// The session ids allocation table.
/// </summary>
private int[] _sessionIds;
/// <summary>
/// The device driver.
/// </summary>
private IHardwareDeviceDriver _deviceDriver;
/// <summary>
/// The events linked to each session.
/// </summary>
private IWritableEvent[] _sessionsBufferEvents;
/// <summary>
/// The <see cref="AudioInputSystem"/> session instances.
/// </summary>
private AudioInputSystem[] _sessions;
/// <summary>
/// The count of active sessions.
/// </summary>
private int _activeSessionCount;
/// <summary>
/// The dispose state.
/// </summary>
private int _disposeState;
/// <summary>
/// Create a new <see cref="AudioInputManager"/>.
/// </summary>
public AudioInputManager()
{
_sessionIds = new int[Constants.AudioInSessionCountMax];
_sessions = new AudioInputSystem[Constants.AudioInSessionCountMax];
_activeSessionCount = 0;
for (int i = 0; i < _sessionIds.Length; i++)
{
_sessionIds[i] = i;
}
}
/// <summary>
/// Initialize the <see cref="AudioInputManager"/>.
/// </summary>
/// <param name="deviceDriver">The device driver.</param>
/// <param name="sessionRegisterEvents">The events associated to each session.</param>
public void Initialize(IHardwareDeviceDriver deviceDriver, IWritableEvent[] sessionRegisterEvents)
{
_deviceDriver = deviceDriver;
_sessionsBufferEvents = sessionRegisterEvents;
}
/// <summary>
/// Acquire a new session id.
/// </summary>
/// <returns>A new session id.</returns>
private int AcquireSessionId()
{
lock (_sessionLock)
{
int index = _activeSessionCount;
Debug.Assert(index < _sessionIds.Length);
int sessionId = _sessionIds[index];
_sessionIds[index] = -1;
_activeSessionCount++;
Logger.Info?.Print(LogClass.AudioRenderer, $"Registered new input ({sessionId})");
return sessionId;
}
}
/// <summary>
/// Release a given <paramref name="sessionId"/>.
/// </summary>
/// <param name="sessionId">The session id to release.</param>
private void ReleaseSessionId(int sessionId)
{
lock (_sessionLock)
{
Debug.Assert(_activeSessionCount > 0);
int newIndex = --_activeSessionCount;
_sessionIds[newIndex] = sessionId;
}
Logger.Info?.Print(LogClass.AudioRenderer, $"Unregistered input ({sessionId})");
}
/// <summary>
/// Used to update the audio input sessions.
/// </summary>
public void Update()
{
lock (_sessionLock)
{
foreach (AudioInputSystem input in _sessions)
{
input?.Update();
}
}
}
/// <summary>
/// Register a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="input">The <see cref="AudioInputSystem"/> to register.</param>
private void Register(AudioInputSystem input)
{
lock (_sessionLock)
{
_sessions[input.GetSessionId()] = input;
}
}
/// <summary>
/// Unregister an <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="input">The <see cref="AudioInputSystem"/> to unregister.</param>
internal void Unregister(AudioInputSystem input)
{
lock (_sessionLock)
{
int sessionId = input.GetSessionId();
_sessions[input.GetSessionId()] = null;
ReleaseSessionId(sessionId);
}
}
/// <summary>
/// Get the list of all audio input names.
/// </summary>
/// <param name="filtered">If true, filter disconnected devices</param>
/// <returns>The list of all audio input names</returns>
public string[] ListAudioIns(bool filtered)
{
if (filtered)
{
// TODO: Detect if the driver supports audio input
}
return new string[] { Constants.DefaultDeviceInputName };
}
/// <summary>
/// Open a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="outputDeviceName">The output device name selected by the <see cref="AudioInputSystem"/></param>
/// <param name="outputConfiguration">The output audio configuration selected by the <see cref="AudioInputSystem"/></param>
/// <param name="obj">The new <see cref="AudioInputSystem"/></param>
/// <param name="memoryManager">The memory manager that will be used for all guest memory operations</param>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="appletResourceUserId">The applet resource user id of the application</param>
/// <param name="processHandle">The process handle of the application</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode OpenAudioIn(out string outputDeviceName,
out AudioOutputConfiguration outputConfiguration,
out AudioInputSystem obj,
IVirtualMemoryManager memoryManager,
string inputDeviceName,
SampleFormat sampleFormat,
ref AudioInputConfiguration parameter,
ulong appletResourceUserId,
uint processHandle)
{
int sessionId = AcquireSessionId();
_sessionsBufferEvents[sessionId].Clear();
IHardwareDeviceSession deviceSession = _deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Input, memoryManager, sampleFormat, parameter.SampleRate, parameter.ChannelCount);
AudioInputSystem audioIn = new AudioInputSystem(this, _lock, deviceSession, _sessionsBufferEvents[sessionId]);
ResultCode result = audioIn.Initialize(inputDeviceName, sampleFormat, ref parameter, sessionId);
if (result == ResultCode.Success)
{
outputDeviceName = audioIn.DeviceName;
outputConfiguration = new AudioOutputConfiguration
{
ChannelCount = audioIn.ChannelCount,
SampleFormat = audioIn.SampleFormat,
SampleRate = audioIn.SampleRate,
AudioOutState = audioIn.GetState(),
};
obj = audioIn;
Register(audioIn);
}
else
{
ReleaseSessionId(sessionId);
obj = null;
outputDeviceName = null;
outputConfiguration = default;
}
return result;
}
public void Dispose()
{
if (Interlocked.CompareExchange(ref _disposeState, 1, 0) == 0)
{
Dispose(true);
}
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Clone the sessions array to dispose them outside the lock.
AudioInputSystem[] sessions;
lock (_sessionLock)
{
sessions = _sessions.ToArray();
}
foreach (AudioInputSystem input in sessions)
{
input?.Dispose();
}
}
}
}
}


@@ -0,0 +1,392 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using System;
using System.Threading;
namespace Ryujinx.Audio.Input
{
/// <summary>
/// Audio input system.
/// </summary>
public class AudioInputSystem : IDisposable
{
/// <summary>
/// The session id associated to the <see cref="AudioInputSystem"/>.
/// </summary>
private int _sessionId;
/// <summary>
/// The session of the <see cref="AudioInputSystem"/>.
/// </summary>
private AudioDeviceSession _session;
/// <summary>
/// The target device name of the <see cref="AudioInputSystem"/>.
/// </summary>
public string DeviceName { get; private set; }
/// <summary>
/// The target sample rate of the <see cref="AudioInputSystem"/>.
/// </summary>
public uint SampleRate { get; private set; }
/// <summary>
/// The target channel count of the <see cref="AudioInputSystem"/>.
/// </summary>
public uint ChannelCount { get; private set; }
/// <summary>
/// The target sample format of the <see cref="AudioInputSystem"/>.
/// </summary>
public SampleFormat SampleFormat { get; private set; }
/// <summary>
/// The <see cref="AudioInputManager"/> owning this.
/// </summary>
private AudioInputManager _manager;
/// <summary>
/// The lock of the parent.
/// </summary>
private object _parentLock;
/// <summary>
/// The dispose state.
/// </summary>
private int _disposeState;
/// <summary>
/// Create a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="manager">The manager instance</param>
/// <param name="parentLock">The lock of the manager</param>
/// <param name="deviceSession">The hardware device session</param>
/// <param name="bufferEvent">The buffer release event of the audio input</param>
public AudioInputSystem(AudioInputManager manager, object parentLock, IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent)
{
_manager = manager;
_parentLock = parentLock;
_session = new AudioDeviceSession(deviceSession, bufferEvent);
}
/// <summary>
/// Get the default device name on the system.
/// </summary>
/// <returns>The default device name on the system.</returns>
private static string GetDeviceDefaultName()
{
return Constants.DefaultDeviceInputName;
}
/// <summary>
/// Check if a given configuration and device name are valid on the system.
/// </summary>
/// <param name="configuration">The configuration to check.</param>
/// <param name="deviceName">The device name to check.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
private static ResultCode IsConfigurationValid(ref AudioInputConfiguration configuration, string deviceName)
{
if (deviceName.Length != 0 && !deviceName.Equals(GetDeviceDefaultName()))
{
return ResultCode.DeviceNotFound;
}
else if (configuration.SampleRate != 0 && configuration.SampleRate != Constants.TargetSampleRate)
{
return ResultCode.UnsupportedSampleRate;
}
else if (configuration.ChannelCount != 0 && configuration.ChannelCount != 1 && configuration.ChannelCount != 2 && configuration.ChannelCount != 6)
{
return ResultCode.UnsupportedChannelConfiguration;
}
return ResultCode.Success;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent RegisterBufferEvent()
{
lock (_parentLock)
{
return _session.GetBufferEvent();
}
}
/// <summary>
/// Update the <see cref="AudioInputSystem"/>.
/// </summary>
public void Update()
{
lock (_parentLock)
{
_session.Update();
}
}
/// <summary>
/// Get the id of this session.
/// </summary>
/// <returns>The id of this session</returns>
public int GetSessionId()
{
return _sessionId;
}
/// <summary>
/// Initialize the <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="sessionId">The session id associated to this <see cref="AudioInputSystem"/></param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode Initialize(string inputDeviceName, SampleFormat sampleFormat, ref AudioInputConfiguration parameter, int sessionId)
{
_sessionId = sessionId;
ResultCode result = IsConfigurationValid(ref parameter, inputDeviceName);
if (result == ResultCode.Success)
{
if (inputDeviceName.Length == 0)
{
DeviceName = GetDeviceDefaultName();
}
else
{
DeviceName = inputDeviceName;
}
if (parameter.ChannelCount == 6)
{
ChannelCount = 6;
}
else
{
ChannelCount = 2;
}
SampleFormat = sampleFormat;
SampleRate = Constants.TargetSampleRate;
}
return result;
}
/// <summary>
/// Append a new audio buffer to the audio input.
/// </summary>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer informations.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendBuffer(buffer))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Append a new audio buffer to the audio input.
/// </summary>
/// <remarks>This is broken by design, only added for completeness.</remarks>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer information.</param>
/// <param name="handle">Some unknown handle.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendUacBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer, uint handle)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendUacBuffer(buffer, handle))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Get the released buffers.
/// </summary>
/// <param name="releasedBuffers">The buffer to write the released buffer tags to</param>
/// <param name="releasedCount">The count of released buffers</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode GetReleasedBuffers(Span<ulong> releasedBuffers, out uint releasedCount)
{
releasedCount = 0;
// Ensure that the first entry is set to zero if no entries are returned.
if (releasedBuffers.Length > 0)
{
releasedBuffers[0] = 0;
}
lock (_parentLock)
{
for (int i = 0; i < releasedBuffers.Length; i++)
{
if (!_session.TryPopReleasedBuffer(out AudioBuffer buffer))
{
break;
}
releasedBuffers[i] = buffer.BufferTag;
releasedCount++;
}
}
return ResultCode.Success;
}
/// <summary>
/// Get the current state of the <see cref="AudioInputSystem"/>.
/// </summary>
/// <returns>Return the current state of the <see cref="AudioInputSystem"/></returns>
public AudioDeviceState GetState()
{
lock (_parentLock)
{
return _session.GetState();
}
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
lock (_parentLock)
{
return _session.Start();
}
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
lock (_parentLock)
{
return _session.Stop();
}
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
lock (_parentLock)
{
return _session.GetVolume();
}
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
lock (_parentLock)
{
_session.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffers currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffers currently in use</returns>
public uint GetBufferCount()
{
lock (_parentLock)
{
return _session.GetBufferCount();
}
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
lock (_parentLock)
{
return _session.ContainsBuffer(bufferTag);
}
}
/// <summary>
/// Get the count of samples played in this session.
/// </summary>
/// <returns>The count of samples played in this session</returns>
public ulong GetPlayedSampleCount()
{
lock (_parentLock)
{
return _session.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffers were flushed</returns>
public bool FlushBuffers()
{
lock (_parentLock)
{
return _session.FlushBuffers();
}
}
public void Dispose()
{
if (Interlocked.CompareExchange(ref _disposeState, 1, 0) == 0)
{
Dispose(true);
}
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
_manager.Unregister(this);
}
}
}
}
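A typical caller drains the released buffer tags after the buffer event fires. The sketch below is hypothetical: it assumes an already initialized AudioInputSystem instance named audioIn, and the buffer count of four is arbitrary.
// Hypothetical usage sketch of GetReleasedBuffers.
Span<ulong> released = stackalloc ulong[4];
audioIn.GetReleasedBuffers(released, out uint releasedCount);
for (int i = 0; i < releasedCount; i++)
{
    // released[i] is the BufferTag previously passed to AppendBuffer.
}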

View file

@@ -0,0 +1,75 @@
using Ryujinx.Audio.Common;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Integration
{
public class HardwareDeviceImpl : IHardwareDevice
{
private IHardwareDeviceSession _session;
private uint _channelCount;
private uint _sampleRate;
private uint _currentBufferTag;
private byte[] _buffer;
public HardwareDeviceImpl(IHardwareDeviceDriver deviceDriver, uint channelCount, uint sampleRate, float volume)
{
_session = deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Output, null, SampleFormat.PcmInt16, sampleRate, channelCount, volume);
_channelCount = channelCount;
_sampleRate = sampleRate;
_currentBufferTag = 0;
_buffer = new byte[Constants.TargetSampleCount * channelCount * sizeof(ushort)];
_session.Start();
}
public void AppendBuffer(ReadOnlySpan<short> data, uint channelCount)
{
data.CopyTo(MemoryMarshal.Cast<byte, short>(_buffer));
_session.QueueBuffer(new AudioBuffer
{
DataPointer = _currentBufferTag++,
Data = _buffer,
DataSize = (ulong)_buffer.Length,
});
_currentBufferTag = _currentBufferTag % 4;
}
public void SetVolume(float volume)
{
_session.SetVolume(volume);
}
public float GetVolume()
{
return _session.GetVolume();
}
public uint GetChannelCount()
{
return _channelCount;
}
public uint GetSampleRate()
{
return _sampleRate;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
}
}
}
}

View file

@@ -0,0 +1,55 @@
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represents a hardware device used in <see cref="Renderer.Dsp.Command.DeviceSinkCommand"/>.
/// </summary>
public interface IHardwareDevice : IDisposable
{
/// <summary>
/// Sets the volume level for this device.
/// </summary>
/// <param name="volume">The volume level to set.</param>
void SetVolume(float volume);
/// <summary>
/// Gets the volume level for this device.
/// </summary>
/// <returns>The volume level of this device.</returns>
float GetVolume();
/// <summary>
/// Get the supported sample rate of this device.
/// </summary>
/// <returns>The supported sample rate of this device.</returns>
uint GetSampleRate();
/// <summary>
/// Get the channel count supported by this device.
/// </summary>
/// <returns>The channel count supported by this device.</returns>
uint GetChannelCount();
/// <summary>
/// Appends new PCM16 samples to the device.
/// </summary>
/// <param name="data">The new PCM16 samples.</param>
/// <param name="channelCount">The number of channels.</param>
void AppendBuffer(ReadOnlySpan<short> data, uint channelCount);
/// <summary>
/// Check if the audio renderer needs to perform downmixing.
/// </summary>
/// <returns>True if downmixing is needed.</returns>
public bool NeedDownmixing()
{
uint channelCount = GetChannelCount();
Debug.Assert(channelCount > 0 && channelCount <= Constants.ChannelCountMax);
return channelCount != Constants.ChannelCountMax;
}
}
}

View file

@@ -0,0 +1,36 @@
using Ryujinx.Audio.Common;
using Ryujinx.Memory;
using System;
using System.Threading;
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represents a hardware device driver used in <see cref="Output.AudioOutputSystem"/>.
/// </summary>
public interface IHardwareDeviceDriver : IDisposable
{
public enum Direction
{
Input,
Output
}
IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume = 1f);
ManualResetEvent GetUpdateRequiredEvent();
ManualResetEvent GetPauseEvent();
bool SupportsDirection(Direction direction);
bool SupportsSampleRate(uint sampleRate);
bool SupportsSampleFormat(SampleFormat sampleFormat);
bool SupportsChannelCount(uint channelCount);
static abstract bool IsSupported { get; }
IHardwareDeviceDriver GetRealDeviceDriver()
{
return this;
}
}
}

View file

@@ -0,0 +1,28 @@
using Ryujinx.Audio.Common;
using System;
namespace Ryujinx.Audio.Integration
{
public interface IHardwareDeviceSession : IDisposable
{
bool RegisterBuffer(AudioBuffer buffer);
void UnregisterBuffer(AudioBuffer buffer);
void QueueBuffer(AudioBuffer buffer);
bool WasBufferFullyConsumed(AudioBuffer buffer);
void SetVolume(float volume);
float GetVolume();
ulong GetPlayedSampleCount();
void Start();
void Stop();
void PrepareToClose();
}
}

View file

@@ -0,0 +1,18 @@
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represents a writable event with manual clear.
/// </summary>
public interface IWritableEvent
{
/// <summary>
/// Signal the event.
/// </summary>
void Signal();
/// <summary>
/// Clear the signaled state of the event.
/// </summary>
void Clear();
}
}
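For reference, an implementation of this interface can be as thin as a wrapper around a ManualResetEvent. The class below is a hypothetical sketch (its name is not part of this change) and only illustrates the Signal/Clear contract.
using System.Threading;
namespace Ryujinx.Audio.Integration
{
    // Hypothetical minimal IWritableEvent backed by a ManualResetEvent.
    public class ManualResetWritableEvent : IWritableEvent
    {
        private readonly ManualResetEvent _event = new ManualResetEvent(false);
        public void Signal() => _event.Set();
        public void Clear() => _event.Reset();
    }
}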

View file

@@ -0,0 +1,296 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading;
namespace Ryujinx.Audio.Output
{
/// <summary>
/// The audio output manager.
/// </summary>
public class AudioOutputManager : IDisposable
{
private object _lock = new object();
/// <summary>
/// Lock used for session allocation.
/// </summary>
private object _sessionLock = new object();
/// <summary>
/// The session ids allocation table.
/// </summary>
private int[] _sessionIds;
/// <summary>
/// The device driver.
/// </summary>
private IHardwareDeviceDriver _deviceDriver;
/// <summary>
/// The events linked to each session.
/// </summary>
private IWritableEvent[] _sessionsBufferEvents;
/// <summary>
/// The <see cref="AudioOutputSystem"/> session instances.
/// </summary>
private AudioOutputSystem[] _sessions;
/// <summary>
/// The count of active sessions.
/// </summary>
private int _activeSessionCount;
/// <summary>
/// The dispose state.
/// </summary>
private int _disposeState;
/// <summary>
/// Create a new <see cref="AudioOutputManager"/>.
/// </summary>
public AudioOutputManager()
{
_sessionIds = new int[Constants.AudioOutSessionCountMax];
_sessions = new AudioOutputSystem[Constants.AudioOutSessionCountMax];
_activeSessionCount = 0;
for (int i = 0; i < _sessionIds.Length; i++)
{
_sessionIds[i] = i;
}
}
/// <summary>
/// Initialize the <see cref="AudioOutputManager"/>.
/// </summary>
/// <param name="deviceDriver">The device driver.</param>
/// <param name="sessionRegisterEvents">The events associated to each session.</param>
public void Initialize(IHardwareDeviceDriver deviceDriver, IWritableEvent[] sessionRegisterEvents)
{
_deviceDriver = deviceDriver;
_sessionsBufferEvents = sessionRegisterEvents;
}
/// <summary>
/// Acquire a new session id.
/// </summary>
/// <returns>A new session id.</returns>
private int AcquireSessionId()
{
lock (_sessionLock)
{
int index = _activeSessionCount;
Debug.Assert(index < _sessionIds.Length);
int sessionId = _sessionIds[index];
_sessionIds[index] = -1;
_activeSessionCount++;
Logger.Info?.Print(LogClass.AudioRenderer, $"Registered new output ({sessionId})");
return sessionId;
}
}
/// <summary>
/// Release a given <paramref name="sessionId"/>.
/// </summary>
/// <param name="sessionId">The session id to release.</param>
private void ReleaseSessionId(int sessionId)
{
lock (_sessionLock)
{
Debug.Assert(_activeSessionCount > 0);
int newIndex = --_activeSessionCount;
_sessionIds[newIndex] = sessionId;
}
Logger.Info?.Print(LogClass.AudioRenderer, $"Unregistered output ({sessionId})");
}
/// <summary>
/// Update all registered audio output sessions.
/// </summary>
public void Update()
{
lock (_sessionLock)
{
foreach (AudioOutputSystem output in _sessions)
{
output?.Update();
}
}
}
/// <summary>
/// Register a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="output">The <see cref="AudioOutputSystem"/> to register.</param>
private void Register(AudioOutputSystem output)
{
lock (_sessionLock)
{
_sessions[output.GetSessionId()] = output;
}
}
/// <summary>
/// Unregister an <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="output">The <see cref="AudioOutputSystem"/> to unregister.</param>
internal void Unregister(AudioOutputSystem output)
{
lock (_sessionLock)
{
int sessionId = output.GetSessionId();
_sessions[output.GetSessionId()] = null;
ReleaseSessionId(sessionId);
}
}
/// <summary>
/// Get the names of all audio outputs.
/// </summary>
/// <returns>The names of all audio outputs</returns>
public string[] ListAudioOuts()
{
return new string[] { Constants.DefaultDeviceOutputName };
}
/// <summary>
/// Open a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="outputDeviceName">The output device name selected by the <see cref="AudioOutputSystem"/></param>
/// <param name="outputConfiguration">The output audio configuration selected by the <see cref="AudioOutputSystem"/></param>
/// <param name="obj">The new <see cref="AudioOutputSystem"/></param>
/// <param name="memoryManager">The memory manager that will be used for all guest memory operations</param>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="appletResourceUserId">The applet resource user id of the application</param>
/// <param name="processHandle">The process handle of the application</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode OpenAudioOut(out string outputDeviceName,
out AudioOutputConfiguration outputConfiguration,
out AudioOutputSystem obj,
IVirtualMemoryManager memoryManager,
string inputDeviceName,
SampleFormat sampleFormat,
ref AudioInputConfiguration parameter,
ulong appletResourceUserId,
uint processHandle,
float volume)
{
int sessionId = AcquireSessionId();
_sessionsBufferEvents[sessionId].Clear();
IHardwareDeviceSession deviceSession = _deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Output, memoryManager, sampleFormat, parameter.SampleRate, parameter.ChannelCount, volume);
AudioOutputSystem audioOut = new AudioOutputSystem(this, _lock, deviceSession, _sessionsBufferEvents[sessionId]);
ResultCode result = audioOut.Initialize(inputDeviceName, sampleFormat, ref parameter, sessionId);
if (result == ResultCode.Success)
{
outputDeviceName = audioOut.DeviceName;
outputConfiguration = new AudioOutputConfiguration
{
ChannelCount = audioOut.ChannelCount,
SampleFormat = audioOut.SampleFormat,
SampleRate = audioOut.SampleRate,
AudioOutState = audioOut.GetState(),
};
obj = audioOut;
Register(audioOut);
}
else
{
ReleaseSessionId(sessionId);
obj = null;
outputDeviceName = null;
outputConfiguration = default;
}
return result;
}
/// <summary>
/// Sets the volume for all output devices.
/// </summary>
/// <param name="volume">The volume to set.</param>
public void SetVolume(float volume)
{
if (_sessions != null)
{
foreach (AudioOutputSystem session in _sessions)
{
session?.SetVolume(volume);
}
}
}
/// <summary>
/// Gets the volume for all output devices.
/// </summary>
/// <returns>A float indicating the volume level.</returns>
public float GetVolume()
{
if (_sessions != null)
{
foreach (AudioOutputSystem session in _sessions)
{
if (session != null)
{
return session.GetVolume();
}
}
}
return 0.0f;
}
public void Dispose()
{
if (Interlocked.CompareExchange(ref _disposeState, 1, 0) == 0)
{
Dispose(true);
}
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Clone the sessions array to dispose them outside the lock.
AudioOutputSystem[] sessions;
lock (_sessionLock)
{
sessions = _sessions.ToArray();
}
foreach (AudioOutputSystem output in sessions)
{
output?.Dispose();
}
}
}
}
}
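A rough usage sketch of opening an output through the manager is shown below. It is not taken from the calling service code: outputManager, memoryManager, parameter, appletResourceUserId and processHandle are assumed to exist already, and the empty device name selects the default output.
// Hypothetical sketch, assuming the surrounding service already holds these objects.
ResultCode result = outputManager.OpenAudioOut(
    out string deviceName,
    out AudioOutputConfiguration outputConfiguration,
    out AudioOutputSystem audioOut,
    memoryManager,
    "",                      // an empty name falls back to the default output device
    SampleFormat.PcmInt16,
    ref parameter,
    appletResourceUserId,
    processHandle,
    1.0f);
if (result == ResultCode.Success)
{
    audioOut.Start();
}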

View file

@@ -0,0 +1,365 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using System;
using System.Threading;
namespace Ryujinx.Audio.Output
{
/// <summary>
/// Audio output system.
/// </summary>
public class AudioOutputSystem : IDisposable
{
/// <summary>
/// The session id associated to the <see cref="AudioOutputSystem"/>.
/// </summary>
private int _sessionId;
/// <summary>
/// The session the <see cref="AudioOutputSystem"/>.
/// </summary>
private AudioDeviceSession _session;
/// <summary>
/// The target device name of the <see cref="AudioOutputSystem"/>.
/// </summary>
public string DeviceName { get; private set; }
/// <summary>
/// The target sample rate of the <see cref="AudioOutputSystem"/>.
/// </summary>
public uint SampleRate { get; private set; }
/// <summary>
/// The target channel count of the <see cref="AudioOutputSystem"/>.
/// </summary>
public uint ChannelCount { get; private set; }
/// <summary>
/// The target sample format of the <see cref="AudioOutputSystem"/>.
/// </summary>
public SampleFormat SampleFormat { get; private set; }
/// <summary>
/// The <see cref="AudioOutputManager"/> owning this.
/// </summary>
private AudioOutputManager _manager;
/// <summary>
/// The lock of the parent.
/// </summary>
private object _parentLock;
/// <summary>
/// The dispose state.
/// </summary>
private int _disposeState;
/// <summary>
/// Create a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="manager">The manager instance</param>
/// <param name="parentLock">The lock of the manager</param>
/// <param name="deviceSession">The hardware device session</param>
/// <param name="bufferEvent">The buffer release event of the audio output</param>
public AudioOutputSystem(AudioOutputManager manager, object parentLock, IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent)
{
_manager = manager;
_parentLock = parentLock;
_session = new AudioDeviceSession(deviceSession, bufferEvent);
}
/// <summary>
/// Get the default device name on the system.
/// </summary>
/// <returns>The default device name on the system.</returns>
private static string GetDeviceDefaultName()
{
return Constants.DefaultDeviceOutputName;
}
/// <summary>
/// Check if a given configuration and device name are valid on the system.
/// </summary>
/// <param name="configuration">The configuration to check.</param>
/// <param name="deviceName">The device name to check.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
private static ResultCode IsConfigurationValid(ref AudioInputConfiguration configuration, string deviceName)
{
if (deviceName.Length != 0 && !deviceName.Equals(GetDeviceDefaultName()))
{
return ResultCode.DeviceNotFound;
}
else if (configuration.SampleRate != 0 && configuration.SampleRate != Constants.TargetSampleRate)
{
return ResultCode.UnsupportedSampleRate;
}
else if (configuration.ChannelCount != 0 && configuration.ChannelCount != 1 && configuration.ChannelCount != 2 && configuration.ChannelCount != 6)
{
return ResultCode.UnsupportedChannelConfiguration;
}
return ResultCode.Success;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent RegisterBufferEvent()
{
lock (_parentLock)
{
return _session.GetBufferEvent();
}
}
/// <summary>
/// Update the <see cref="AudioOutputSystem"/>.
/// </summary>
public void Update()
{
lock (_parentLock)
{
_session.Update();
}
}
/// <summary>
/// Get the id of this session.
/// </summary>
/// <returns>The id of this session</returns>
public int GetSessionId()
{
return _sessionId;
}
/// <summary>
/// Initialize the <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="sessionId">The session id associated to this <see cref="AudioOutputSystem"/></param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode Initialize(string inputDeviceName, SampleFormat sampleFormat, ref AudioInputConfiguration parameter, int sessionId)
{
_sessionId = sessionId;
ResultCode result = IsConfigurationValid(ref parameter, inputDeviceName);
if (result == ResultCode.Success)
{
if (inputDeviceName.Length == 0)
{
DeviceName = GetDeviceDefaultName();
}
else
{
DeviceName = inputDeviceName;
}
if (parameter.ChannelCount == 6)
{
ChannelCount = 6;
}
else
{
ChannelCount = 2;
}
SampleFormat = sampleFormat;
SampleRate = Constants.TargetSampleRate;
}
return result;
}
/// <summary>
/// Append a new audio buffer to the audio output.
/// </summary>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer informations.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendBuffer(buffer))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Get the released buffers.
/// </summary>
/// <param name="releasedBuffers">The buffer to write the released buffer tags to</param>
/// <param name="releasedCount">The count of released buffers</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode GetReleasedBuffer(Span<ulong> releasedBuffers, out uint releasedCount)
{
releasedCount = 0;
// Ensure that the first entry is set to zero if no entries are returned.
if (releasedBuffers.Length > 0)
{
releasedBuffers[0] = 0;
}
lock (_parentLock)
{
for (int i = 0; i < releasedBuffers.Length; i++)
{
if (!_session.TryPopReleasedBuffer(out AudioBuffer buffer))
{
break;
}
releasedBuffers[i] = buffer.BufferTag;
releasedCount++;
}
}
return ResultCode.Success;
}
/// <summary>
/// Get the current state of the <see cref="AudioOutputSystem"/>.
/// </summary>
/// <returns>Return the current state of the <see cref="AudioOutputSystem"/></returns>
public AudioDeviceState GetState()
{
lock (_parentLock)
{
return _session.GetState();
}
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
lock (_parentLock)
{
return _session.Start();
}
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
lock (_parentLock)
{
return _session.Stop();
}
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
lock (_parentLock)
{
return _session.GetVolume();
}
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
lock (_parentLock)
{
_session.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffers currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffers currently in use</returns>
public uint GetBufferCount()
{
lock (_parentLock)
{
return _session.GetBufferCount();
}
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
lock (_parentLock)
{
return _session.ContainsBuffer(bufferTag);
}
}
/// <summary>
/// Get the count of samples played in this session.
/// </summary>
/// <returns>The count of samples played in this session</returns>
public ulong GetPlayedSampleCount()
{
lock (_parentLock)
{
return _session.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffers were flushed</returns>
public bool FlushBuffers()
{
lock (_parentLock)
{
return _session.FlushBuffers();
}
}
public void Dispose()
{
if (Interlocked.CompareExchange(ref _disposeState, 1, 0) == 0)
{
Dispose(true);
}
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
_manager.Unregister(this);
}
}
}
}

View file

@@ -0,0 +1,13 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AuxiliaryBufferAddresses
{
public ulong SendBufferInfo;
public ulong SendBufferInfoBase;
public ulong ReturnBufferInfo;
public ulong ReturnBufferInfoBase;
}
}

View file

@@ -0,0 +1,50 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the input parameter for <see cref="Server.BehaviourContext"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct BehaviourParameter
{
/// <summary>
/// The current audio renderer revision in use.
/// </summary>
public int UserRevision;
/// <summary>
/// Reserved/padding.
/// </summary>
private uint _padding;
/// <summary>
/// The flags controlling the behaviour of the audio renderer.
/// </summary>
/// <remarks>See <see cref="Server.BehaviourContext.UpdateFlags(ulong)"/> and <see cref="Server.BehaviourContext.IsMemoryPoolForceMappingEnabled"/>.</remarks>
public ulong Flags;
/// <summary>
/// Represents an error during <see cref="Server.AudioRenderSystem.Update(System.Memory{byte}, System.Memory{byte}, System.ReadOnlyMemory{byte})"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct ErrorInfo
{
/// <summary>
/// The error code to report.
/// </summary>
public ResultCode ErrorCode;
/// <summary>
/// Reserved/padding.
/// </summary>
private uint _padding;
/// <summary>
/// Extra information given with the <see cref="ResultCode"/>.
/// </summary>
/// <remarks>This is usually used to report a faulting CPU address when a <see cref="Server.MemoryPool.MemoryPoolState"/> mapping fails.</remarks>
public ulong ExtraErrorInfo;
}
}
}

View file

@@ -0,0 +1,150 @@
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents an adjacency matrix.
/// </summary>
/// <remarks>This is used for splitter routing.</remarks>
public class EdgeMatrix
{
/// <summary>
/// Backing <see cref="BitArray"/> used for node connections.
/// </summary>
private BitArray _storage;
/// <summary>
/// The count of nodes of the current instance.
/// </summary>
private int _nodeCount;
/// <summary>
/// Get the work buffer size required for the <see cref="EdgeMatrix"/>.
/// </summary>
/// <param name="nodeCount">The count of nodes.</param>
/// <returns>The size required for the given <paramref name="nodeCount"/>.</returns>
public static int GetWorkBufferSize(int nodeCount)
{
int size = BitUtils.AlignUp(nodeCount * nodeCount, Constants.BufferAlignment);
return size / Unsafe.SizeOf<byte>();
}
/// <summary>
/// Initializes the <see cref="EdgeMatrix"/> instance with backing memory.
/// </summary>
/// <param name="edgeMatrixWorkBuffer">The backing memory.</param>
/// <param name="nodeCount">The count of nodes.</param>
public void Initialize(Memory<byte> edgeMatrixWorkBuffer, int nodeCount)
{
Debug.Assert(edgeMatrixWorkBuffer.Length >= GetWorkBufferSize(nodeCount));
_storage = new BitArray(edgeMatrixWorkBuffer);
_nodeCount = nodeCount;
_storage.Reset();
}
/// <summary>
/// Test if the bit at the given index is set.
/// </summary>
/// <param name="index">A bit index.</param>
/// <returns>Returns true if the bit at the given index is set</returns>
public bool Test(int index)
{
return _storage.Test(index);
}
/// <summary>
/// Reset all bits in the storage.
/// </summary>
public void Reset()
{
_storage.Reset();
}
/// <summary>
/// Reset the bit at the given index.
/// </summary>
/// <param name="index">A bit index.</param>
public void Reset(int index)
{
_storage.Reset(index);
}
/// <summary>
/// Set the bit at the given index.
/// </summary>
/// <param name="index">A bit index.</param>
public void Set(int index)
{
_storage.Set(index);
}
/// <summary>
/// Connect a given source to a given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
public void Connect(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
_storage.Set(_nodeCount * source + destination);
}
/// <summary>
/// Check if the given source is connected to the given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
/// <returns>Returns true if the given source is connected to the given destination.</returns>
public bool Connected(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
return _storage.Test(_nodeCount * source + destination);
}
/// <summary>
/// Disconnect a given source from a given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
public void Disconnect(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
_storage.Reset(_nodeCount * source + destination);
}
/// <summary>
/// Remove all edges from a given source.
/// </summary>
/// <param name="source">The source index.</param>
public void RemoveEdges(int source)
{
for (int i = 0; i < _nodeCount; i++)
{
Disconnect(source, i);
}
}
/// <summary>
/// Get the total node count.
/// </summary>
/// <returns>The total node count.</returns>
public int GetNodeCount()
{
return _nodeCount;
}
}
}
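As an illustrative sketch (the node count and routing below are arbitrary), the matrix is initialized from a caller-provided byte buffer sized with GetWorkBufferSize and then queried per (source, destination) pair.
// Hypothetical usage sketch: connect node 0 to node 2 and query the edge.
int nodeCount = 4;
EdgeMatrix matrix = new EdgeMatrix();
matrix.Initialize(new byte[EdgeMatrix.GetWorkBufferSize(nodeCount)], nodeCount);
matrix.Connect(0, 2);
bool connected = matrix.Connected(0, 2); // true
matrix.RemoveEdges(0);                   // drops every edge leaving node 0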

View file

@@ -0,0 +1,58 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of an effect.
/// </summary>
public enum EffectType : byte
{
/// <summary>
/// Invalid effect.
/// </summary>
Invalid,
/// <summary>
/// Effect applying additional mixing capability.
/// </summary>
BufferMix,
/// <summary>
/// Effect applying custom user effect (via auxiliary buffers).
/// </summary>
AuxiliaryBuffer,
/// <summary>
/// Effect applying a delay.
/// </summary>
Delay,
/// <summary>
/// Effect applying a reverberation effect via a given preset.
/// </summary>
Reverb,
/// <summary>
/// Effect applying a 3D reverberation effect via a given preset.
/// </summary>
Reverb3d,
/// <summary>
/// Effect applying a biquad filter.
/// </summary>
BiquadFilter,
/// <summary>
/// Effect applying a limiter (DRC).
/// </summary>
Limiter,
/// <summary>
/// Effect to capture mixes (via auxiliary buffers).
/// </summary>
CaptureBuffer,
/// <summary>
/// Effect applying a compressor filter (DRC).
/// </summary>
Compressor,
}
}

View file

@@ -0,0 +1,43 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the state of a memory pool.
/// </summary>
public enum MemoryPoolUserState : uint
{
/// <summary>
/// Invalid state.
/// </summary>
Invalid = 0,
/// <summary>
/// The memory pool is new. (client side only)
/// </summary>
New = 1,
/// <summary>
/// The user asked to detach the memory pool from the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
RequestDetach = 2,
/// <summary>
/// The memory pool is detached from the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
Detached = 3,
/// <summary>
/// The user asked to attach the memory pool to the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
RequestAttach = 4,
/// <summary>
/// The memory pool is attached to the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
Attached = 5,
/// <summary>
/// The memory pool is released. (client side only)
/// </summary>
Released = 6
}
}

View file

@@ -0,0 +1,28 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Helper for manipulating node ids.
/// </summary>
public static class NodeIdHelper
{
/// <summary>
/// Get the type of a node from a given node id.
/// </summary>
/// <param name="nodeId">Id of the node.</param>
/// <returns>The type of the node.</returns>
public static NodeIdType GetType(int nodeId)
{
return (NodeIdType)(nodeId >> 28);
}
/// <summary>
/// Get the base of a node from a given node id.
/// </summary>
/// <param name="nodeId">Id of the node.</param>
/// <returns>The base of the node.</returns>
public static int GetBase(int nodeId)
{
return (nodeId >> 16) & 0xFFF;
}
}
}
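For illustration, the two helpers above imply that the node type lives in bits 28-31 of a node id and the base in bits 16-27; the sample id below is made up.
// Hypothetical example of the node id layout decoded by NodeIdHelper.
int nodeId = ((int)NodeIdType.Voice << 28) | (5 << 16);
NodeIdType type = NodeIdHelper.GetType(nodeId); // NodeIdType.Voice
int baseIndex = NodeIdHelper.GetBase(nodeId);   // 5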

View file

@@ -0,0 +1,33 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of a node.
/// </summary>
public enum NodeIdType : byte
{
/// <summary>
/// Invalid node id.
/// </summary>
Invalid = 0,
/// <summary>
/// Voice related node id. (data source, biquad filter, ...)
/// </summary>
Voice = 1,
/// <summary>
/// Mix related node id. (mix, effects, splitters, ...)
/// </summary>
Mix = 2,
/// <summary>
/// Sink related node id. (device &amp; circular buffer sink)
/// </summary>
Sink = 3,
/// <summary>
/// Performance monitoring related node id (performance commands)
/// </summary>
Performance = 15
}
}

View file

@@ -0,0 +1,229 @@
using Ryujinx.Audio.Renderer.Utils;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Common
{
public class NodeStates
{
private class Stack
{
private Memory<int> _storage;
private int _index;
private int _nodeCount;
public void Reset(Memory<int> storage, int nodeCount)
{
Debug.Assert(storage.Length * sizeof(int) >= CalcBufferSize(nodeCount));
_storage = storage;
_index = 0;
_nodeCount = nodeCount;
}
public int GetCurrentCount()
{
return _index;
}
public void Push(int data)
{
Debug.Assert(_index + 1 <= _nodeCount);
_storage.Span[_index++] = data;
}
public int Pop()
{
Debug.Assert(_index > 0);
return _storage.Span[--_index];
}
public int Top()
{
return _storage.Span[_index - 1];
}
public static int CalcBufferSize(int nodeCount)
{
return nodeCount * sizeof(int);
}
}
private int _nodeCount;
private EdgeMatrix _discovered;
private EdgeMatrix _finished;
private Memory<int> _resultArray;
private Stack _stack;
private int _tsortResultIndex;
private enum NodeState : byte
{
Unknown,
Discovered,
Finished
}
public NodeStates()
{
_stack = new Stack();
_discovered = new EdgeMatrix();
_finished = new EdgeMatrix();
}
public static int GetWorkBufferSize(int nodeCount)
{
return Stack.CalcBufferSize(nodeCount * nodeCount) + 0xC * nodeCount + 2 * EdgeMatrix.GetWorkBufferSize(nodeCount);
}
public void Initialize(Memory<byte> nodeStatesWorkBuffer, int nodeCount)
{
int workBufferSize = GetWorkBufferSize(nodeCount);
Debug.Assert(nodeStatesWorkBuffer.Length >= workBufferSize);
_nodeCount = nodeCount;
int edgeMatrixWorkBufferSize = EdgeMatrix.GetWorkBufferSize(nodeCount);
_discovered.Initialize(nodeStatesWorkBuffer.Slice(0, edgeMatrixWorkBufferSize), nodeCount);
_finished.Initialize(nodeStatesWorkBuffer.Slice(edgeMatrixWorkBufferSize, edgeMatrixWorkBufferSize), nodeCount);
nodeStatesWorkBuffer = nodeStatesWorkBuffer.Slice(edgeMatrixWorkBufferSize * 2);
_resultArray = SpanMemoryManager<int>.Cast(nodeStatesWorkBuffer.Slice(0, sizeof(int) * nodeCount));
nodeStatesWorkBuffer = nodeStatesWorkBuffer.Slice(sizeof(int) * nodeCount);
Memory<int> stackWorkBuffer = SpanMemoryManager<int>.Cast(nodeStatesWorkBuffer.Slice(0, Stack.CalcBufferSize(nodeCount * nodeCount)));
_stack.Reset(stackWorkBuffer, nodeCount * nodeCount);
}
private void Reset()
{
_discovered.Reset();
_finished.Reset();
_tsortResultIndex = 0;
_resultArray.Span.Fill(-1);
}
private NodeState GetState(int index)
{
Debug.Assert(index < _nodeCount);
if (_discovered.Test(index))
{
Debug.Assert(!_finished.Test(index));
return NodeState.Discovered;
}
else if (_finished.Test(index))
{
Debug.Assert(!_discovered.Test(index));
return NodeState.Finished;
}
return NodeState.Unknown;
}
private void SetState(int index, NodeState state)
{
switch (state)
{
case NodeState.Unknown:
_discovered.Reset(index);
_finished.Reset(index);
break;
case NodeState.Discovered:
_discovered.Set(index);
_finished.Reset(index);
break;
case NodeState.Finished:
_finished.Set(index);
_discovered.Reset(index);
break;
}
}
private void PushTsortResult(int index)
{
Debug.Assert(index < _nodeCount);
_resultArray.Span[_tsortResultIndex++] = index;
}
public ReadOnlySpan<int> GetTsortResult()
{
return _resultArray.Span.Slice(0, _tsortResultIndex);
}
public bool Sort(EdgeMatrix edgeMatrix)
{
Reset();
if (_nodeCount <= 0)
{
return true;
}
for (int i = 0; i < _nodeCount; i++)
{
if (GetState(i) == NodeState.Unknown)
{
_stack.Push(i);
}
while (_stack.GetCurrentCount() > 0)
{
int topIndex = _stack.Top();
NodeState topState = GetState(topIndex);
if (topState == NodeState.Discovered)
{
SetState(topIndex, NodeState.Finished);
PushTsortResult(topIndex);
_stack.Pop();
}
else if (topState == NodeState.Finished)
{
_stack.Pop();
}
else
{
if (topState == NodeState.Unknown)
{
SetState(topIndex, NodeState.Discovered);
}
for (int j = 0; j < edgeMatrix.GetNodeCount(); j++)
{
if (edgeMatrix.Connected(topIndex, j))
{
NodeState jState = GetState(j);
if (jState == NodeState.Unknown)
{
_stack.Push(j);
}
// Found a loop, reset and propagate rejection.
else if (jState == NodeState.Discovered)
{
Reset();
return false;
}
}
}
}
}
}
return true;
}
}
}
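A minimal sketch of the sort, assuming the work buffers are sized with the helpers above; the graph contents are arbitrary and Sort returns false only when it detects a cycle.
// Hypothetical usage sketch: topologically sort a 3-node graph with the edge 0 -> 1.
int nodeCount = 3;
EdgeMatrix edges = new EdgeMatrix();
edges.Initialize(new byte[EdgeMatrix.GetWorkBufferSize(nodeCount)], nodeCount);
edges.Connect(0, 1);
NodeStates states = new NodeStates();
states.Initialize(new byte[NodeStates.GetWorkBufferSize(nodeCount)], nodeCount);
bool acyclic = states.Sort(edges);                 // true, this graph has no cycle
ReadOnlySpan<int> order = states.GetTsortResult(); // node indices in DFS finish order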

View file

@@ -0,0 +1,20 @@
namespace Ryujinx.Audio.Renderer.Common
{
public enum PerformanceDetailType : byte
{
Unknown,
PcmInt16,
Adpcm,
VolumeRamp,
BiquadFilter,
Mix,
Delay,
Aux,
Reverb,
Reverb3d,
PcmFloat,
Limiter,
CaptureBuffer,
Compressor
}
}

View file

@@ -0,0 +1,11 @@
namespace Ryujinx.Audio.Renderer.Common
{
public enum PerformanceEntryType : byte
{
Invalid,
Voice,
SubMix,
FinalMix,
Sink
}
}

View file

@@ -0,0 +1,23 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Common play state.
/// </summary>
public enum PlayState : byte
{
/// <summary>
/// The user requests the voice to be started.
/// </summary>
Start,
/// <summary>
/// The user requests the voice to be stopped.
/// </summary>
Stop,
/// <summary>
/// The user requests the voice to be paused.
/// </summary>
Pause
}
}

View file

@@ -0,0 +1,33 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Early reverb reflection.
/// </summary>
public enum ReverbEarlyMode : uint
{
/// <summary>
/// Room early reflection. (small acoustic space, fast reflection)
/// </summary>
Room,
/// <summary>
/// Chamber early reflection. (bigger than <see cref="Room"/>'s acoustic space, short reflection)
/// </summary>
Chamber,
/// <summary>
/// Hall early reflection. (large acoustic space, warm reflection)
/// </summary>
Hall,
/// <summary>
/// Cathedral early reflection. (very large acoustic space, pronounced bright reflection)
/// </summary>
Cathedral,
/// <summary>
/// No early reflection.
/// </summary>
Disabled
}
}

View file

@@ -0,0 +1,38 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Late reverb reflection.
/// </summary>
public enum ReverbLateMode : uint
{
/// <summary>
/// Room late reflection. (small acoustic space, fast reflection)
/// </summary>
Room,
/// <summary>
/// Hall late reflection. (large acoustic space, warm reflection)
/// </summary>
Hall,
/// <summary>
/// Classic plate late reflection. (clean distinctive reverb)
/// </summary>
Plate,
/// <summary>
/// Cathedral late reflection. (very large acoustic space, pronounced bright reflection)
/// </summary>
Cathedral,
/// <summary>
/// Do not apply any delay. (max delay)
/// </summary>
NoDelay,
/// <summary>
/// Max delay. (used for delay line limits)
/// </summary>
Limit = NoDelay
}
}

View file

@@ -0,0 +1,23 @@
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of a sink.
/// </summary>
public enum SinkType : byte
{
/// <summary>
/// The sink is in an invalid state.
/// </summary>
Invalid,
/// <summary>
/// The sink is a device.
/// </summary>
Device,
/// <summary>
/// The sink is a circular buffer.
/// </summary>
CircularBuffer
}
}

View file

@@ -0,0 +1,33 @@
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Update data header used for input and output of <see cref="Server.AudioRenderSystem.Update(System.Memory{byte}, System.Memory{byte}, System.ReadOnlyMemory{byte})"/>.
/// </summary>
public struct UpdateDataHeader
{
public int Revision;
public uint BehaviourSize;
public uint MemoryPoolsSize;
public uint VoicesSize;
public uint VoiceResourcesSize;
public uint EffectsSize;
public uint MixesSize;
public uint SinksSize;
public uint PerformanceBufferSize;
public uint Unknown24;
public uint RenderInfoSize;
private unsafe fixed int _reserved[4];
public uint TotalSize;
public void Initialize(int revision)
{
Revision = revision;
TotalSize = (uint)Unsafe.SizeOf<UpdateDataHeader>();
}
}
}

View file

@@ -0,0 +1,104 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Common.Memory;
using Ryujinx.Common.Utilities;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the update state of a voice.
/// </summary>
/// <remarks>This is shared between the server and audio processor.</remarks>
[StructLayout(LayoutKind.Sequential, Pack = Align)]
public struct VoiceUpdateState
{
public const int Align = 0x10;
public const int BiquadStateOffset = 0x0;
public const int BiquadStateSize = 0x10;
/// <summary>
/// The state of the biquad filters of this voice.
/// </summary>
public Array2<BiquadFilterState> BiquadFilterState;
/// <summary>
/// The total number of samples played.
/// </summary>
/// <remarks>This is reset to 0 when a <see cref="WaveBuffer"/> finishes playing and <see cref="WaveBuffer.IsEndOfStream"/> is set.</remarks>
/// <remarks>This is reset to 0 when looping while <see cref="Parameter.VoiceInParameter.DecodingBehaviour.PlayedSampleCountResetWhenLooping"/> is set.</remarks>
public ulong PlayedSampleCount;
/// <summary>
/// The current sample offset in the <see cref="WaveBuffer"/> pointed by <see cref="WaveBufferIndex"/>.
/// </summary>
public int Offset;
/// <summary>
/// The current index of the <see cref="WaveBuffer"/> in use.
/// </summary>
public uint WaveBufferIndex;
private WaveBufferValidArray _isWaveBufferValid;
/// <summary>
/// The total amount of <see cref="WaveBuffer"/> consumed.
/// </summary>
public uint WaveBufferConsumed;
/// <summary>
/// Pitch used for Sample Rate Conversion.
/// </summary>
public Array8<short> Pitch;
public float Fraction;
/// <summary>
/// The ADPCM loop context when <see cref="SampleFormat.Adpcm"/> is in use.
/// </summary>
public AdpcmLoopContext LoopContext;
/// <summary>
/// The last samples after a mix ramp.
/// </summary>
/// <remarks>This is used for depop (to perform voice drop).</remarks>
public Array24<float> LastSamples;
/// <summary>
/// The current count of loop performed.
/// </summary>
public int LoopCount;
[StructLayout(LayoutKind.Sequential, Size = 1 * Constants.VoiceWaveBufferCount, Pack = 1)]
private struct WaveBufferValidArray { }
/// <summary>
/// Contains information about <see cref="WaveBuffer"/> validity.
/// </summary>
public Span<bool> IsWaveBufferValid => SpanHelpers.AsSpan<WaveBufferValidArray, bool>(ref _isWaveBufferValid);
/// <summary>
/// Mark the current <see cref="WaveBuffer"/> as played and switch to the next one.
/// </summary>
/// <param name="waveBuffer">The current <see cref="WaveBuffer"/></param>
/// <param name="waveBufferIndex">The wavebuffer index.</param>
/// <param name="waveBufferConsumed">The amount of wavebuffers consumed.</param>
/// <param name="playedSampleCount">The total count of sample played.</param>
public void MarkEndOfBufferWaveBufferProcessing(ref WaveBuffer waveBuffer, ref int waveBufferIndex, ref uint waveBufferConsumed, ref ulong playedSampleCount)
{
IsWaveBufferValid[waveBufferIndex++] = false;
LoopCount = 0;
waveBufferConsumed++;
if (waveBufferIndex >= Constants.VoiceWaveBufferCount)
{
waveBufferIndex = 0;
}
if (waveBuffer.IsEndOfStream)
{
playedSampleCount = 0;
}
}
}
}

View file

@@ -0,0 +1,82 @@
using System.Runtime.InteropServices;
using DspAddr = System.UInt64;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// A wavebuffer used for data source commands.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct WaveBuffer
{
/// <summary>
/// The DSP address of the sample data of the wavebuffer.
/// </summary>
public DspAddr Buffer;
/// <summary>
/// The DSP address of the context of the wavebuffer.
/// </summary>
/// <remarks>Only used by <see cref="SampleFormat.Adpcm"/>.</remarks>
public DspAddr Context;
/// <summary>
/// The size of the sample buffer data.
/// </summary>
public uint BufferSize;
/// <summary>
/// The size of the context buffer.
/// </summary>
public uint ContextSize;
/// <summary>
/// First sample to play on the wavebuffer.
/// </summary>
public uint StartSampleOffset;
/// <summary>
/// Last sample to play on the wavebuffer.
/// </summary>
public uint EndSampleOffset;
/// <summary>
/// First sample to play when looping the wavebuffer.
/// </summary>
/// <remarks>
/// If <see cref="LoopStartSampleOffset"/> or <see cref="LoopEndSampleOffset"/> is equal to zero,, it will default to <see cref="StartSampleOffset"/> and <see cref="EndSampleOffset"/>.
/// </remarks>
public uint LoopStartSampleOffset;
/// <summary>
/// Last sample to play when looping the wavebuffer.
/// </summary>
/// <remarks>
/// If <see cref="LoopStartSampleOffset"/> or <see cref="LoopEndSampleOffset"/> is equal to zero, it will default to <see cref="StartSampleOffset"/> and <see cref="EndSampleOffset"/>.
/// </remarks>
public uint LoopEndSampleOffset;
/// <summary>
/// The max loop count.
/// </summary>
public int LoopCount;
/// <summary>
/// Set to true if the wavebuffer is looping.
/// </summary>
[MarshalAs(UnmanagedType.I1)]
public bool Looping;
/// <summary>
/// Set to true if the wavebuffer is the end of stream.
/// </summary>
[MarshalAs(UnmanagedType.I1)]
public bool IsEndOfStream;
/// <summary>
/// Padding/Reserved.
/// </summary>
private ushort _padding;
}
}

View file

@@ -0,0 +1,61 @@
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
public class WorkBufferAllocator
{
public Memory<byte> BackingMemory { get; }
public ulong Offset { get; private set; }
public WorkBufferAllocator(Memory<byte> backingMemory)
{
BackingMemory = backingMemory;
}
public Memory<byte> Allocate(ulong size, int align)
{
Debug.Assert(align != 0);
if (size != 0)
{
ulong alignedOffset = BitUtils.AlignUp<ulong>(Offset, (ulong)align);
if (alignedOffset + size <= (ulong)BackingMemory.Length)
{
Memory<byte> result = BackingMemory.Slice((int)alignedOffset, (int)size);
Offset = alignedOffset + size;
// Clear the memory to be sure that it does not contain any garbage.
result.Span.Fill(0);
return result;
}
}
return Memory<byte>.Empty;
}
public Memory<T> Allocate<T>(ulong count, int align) where T : unmanaged
{
Memory<byte> allocatedMemory = Allocate((ulong)Unsafe.SizeOf<T>() * count, align);
if (allocatedMemory.IsEmpty)
{
return Memory<T>.Empty;
}
return SpanMemoryManager<T>.Cast(allocatedMemory);
}
public static ulong GetTargetSize<T>(ulong currentSize, ulong count, int align) where T : unmanaged
{
return BitUtils.AlignUp<ulong>(currentSize, (ulong)align) + (ulong)Unsafe.SizeOf<T>() * count;
}
}
}
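A small sketch of the allocator with an arbitrary backing size; a request that does not fit is reported as an empty Memory rather than an exception.
// Hypothetical usage sketch: carve two aligned regions out of one backing buffer.
WorkBufferAllocator allocator = new WorkBufferAllocator(new byte[0x1000]);
Memory<int> counters = allocator.Allocate<int>(16, 0x10); // 64 bytes, 0x10-aligned, zero-filled
Memory<byte> scratch = allocator.Allocate(0x80, 0x40);
if (scratch.IsEmpty)
{
    // The backing buffer could not satisfy this request.
}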

View file

@@ -0,0 +1,89 @@
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents a virtual device used by IAudioDevice.
/// </summary>
public class VirtualDevice
{
/// <summary>
/// All the defined virtual devices.
/// </summary>
public static readonly VirtualDevice[] Devices = new VirtualDevice[5]
{
new VirtualDevice("AudioStereoJackOutput", 2, true),
new VirtualDevice("AudioBuiltInSpeakerOutput", 2, false),
new VirtualDevice("AudioTvOutput", 6, false),
new VirtualDevice("AudioUsbDeviceOutput", 2, true),
new VirtualDevice("AudioExternalOutput", 6, true),
};
/// <summary>
/// The name of the <see cref="VirtualDevice"/>.
/// </summary>
public string Name { get; }
/// <summary>
/// The count of channels supported by the <see cref="VirtualDevice"/>.
/// </summary>
public uint ChannelCount { get; }
/// <summary>
/// The system master volume of the <see cref="VirtualDevice"/>.
/// </summary>
public float MasterVolume { get; private set; }
/// <summary>
/// Define if the <see cref="VirtualDevice"/> is provided by an external interface.
/// </summary>
public bool IsExternalOutput { get; }
/// <summary>
/// Create a new <see cref="VirtualDevice"/> instance.
/// </summary>
/// <param name="name">The name of the <see cref="VirtualDevice"/>.</param>
/// <param name="channelCount">The count of channels supported by the <see cref="VirtualDevice"/>.</param>
/// <param name="isExternalOutput">Indicate if the <see cref="VirtualDevice"/> is provided by an external interface.</param>
private VirtualDevice(string name, uint channelCount, bool isExternalOutput)
{
Name = name;
ChannelCount = channelCount;
IsExternalOutput = isExternalOutput;
}
/// <summary>
/// Update the master volume of the <see cref="VirtualDevice"/>.
/// </summary>
/// <param name="volume">The new master volume.</param>
public void UpdateMasterVolume(float volume)
{
Debug.Assert(volume >= 0.0f && volume <= 1.0f);
MasterVolume = volume;
}
/// <summary>
/// Check if the <see cref="VirtualDevice"/> is a usb device.
/// </summary>
/// <returns>Returns true if the <see cref="VirtualDevice"/> is a usb device.</returns>
public bool IsUsbDevice()
{
return Name.Equals("AudioUsbDeviceOutput");
}
/// <summary>
/// Get the output device name of the <see cref="VirtualDevice"/>.
/// </summary>
/// <returns>The output device name of the <see cref="VirtualDevice"/>.</returns>
public string GetOutputDeviceName()
{
if (IsExternalOutput)
{
return "AudioExternalOutput";
}
return Name;
}
}
}
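The predefined device table above can be exercised directly; the indices in the sketch below simply follow the order of the Devices array.
// Hypothetical sketch: query the predefined virtual devices.
VirtualDevice tv = VirtualDevice.Devices[2];         // "AudioTvOutput", 6 channels
string outputName = tv.GetOutputDeviceName();        // "AudioTvOutput" (not an external output)
bool isUsb = VirtualDevice.Devices[3].IsUsbDevice(); // true for "AudioUsbDeviceOutput"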

View file

@@ -0,0 +1,27 @@
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents a virtual device session used by IAudioDevice.
/// </summary>
public class VirtualDeviceSession
{
/// <summary>
/// The <see cref="VirtualDevice"/> associated to this session.
/// </summary>
public VirtualDevice Device { get; }
/// <summary>
/// The user volume of this session.
/// </summary>
public float Volume { get; set; }
/// <summary>
/// Create a new <see cref="VirtualDeviceSession"/> instance.
/// </summary>
/// <param name="virtualDevice">The <see cref="VirtualDevice"/> associated to this session.</param>
public VirtualDeviceSession(VirtualDevice virtualDevice)
{
Device = virtualDevice;
}
}
}

View file

@@ -0,0 +1,62 @@
using System.Collections.Generic;
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents a registry of <see cref="VirtualDeviceSession"/>.
/// </summary>
public class VirtualDeviceSessionRegistry
{
/// <summary>
/// The session registry, used to store the sessions of a given AppletResourceId.
/// </summary>
private Dictionary<ulong, VirtualDeviceSession[]> _sessionsRegistry = new Dictionary<ulong, VirtualDeviceSession[]>();
/// <summary>
/// The default <see cref="VirtualDevice"/>.
/// </summary>
/// <remarks>This is used when the USB device is the default one on older revisions.</remarks>
public VirtualDevice DefaultDevice => VirtualDevice.Devices[0];
/// <summary>
/// The current active <see cref="VirtualDevice"/>.
/// </summary>
// TODO: make this configurable
public VirtualDevice ActiveDevice = VirtualDevice.Devices[2];
/// <summary>
/// Get the associated <see cref="T:VirtualDeviceSession[]"/> from an AppletResourceId.
/// </summary>
/// <param name="resourceAppletId">The AppletResourceId used.</param>
/// <returns>The associated <see cref="T:VirtualDeviceSession[]"/> from an AppletResourceId.</returns>
public VirtualDeviceSession[] GetSessionByAppletResourceId(ulong resourceAppletId)
{
if (_sessionsRegistry.TryGetValue(resourceAppletId, out VirtualDeviceSession[] result))
{
return result;
}
result = CreateSessionsFromBehaviourContext();
_sessionsRegistry.Add(resourceAppletId, result);
return result;
}
/// <summary>
/// Create a new array of sessions for each <see cref="VirtualDevice"/>.
/// </summary>
/// <returns>A new array of sessions for each <see cref="VirtualDevice"/>.</returns>
private static VirtualDeviceSession[] CreateSessionsFromBehaviourContext()
{
VirtualDeviceSession[] virtualDeviceSession = new VirtualDeviceSession[VirtualDevice.Devices.Length];
for (int i = 0; i < virtualDeviceSession.Length; i++)
{
virtualDeviceSession[i] = new VirtualDeviceSession(VirtualDevice.Devices[i]);
}
return virtualDeviceSession;
}
}
}

View file

@@ -0,0 +1,216 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Common.Logging;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class AdpcmHelper
{
private const int FixedPointPrecision = 11;
private const int SamplesPerFrame = 14;
private const int NibblesPerFrame = SamplesPerFrame + 2;
private const int BytesPerFrame = 8;
private const int BitsPerFrame = BytesPerFrame * 8;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint GetAdpcmDataSize(int sampleCount)
{
Debug.Assert(sampleCount >= 0);
int frames = sampleCount / SamplesPerFrame;
int extraSize = 0;
if ((sampleCount % SamplesPerFrame) != 0)
{
extraSize = (sampleCount % SamplesPerFrame) / 2 + 1 + (sampleCount % 2);
}
return (uint)(BytesPerFrame * frames + extraSize);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetAdpcmOffsetFromSampleOffset(int sampleOffset)
{
Debug.Assert(sampleOffset >= 0);
return GetNibblesFromSampleCount(sampleOffset) / 2;
}
public static int NibbleToSample(int nibble)
{
int frames = nibble / NibblesPerFrame;
int extraNibbles = nibble % NibblesPerFrame;
int samples = SamplesPerFrame * frames;
return samples + extraNibbles - 2;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetNibblesFromSampleCount(int sampleCount)
{
byte headerSize = 0;
if ((sampleCount % SamplesPerFrame) != 0)
{
headerSize = 2;
}
return sampleCount % SamplesPerFrame + NibblesPerFrame * (sampleCount / SamplesPerFrame) + headerSize;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short Saturate(int value)
{
if (value > short.MaxValue)
value = short.MaxValue;
if (value < short.MinValue)
value = short.MinValue;
return (short)value;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short GetCoefficientAtIndex(ReadOnlySpan<short> coefficients, int index)
{
if ((uint)index >= (uint)coefficients.Length)
{
Logger.Error?.Print(LogClass.AudioRenderer, $"Out of bound read for coefficient at index {index}");
return 0;
}
return coefficients[index];
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int Decode(Span<short> output, ReadOnlySpan<byte> input, int startSampleOffset, int endSampleOffset, int offset, int count, ReadOnlySpan<short> coefficients, ref AdpcmLoopContext loopContext)
{
if (input.IsEmpty || endSampleOffset < startSampleOffset)
{
return 0;
}
byte predScale = (byte)loopContext.PredScale;
byte scale = (byte)(predScale & 0xF);
byte coefficientIndex = (byte)((predScale >> 4) & 0xF);
short history0 = loopContext.History0;
short history1 = loopContext.History1;
short coefficient0 = GetCoefficientAtIndex(coefficients, coefficientIndex * 2 + 0);
short coefficient1 = GetCoefficientAtIndex(coefficients, coefficientIndex * 2 + 1);
int decodedCount = Math.Min(count, endSampleOffset - startSampleOffset - offset);
int nibbles = GetNibblesFromSampleCount(offset + startSampleOffset);
int remaining = decodedCount;
int outputBufferIndex = 0;
int inputIndex = 0;
ReadOnlySpan<byte> targetInput;
targetInput = input.Slice(nibbles / 2);
while (remaining > 0)
{
int samplesCount;
if (((uint)nibbles % NibblesPerFrame) == 0)
{
predScale = targetInput[inputIndex++];
scale = (byte)(predScale & 0xF);
coefficientIndex = (byte)((predScale >> 4) & 0xF);
coefficient0 = GetCoefficientAtIndex(coefficients, coefficientIndex * 2);
coefficient1 = GetCoefficientAtIndex(coefficients, coefficientIndex * 2 + 1);
nibbles += 2;
samplesCount = Math.Min(remaining, SamplesPerFrame);
}
else
{
samplesCount = 1;
}
int scaleFixedPoint = FixedPointHelper.ToFixed(1.0f, FixedPointPrecision) << scale;
if (samplesCount < SamplesPerFrame)
{
for (int i = 0; i < samplesCount; i++)
{
int value = targetInput[inputIndex];
int sample;
if ((nibbles & 1) != 0)
{
sample = (value << 28) >> 28;
inputIndex++;
}
else
{
sample = (value << 24) >> 28;
}
nibbles++;
int prediction = coefficient0 * history0 + coefficient1 * history1;
sample = FixedPointHelper.RoundUpAndToInt(sample * scaleFixedPoint + prediction, FixedPointPrecision);
short saturatedSample = Saturate(sample);
history1 = history0;
history0 = saturatedSample;
output[outputBufferIndex++] = saturatedSample;
remaining--;
}
}
else
{
for (int i = 0; i < SamplesPerFrame / 2; i++)
{
int value = targetInput[inputIndex];
int sample0;
int sample1;
sample0 = (value << 24) >> 28;
sample1 = (value << 28) >> 28;
inputIndex++;
int prediction0 = coefficient0 * history0 + coefficient1 * history1;
sample0 = FixedPointHelper.RoundUpAndToInt(sample0 * scaleFixedPoint + prediction0, FixedPointPrecision);
short saturatedSample0 = Saturate(sample0);
int prediction1 = coefficient0 * saturatedSample0 + coefficient1 * history0;
sample1 = FixedPointHelper.RoundUpAndToInt(sample1 * scaleFixedPoint + prediction1, FixedPointPrecision);
short saturatedSample1 = Saturate(sample1);
history1 = saturatedSample0;
history0 = saturatedSample1;
output[outputBufferIndex++] = saturatedSample0;
output[outputBufferIndex++] = saturatedSample1;
}
nibbles += SamplesPerFrame;
remaining -= SamplesPerFrame;
}
}
loopContext.PredScale = predScale;
loopContext.History0 = history0;
loopContext.History1 = history1;
return decodedCount;
}
}
}
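
A minimal sanity check of the size/offset arithmetic above, assuming only the helpers in this file (AdpcmHelperExample is a hypothetical illustration, not part of the repository):

using System.Diagnostics;
using Ryujinx.Audio.Renderer.Dsp;

internal static class AdpcmHelperExample
{
    public static void Check()
    {
        // 30 samples = 2 full frames (2 * 8 bytes) plus a partial frame:
        // extraSize = (30 % 14) / 2 + 1 + (30 % 2) = 1 + 1 + 0 = 2 bytes => 18 bytes total.
        Debug.Assert(AdpcmHelper.GetAdpcmDataSize(30) == 18);

        // 30 % 14 + 16 * (30 / 14) + 2 = 2 + 32 + 2 = 36 nibbles (header nibbles included).
        int nibbles = AdpcmHelper.GetNibblesFromSampleCount(30);
        Debug.Assert(nibbles == 36);

        // Round-trip back to samples: 14 * (36 / 16) + (36 % 16) - 2 = 30.
        Debug.Assert(AdpcmHelper.NibbleToSample(nibbles) == 30);

        // Byte offset of sample 30 inside the ADPCM stream: 36 / 2 = 18.
        Debug.Assert(AdpcmHelper.GetAdpcmOffsetFromSampleOffset(30) == 18);
    }
}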

View file

@ -0,0 +1,276 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Dsp.Command;
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using Ryujinx.Common.Logging;
using System;
using System.Threading;
namespace Ryujinx.Audio.Renderer.Dsp
{
public class AudioProcessor : IDisposable
{
private const int MaxBufferedFrames = 5;
private const int TargetBufferedFrames = 3;
private enum MailboxMessage : uint
{
Start,
Stop,
RenderStart,
RenderEnd
}
private class RendererSession
{
public CommandList CommandList;
public int RenderingLimit;
public ulong AppletResourceId;
}
private Mailbox<MailboxMessage> _mailbox;
private RendererSession[] _sessionCommandList;
private Thread _workerThread;
public IHardwareDevice[] OutputDevices { get; private set; }
private long _lastTime;
private long _playbackEnds;
private ManualResetEvent _event;
private ManualResetEvent _pauseEvent;
public AudioProcessor()
{
_event = new ManualResetEvent(false);
}
private static uint GetHardwareChannelCount(IHardwareDeviceDriver deviceDriver)
{
// Get the real device driver (In case the compat layer is on top of it).
deviceDriver = deviceDriver.GetRealDeviceDriver();
if (deviceDriver.SupportsChannelCount(6))
{
return 6;
}
else
{
// NOTE: We default to stereo as this will get downmixed to mono by the compat layer if it's not compatible.
return 2;
}
}
public void Start(IHardwareDeviceDriver deviceDriver, float volume)
{
OutputDevices = new IHardwareDevice[Constants.AudioRendererSessionCountMax];
// TODO: Before enabling this, we need up-mixing from stereo to 5.1.
// uint channelCount = GetHardwareChannelCount(deviceDriver);
uint channelCount = 2;
for (int i = 0; i < OutputDevices.Length; i++)
{
// TODO: Don't hardcode sample rate.
OutputDevices[i] = new HardwareDeviceImpl(deviceDriver, channelCount, Constants.TargetSampleRate, volume);
}
_mailbox = new Mailbox<MailboxMessage>();
_sessionCommandList = new RendererSession[Constants.AudioRendererSessionCountMax];
_event.Reset();
_lastTime = PerformanceCounter.ElapsedNanoseconds;
_pauseEvent = deviceDriver.GetPauseEvent();
StartThread();
_mailbox.SendMessage(MailboxMessage.Start);
if (_mailbox.ReceiveResponse() != MailboxMessage.Start)
{
throw new InvalidOperationException("Audio Processor Start response was invalid!");
}
}
public void Stop()
{
_mailbox.SendMessage(MailboxMessage.Stop);
if (_mailbox.ReceiveResponse() != MailboxMessage.Stop)
{
throw new InvalidOperationException("Audio Processor Stop response was invalid!");
}
foreach (IHardwareDevice device in OutputDevices)
{
device.Dispose();
}
}
public void Send(int sessionId, CommandList commands, int renderingLimit, ulong appletResourceId)
{
_sessionCommandList[sessionId] = new RendererSession
{
CommandList = commands,
RenderingLimit = renderingLimit,
AppletResourceId = appletResourceId
};
}
public bool HasRemainingCommands(int sessionId)
{
return _sessionCommandList[sessionId] != null;
}
public void Signal()
{
_mailbox.SendMessage(MailboxMessage.RenderStart);
}
public void Wait()
{
if (_mailbox.ReceiveResponse() != MailboxMessage.RenderEnd)
{
throw new InvalidOperationException("Audio Processor Wait response was invalid!");
}
long increment = Constants.AudioProcessorMaxUpdateTimeTarget;
long timeNow = PerformanceCounter.ElapsedNanoseconds;
if (timeNow > _playbackEnds)
{
// Playback has restarted.
_playbackEnds = timeNow;
}
_playbackEnds += increment;
// The number of frames we are behind where the timer says we should be.
long framesBehind = (timeNow - _lastTime) / increment;
// The number of frames yet to play on the backend.
long bufferedFrames = (_playbackEnds - timeNow) / increment + framesBehind;
// If we've entered a situation where a lot of buffers will be queued on the backend,
// skip some audio frames so that playback can catch up.
if (bufferedFrames > MaxBufferedFrames)
{
// Skip a few frames so that we're not too far behind. (the target number of frames)
_lastTime += increment * (bufferedFrames - TargetBufferedFrames);
}
while (timeNow < _lastTime + increment)
{
_event.WaitOne(1);
timeNow = PerformanceCounter.ElapsedNanoseconds;
}
_lastTime += increment;
}
private void StartThread()
{
_workerThread = new Thread(Work)
{
Name = "AudioProcessor.Worker"
};
_workerThread.Start();
}
private void Work()
{
if (_mailbox.ReceiveMessage() != MailboxMessage.Start)
{
throw new InvalidOperationException("Audio Processor Start message was invalid!");
}
_mailbox.SendResponse(MailboxMessage.Start);
_mailbox.SendResponse(MailboxMessage.RenderEnd);
Logger.Info?.Print(LogClass.AudioRenderer, "Starting audio processor");
while (true)
{
_pauseEvent?.WaitOne();
MailboxMessage message = _mailbox.ReceiveMessage();
if (message == MailboxMessage.Stop)
{
break;
}
if (message == MailboxMessage.RenderStart)
{
long startTicks = PerformanceCounter.ElapsedNanoseconds;
for (int i = 0; i < _sessionCommandList.Length; i++)
{
if (_sessionCommandList[i] != null)
{
_sessionCommandList[i].CommandList.Process(OutputDevices[i]);
_sessionCommandList[i].CommandList.Dispose();
_sessionCommandList[i] = null;
}
}
long endTicks = PerformanceCounter.ElapsedNanoseconds;
long elapsedTime = endTicks - startTicks;
if (Constants.AudioProcessorMaxUpdateTime < elapsedTime)
{
Logger.Debug?.Print(LogClass.AudioRenderer, $"DSP too slow (exceeded by {elapsedTime - Constants.AudioProcessorMaxUpdateTime}ns)");
}
_mailbox.SendResponse(MailboxMessage.RenderEnd);
}
}
Logger.Info?.Print(LogClass.AudioRenderer, "Stopping audio processor");
_mailbox.SendResponse(MailboxMessage.Stop);
}
public float GetVolume()
{
if (OutputDevices != null)
{
foreach (IHardwareDevice outputDevice in OutputDevices)
{
if (outputDevice != null)
{
return outputDevice.GetVolume();
}
}
}
return 0f;
}
public void SetVolume(float volume)
{
if (OutputDevices != null)
{
foreach (IHardwareDevice outputDevice in OutputDevices)
{
outputDevice?.SetVolume(volume);
}
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_event.Dispose();
}
}
}
}
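
A minimal sketch of the pacing arithmetic in Wait(), pulled out into a pure function so the frame-skip behaviour is easier to follow. The 5 ms frame period and the class name are assumptions for illustration; the real period comes from Constants.AudioProcessorMaxUpdateTimeTarget.

// Hypothetical illustration of AudioProcessor.Wait() pacing (not part of the repository).
internal static class AudioPacingSketch
{
    private const int MaxBufferedFrames = 5;
    private const int TargetBufferedFrames = 3;
    private const long Increment = 5_000_000; // assumed 5 ms frame period, in nanoseconds

    // Returns the updated (lastTime, playbackEnds) pair after one rendered frame.
    public static (long LastTime, long PlaybackEnds) Advance(long timeNow, long lastTime, long playbackEnds)
    {
        if (timeNow > playbackEnds)
        {
            // Playback has restarted, resynchronize.
            playbackEnds = timeNow;
        }

        playbackEnds += Increment;

        // Frames the renderer is late by, and frames still queued on the backend.
        long framesBehind = (timeNow - lastTime) / Increment;
        long bufferedFrames = (playbackEnds - timeNow) / Increment + framesBehind;

        if (bufferedFrames > MaxBufferedFrames)
        {
            // Skip ahead so that only the target number of frames stays queued.
            lastTime += Increment * (bufferedFrames - TargetBufferedFrames);
        }

        // The real code then sleeps in 1 ms steps until timeNow reaches lastTime + Increment.
        return (lastTime + Increment, playbackEnds);
    }
}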

View file

@ -0,0 +1,83 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class BiquadFilterHelper
{
private const int FixedPointPrecisionForParameter = 14;
/// <summary>
/// Apply a single biquad filter.
/// </summary>
/// <remarks>This is implemented with a transposed direct form 2.</remarks>
/// <param name="parameter">The biquad filter parameter</param>
/// <param name="state">The biquad filter state</param>
/// <param name="outputBuffer">The output buffer to write the result</param>
/// <param name="inputBuffer">The input buffer to write the result</param>
/// <param name="sampleCount">The count of samples to process</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ProcessBiquadFilter(ref BiquadFilterParameter parameter, ref BiquadFilterState state, Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, uint sampleCount)
{
float a0 = FixedPointHelper.ToFloat(parameter.Numerator[0], FixedPointPrecisionForParameter);
float a1 = FixedPointHelper.ToFloat(parameter.Numerator[1], FixedPointPrecisionForParameter);
float a2 = FixedPointHelper.ToFloat(parameter.Numerator[2], FixedPointPrecisionForParameter);
float b1 = FixedPointHelper.ToFloat(parameter.Denominator[0], FixedPointPrecisionForParameter);
float b2 = FixedPointHelper.ToFloat(parameter.Denominator[1], FixedPointPrecisionForParameter);
for (int i = 0; i < sampleCount; i++)
{
float input = inputBuffer[i];
float output = input * a0 + state.State0;
state.State0 = input * a1 + output * b1 + state.State1;
state.State1 = input * a2 + output * b2;
outputBuffer[i] = output;
}
}
/// <summary>
/// Apply multiple biquad filters.
/// </summary>
/// <remarks>This is implemented with a direct form 1.</remarks>
/// <param name="parameters">The biquad filter parameters</param>
/// <param name="states">The biquad filter states</param>
/// <param name="outputBuffer">The output buffer to write the result to</param>
/// <param name="inputBuffer">The input buffer to read the samples from</param>
/// <param name="sampleCount">The count of samples to process</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ProcessBiquadFilter(ReadOnlySpan<BiquadFilterParameter> parameters, Span<BiquadFilterState> states, Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, uint sampleCount)
{
for (int stageIndex = 0; stageIndex < parameters.Length; stageIndex++)
{
BiquadFilterParameter parameter = parameters[stageIndex];
ref BiquadFilterState state = ref states[stageIndex];
float a0 = FixedPointHelper.ToFloat(parameter.Numerator[0], FixedPointPrecisionForParameter);
float a1 = FixedPointHelper.ToFloat(parameter.Numerator[1], FixedPointPrecisionForParameter);
float a2 = FixedPointHelper.ToFloat(parameter.Numerator[2], FixedPointPrecisionForParameter);
float b1 = FixedPointHelper.ToFloat(parameter.Denominator[0], FixedPointPrecisionForParameter);
float b2 = FixedPointHelper.ToFloat(parameter.Denominator[1], FixedPointPrecisionForParameter);
for (int i = 0; i < sampleCount; i++)
{
float input = stageIndex != 0 ? outputBuffer[i] : inputBuffer[i];
float output = input * a0 + state.State0 * a1 + state.State1 * a2 + state.State2 * b1 + state.State3 * b2;
state.State1 = state.State0;
state.State0 = input;
state.State3 = state.State2;
state.State2 = output;
outputBuffer[i] = output;
}
}
}
}
}
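
For reference, the single-filter path above is a transposed direct form 2 realization of the recurrence

    y[n] = a0*x[n] + a1*x[n-1] + a2*x[n-2] + b1*y[n-1] + b2*y[n-2]

with the two state variables holding the partial sums:

    y[n] = a0*x[n] + s0
    s0'  = a1*x[n] + b1*y[n] + s1
    s1'  = a2*x[n] + b2*y[n]

The coefficients are stored as Q14 fixed point in the parameter and converted with FixedPointHelper.ToFloat; since b1/b2 are added rather than subtracted, the parameter appears to store the feedback coefficients already negated relative to the textbook convention. The multi-stage overload instead keeps x[n-1], x[n-2], y[n-1], y[n-2] directly in State0..State3 (direct form 1), cascading each stage through the output buffer.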

View file

@ -0,0 +1,75 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AdpcmDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AdpcmDataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong AdpcmParameter { get; }
public ulong AdpcmParameterSize { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public AdpcmDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = outputBufferIndex;
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
AdpcmParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
AdpcmParameterSize = serverState.DataSourceStateAddressInfo.Size;
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.Adpcm,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = AdpcmParameter,
ExtraParameterSize = AdpcmParameterSize,
ChannelIndex = 0,
ChannelCount = 1,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,173 @@
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Memory;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using static Ryujinx.Audio.Renderer.Dsp.State.AuxiliaryBufferHeader;
using CpuAddress = System.UInt64;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AuxiliaryBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AuxiliaryBuffer;
public uint EstimatedProcessingTime { get; set; }
public uint InputBufferIndex { get; }
public uint OutputBufferIndex { get; }
public AuxiliaryBufferAddresses BufferInfo { get; }
public CpuAddress InputBuffer { get; }
public CpuAddress OutputBuffer { get; }
public uint CountMax { get; }
public uint UpdateCount { get; }
public uint WriteOffset { get; }
public bool IsEffectEnabled { get; }
public AuxiliaryBufferCommand(uint bufferOffset, byte inputBufferOffset, byte outputBufferOffset,
ref AuxiliaryBufferAddresses sendBufferInfo, bool isEnabled, uint countMax,
CpuAddress outputBuffer, CpuAddress inputBuffer, uint updateCount, uint writeOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = bufferOffset + inputBufferOffset;
OutputBufferIndex = bufferOffset + outputBufferOffset;
BufferInfo = sendBufferInfo;
InputBuffer = inputBuffer;
OutputBuffer = outputBuffer;
CountMax = countMax;
UpdateCount = updateCount;
WriteOffset = writeOffset;
IsEffectEnabled = isEnabled;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Read(IVirtualMemoryManager memoryManager, ulong bufferAddress, uint countMax, Span<int> outBuffer, uint count, uint readOffset, uint updateCount)
{
if (countMax == 0 || bufferAddress == 0)
{
return 0;
}
uint targetReadOffset = readOffset + AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo);
if (targetReadOffset > countMax)
{
return 0;
}
uint remaining = count;
uint outBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetReadOffset, remaining);
memoryManager.Read(bufferAddress + targetReadOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(outBuffer.Slice((int)outBufferOffset, (int)countToWrite)));
targetReadOffset = (targetReadOffset + countToWrite) % countMax;
remaining -= countToWrite;
outBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newReadOffset = (AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo, newReadOffset);
}
return count;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Write(IVirtualMemoryManager memoryManager, ulong outBufferAddress, uint countMax, ReadOnlySpan<int> buffer, uint count, uint writeOffset, uint updateCount)
{
if (countMax == 0 || outBufferAddress == 0)
{
return 0;
}
uint targetWriteOffset = writeOffset + AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo);
if (targetWriteOffset > countMax)
{
return 0;
}
uint remaining = count;
uint inBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetWriteOffset, remaining);
memoryManager.Write(outBufferAddress + targetWriteOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(buffer.Slice((int)inBufferOffset, (int)countToWrite)));
targetWriteOffset = (targetWriteOffset + countToWrite) % countMax;
remaining -= countToWrite;
inBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newWriteOffset = (AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetWriteOffset(memoryManager, BufferInfo.SendBufferInfo, newWriteOffset);
}
return count;
}
public void Process(CommandList context)
{
Span<float> inputBuffer = context.GetBuffer((int)InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer((int)OutputBufferIndex);
if (IsEffectEnabled)
{
Span<int> inputBufferInt = MemoryMarshal.Cast<float, int>(inputBuffer);
Span<int> outputBufferInt = MemoryMarshal.Cast<float, int>(outputBuffer);
// Convert input data to the target format for user (int)
DataSourceHelper.ToInt(inputBufferInt, inputBuffer, inputBuffer.Length);
// Send the input to the user
Write(context.MemoryManager, OutputBuffer, CountMax, inputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert back to float just in case it's reused
DataSourceHelper.ToFloat(inputBuffer, inputBufferInt, inputBuffer.Length);
// Retrieve the input from the user
uint readResult = Read(context.MemoryManager, InputBuffer, CountMax, outputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert the outputBuffer back to the target format of the renderer (float)
DataSourceHelper.ToFloat(outputBuffer, outputBufferInt, outputBuffer.Length);
if (readResult != context.SampleCount)
{
outputBuffer.Slice((int)readResult, (int)context.SampleCount - (int)readResult).Fill(0);
}
}
else
{
AuxiliaryBufferInfo.Reset(context.MemoryManager, BufferInfo.SendBufferInfo);
AuxiliaryBufferInfo.Reset(context.MemoryManager, BufferInfo.ReturnBufferInfo);
if (InputBufferIndex != OutputBufferIndex)
{
inputBuffer.CopyTo(outputBuffer);
}
}
}
}
}
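
The Read/Write helpers above treat the auxiliary buffer as a ring of countMax 32-bit samples. A minimal sketch of just the wrap-around bookkeeping, with hypothetical values (AuxRingSketch is an illustration, not part of the repository):

using System;

internal static class AuxRingSketch
{
    // Mirrors the chunking loop in AuxiliaryBufferCommand.Write/Read:
    // a copy that would run past the end of the ring is split and wraps to offset 0.
    public static void Walk(uint countMax, uint startOffset, uint count)
    {
        uint offset = startOffset;
        uint remaining = count;

        while (remaining != 0)
        {
            uint chunk = Math.Min(countMax - offset, remaining);

            // This is where memoryManager.Read/Write would copy `chunk` ints at `offset`.
            Console.WriteLine($"copy {chunk} samples at ring offset {offset}");

            offset = (offset + chunk) % countMax;
            remaining -= chunk;
        }
    }
}

// e.g. Walk(countMax: 160, startOffset: 150, count: 100) copies 10 samples at offset 150
// and then 90 samples at offset 0.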

View file

@ -0,0 +1,51 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class BiquadFilterCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.BiquadFilter;
public uint EstimatedProcessingTime { get; set; }
public Memory<BiquadFilterState> BiquadFilterState { get; }
public int InputBufferIndex { get; }
public int OutputBufferIndex { get; }
public bool NeedInitialization { get; }
private BiquadFilterParameter _parameter;
public BiquadFilterCommand(int baseIndex, ref BiquadFilterParameter filter, Memory<BiquadFilterState> biquadFilterStateMemory, int inputBufferOffset, int outputBufferOffset, bool needInitialization, int nodeId)
{
_parameter = filter;
BiquadFilterState = biquadFilterStateMemory;
InputBufferIndex = baseIndex + inputBufferOffset;
OutputBufferIndex = baseIndex + outputBufferOffset;
NeedInitialization = needInitialization;
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
ref BiquadFilterState state = ref BiquadFilterState.Span[0];
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
if (NeedInitialization)
{
state = new BiquadFilterState();
}
BiquadFilterHelper.ProcessBiquadFilter(ref _parameter, ref state, outputBuffer, inputBuffer, context.SampleCount);
}
}
}

View file

@ -0,0 +1,136 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Memory;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using static Ryujinx.Audio.Renderer.Dsp.State.AuxiliaryBufferHeader;
using CpuAddress = System.UInt64;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CaptureBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CaptureBuffer;
public uint EstimatedProcessingTime { get; set; }
public uint InputBufferIndex { get; }
public ulong CpuBufferInfoAddress { get; }
public ulong DspBufferInfoAddress { get; }
public CpuAddress OutputBuffer { get; }
public uint CountMax { get; }
public uint UpdateCount { get; }
public uint WriteOffset { get; }
public bool IsEffectEnabled { get; }
public CaptureBufferCommand(uint bufferOffset, byte inputBufferOffset, ulong sendBufferInfo, bool isEnabled,
uint countMax, CpuAddress outputBuffer, uint updateCount, uint writeOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = bufferOffset + inputBufferOffset;
CpuBufferInfoAddress = sendBufferInfo;
DspBufferInfoAddress = sendBufferInfo + (ulong)Unsafe.SizeOf<AuxiliaryBufferHeader>();
OutputBuffer = outputBuffer;
CountMax = countMax;
UpdateCount = updateCount;
WriteOffset = writeOffset;
IsEffectEnabled = isEnabled;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Write(IVirtualMemoryManager memoryManager, ulong outBufferAddress, uint countMax, ReadOnlySpan<int> buffer, uint count, uint writeOffset, uint updateCount)
{
if (countMax == 0 || outBufferAddress == 0)
{
return 0;
}
uint targetWriteOffset = writeOffset + AuxiliaryBufferInfo.GetWriteOffset(memoryManager, DspBufferInfoAddress);
if (targetWriteOffset > countMax)
{
return 0;
}
uint remaining = count;
uint inBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetWriteOffset, remaining);
memoryManager.Write(outBufferAddress + targetWriteOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(buffer.Slice((int)inBufferOffset, (int)countToWrite)));
targetWriteOffset = (targetWriteOffset + countToWrite) % countMax;
remaining -= countToWrite;
inBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint dspTotalSampleCount = AuxiliaryBufferInfo.GetTotalSampleCount(memoryManager, DspBufferInfoAddress);
uint cpuTotalSampleCount = AuxiliaryBufferInfo.GetTotalSampleCount(memoryManager, CpuBufferInfoAddress);
uint totalSampleCountDiff = dspTotalSampleCount - cpuTotalSampleCount;
if (totalSampleCountDiff >= countMax)
{
uint dspLostSampleCount = AuxiliaryBufferInfo.GetLostSampleCount(memoryManager, DspBufferInfoAddress);
uint cpuLostSampleCount = AuxiliaryBufferInfo.GetLostSampleCount(memoryManager, CpuBufferInfoAddress);
uint lostSampleCountDiff = dspLostSampleCount - cpuLostSampleCount;
uint newLostSampleCount = lostSampleCountDiff + updateCount;
if (lostSampleCountDiff > newLostSampleCount)
{
newLostSampleCount = cpuLostSampleCount - 1;
}
AuxiliaryBufferInfo.SetLostSampleCount(memoryManager, DspBufferInfoAddress, newLostSampleCount);
}
uint newWriteOffset = (AuxiliaryBufferInfo.GetWriteOffset(memoryManager, DspBufferInfoAddress) + updateCount) % countMax;
AuxiliaryBufferInfo.SetWriteOffset(memoryManager, DspBufferInfoAddress, newWriteOffset);
uint newTotalSampleCount = totalSampleCountDiff + newWriteOffset;
AuxiliaryBufferInfo.SetTotalSampleCount(memoryManager, DspBufferInfoAddress, newTotalSampleCount);
}
return count;
}
public void Process(CommandList context)
{
Span<float> inputBuffer = context.GetBuffer((int)InputBufferIndex);
if (IsEffectEnabled)
{
Span<int> inputBufferInt = MemoryMarshal.Cast<float, int>(inputBuffer);
// Convert input data to the target format for user (int)
DataSourceHelper.ToInt(inputBufferInt, inputBuffer, inputBuffer.Length);
// Send the input to the user
Write(context.MemoryManager, OutputBuffer, CountMax, inputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert back to float
DataSourceHelper.ToFloat(inputBuffer, inputBufferInt, inputBuffer.Length);
}
else
{
AuxiliaryBufferInfo.Reset(context.MemoryManager, DspBufferInfoAddress);
}
}
}
}

View file

@ -0,0 +1,76 @@
using Ryujinx.Audio.Renderer.Parameter.Sink;
using Ryujinx.Audio.Renderer.Server.MemoryPool;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CircularBufferSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CircularBufferSink;
public uint EstimatedProcessingTime { get; set; }
public ushort[] Input { get; }
public uint InputCount { get; }
public ulong CircularBuffer { get; }
public ulong CircularBufferSize { get; }
public ulong CurrentOffset { get; }
public CircularBufferSinkCommand(uint bufferOffset, ref CircularBufferParameter parameter, ref AddressInfo circularBufferAddressInfo, uint currentOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
Input = new ushort[Constants.ChannelCountMax];
InputCount = parameter.InputCount;
for (int i = 0; i < InputCount; i++)
{
Input[i] = (ushort)(bufferOffset + parameter.Input[i]);
}
CircularBuffer = circularBufferAddressInfo.GetReference(true);
CircularBufferSize = parameter.BufferSize;
CurrentOffset = currentOffset;
Debug.Assert(CircularBuffer != 0);
}
public void Process(CommandList context)
{
const int targetChannelCount = 2;
ulong currentOffset = CurrentOffset;
if (CircularBufferSize > 0)
{
for (int i = 0; i < InputCount; i++)
{
unsafe
{
float* inputBuffer = (float*)context.GetBufferPointer(Input[i]);
ulong targetOffset = CircularBuffer + currentOffset;
for (int y = 0; y < context.SampleCount; y++)
{
context.MemoryManager.Write(targetOffset + (ulong)y * targetChannelCount, PcmHelper.Saturate(inputBuffer[y]));
}
currentOffset += context.SampleCount * targetChannelCount;
if (currentOffset >= CircularBufferSize)
{
currentOffset = 0;
}
}
}
}
}
}
}

View file

@ -0,0 +1,24 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ClearMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.ClearMixBuffer;
public uint EstimatedProcessingTime { get; set; }
public ClearMixBufferCommand(int nodeId)
{
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
context.ClearBuffers();
}
}
}

View file

@ -0,0 +1,155 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server;
using Ryujinx.Common;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CommandList : IDisposable
{
public ulong StartTime { get; private set; }
public ulong EndTime { get; private set; }
public uint SampleCount { get; }
public uint SampleRate { get; }
public Memory<float> Buffers { get; }
public uint BufferCount { get; }
public List<ICommand> Commands { get; }
public IVirtualMemoryManager MemoryManager { get; }
public IHardwareDevice OutputDevice { get; private set; }
private readonly int _sampleCount;
private readonly int _buffersEntryCount;
private readonly MemoryHandle _buffersMemoryHandle;
public CommandList(AudioRenderSystem renderSystem) : this(renderSystem.MemoryManager,
renderSystem.GetMixBuffer(),
renderSystem.GetSampleCount(),
renderSystem.GetSampleRate(),
renderSystem.GetMixBufferCount(),
renderSystem.GetVoiceChannelCountMax())
{
}
public CommandList(IVirtualMemoryManager memoryManager, Memory<float> mixBuffer, uint sampleCount, uint sampleRate, uint mixBufferCount, uint voiceChannelCountMax)
{
SampleCount = sampleCount;
_sampleCount = (int)SampleCount;
SampleRate = sampleRate;
BufferCount = mixBufferCount + voiceChannelCountMax;
Buffers = mixBuffer;
Commands = new List<ICommand>();
MemoryManager = memoryManager;
_buffersEntryCount = Buffers.Length;
_buffersMemoryHandle = Buffers.Pin();
}
public void AddCommand(ICommand command)
{
Commands.Add(command);
}
public void AddCommand<T>(T command) where T : unmanaged, ICommand
{
throw new NotImplementedException();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe IntPtr GetBufferPointer(int index)
{
if (index >= 0 && index < _buffersEntryCount)
{
return (IntPtr)((float*)_buffersMemoryHandle.Pointer + index * _sampleCount);
}
throw new ArgumentOutOfRangeException();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void ClearBuffer(int index)
{
Unsafe.InitBlock((void*)GetBufferPointer(index), 0, SampleCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void ClearBuffers()
{
Unsafe.InitBlock(_buffersMemoryHandle.Pointer, 0, (uint)_buffersEntryCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void CopyBuffer(int outputBufferIndex, int inputBufferIndex)
{
Unsafe.CopyBlock((void*)GetBufferPointer(outputBufferIndex), (void*)GetBufferPointer(inputBufferIndex), SampleCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public Span<float> GetBuffer(int index)
{
if (index < 0 || index >= _buffersEntryCount)
{
return Span<float>.Empty;
}
unsafe
{
return new Span<float>((float*)_buffersMemoryHandle.Pointer + index * _sampleCount, _sampleCount);
}
}
public ulong GetTimeElapsedSinceDspStartedProcessing()
{
return (ulong)PerformanceCounter.ElapsedNanoseconds - StartTime;
}
public void Process(IHardwareDevice outputDevice)
{
OutputDevice = outputDevice;
StartTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
foreach (ICommand command in Commands)
{
if (command.Enabled)
{
bool shouldMeter = command.ShouldMeter();
long startTime = 0;
if (shouldMeter)
{
startTime = PerformanceCounter.ElapsedNanoseconds;
}
command.Process(this);
if (shouldMeter)
{
ulong effectiveElapsedTime = (ulong)(PerformanceCounter.ElapsedNanoseconds - startTime);
if (effectiveElapsedTime > command.EstimatedProcessingTime)
{
Logger.Warning?.Print(LogClass.AudioRenderer, $"Command {command.GetType().Name} took {effectiveElapsedTime}ns (expected {command.EstimatedProcessingTime}ns)");
}
}
}
}
EndTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
}
public void Dispose()
{
_buffersMemoryHandle.Dispose();
}
}
}
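
A note on the buffer layout assumed above: the pinned backing memory is treated as contiguous slices of SampleCount floats, so buffer index i starts at float i * SampleCount, and GetBuffer(i) and GetBufferPointer(i) both resolve to that slice. A tiny hypothetical helper (not part of the repository) spelling out the math:

// Hypothetical illustration of the slice math used by GetBuffer/GetBufferPointer.
internal static class CommandListLayoutSketch
{
    // With SampleCount = 240 (a typical 5 ms frame at 48 kHz), buffer 3 covers floats [720, 960).
    public static (int Start, int Length) Slice(int bufferIndex, int sampleCount)
    {
        return (bufferIndex * sampleCount, sampleCount);
    }
}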

View file

@ -0,0 +1,37 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public enum CommandType : byte
{
Invalid,
PcmInt16DataSourceVersion1,
PcmInt16DataSourceVersion2,
PcmFloatDataSourceVersion1,
PcmFloatDataSourceVersion2,
AdpcmDataSourceVersion1,
AdpcmDataSourceVersion2,
Volume,
VolumeRamp,
BiquadFilter,
Mix,
MixRamp,
MixRampGrouped,
DepopPrepare,
DepopForMixBuffers,
Delay,
Upsample,
DownMixSurroundToStereo,
AuxiliaryBuffer,
DeviceSink,
CircularBufferSink,
Reverb,
Reverb3d,
Performance,
ClearMixBuffer,
CopyMixBuffer,
LimiterVersion1,
LimiterVersion2,
GroupedBiquadFilter,
CaptureBuffer,
Compressor
}
}

View file

@ -0,0 +1,173 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CompressorCommand : ICommand
{
private const int FixedPointPrecision = 15;
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Compressor;
public uint EstimatedProcessingTime { get; set; }
public CompressorParameter Parameter => _parameter;
public Memory<CompressorState> State { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private CompressorParameter _parameter;
public CompressorCommand(uint bufferOffset, CompressorParameter parameter, Memory<CompressorState> state, bool isEnabled, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < _parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + _parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + _parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref CompressorState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (_parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new CompressorState(ref _parameter);
}
else if (_parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessCompressor(context, ref state);
}
private unsafe void ProcessCompressor(CommandList context, ref CompressorState state)
{
Debug.Assert(_parameter.IsChannelCountValid());
if (IsEffectEnabled && _parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
ExponentialMovingAverage inputMovingAverage = state.InputMovingAverage;
float unknown4 = state.Unknown4;
ExponentialMovingAverage compressionGainAverage = state.CompressionGainAverage;
float previousCompressionEmaAlpha = state.PreviousCompressionEmaAlpha;
for (int i = 0; i < _parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
for (int channelIndex = 0; channelIndex < _parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex);
}
float newMean = inputMovingAverage.Update(FloatingPointHelper.MeanSquare(channelInput), _parameter.InputGain);
float y = FloatingPointHelper.Log10(newMean) * 10.0f;
float z = 0.0f;
bool unknown10OutOfRange = false;
if (newMean < 1.0e-10f)
{
z = 1.0f;
unknown10OutOfRange = state.Unknown10 < -100.0f;
}
if (y >= state.Unknown10 || unknown10OutOfRange)
{
float tmpGain;
if (y >= state.Unknown14)
{
tmpGain = ((1.0f / Parameter.Ratio) - 1.0f) * (y - Parameter.Threshold);
}
else
{
tmpGain = (y - state.Unknown10) * ((y - state.Unknown10) * -state.CompressorGainReduction);
}
z = FloatingPointHelper.DecibelToLinearExtended(tmpGain);
}
float unknown4New = z;
float compressionEmaAlpha;
if ((unknown4 - z) <= 0.08f)
{
compressionEmaAlpha = Parameter.ReleaseCoefficient;
if ((unknown4 - z) >= -0.08f)
{
if (MathF.Abs(compressionGainAverage.Read() - z) >= 0.001f)
{
unknown4New = unknown4;
}
compressionEmaAlpha = previousCompressionEmaAlpha;
}
}
else
{
compressionEmaAlpha = Parameter.AttackCoefficient;
}
float compressionGain = compressionGainAverage.Update(z, compressionEmaAlpha);
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = channelInput[channelIndex] * compressionGain * state.OutputGain;
}
unknown4 = unknown4New;
previousCompressionEmaAlpha = compressionEmaAlpha;
}
state.InputMovingAverage = inputMovingAverage;
state.Unknown4 = unknown4;
state.CompressionGainAverage = compressionGainAverage;
state.PreviousCompressionEmaAlpha = previousCompressionEmaAlpha;
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}
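
As read from the hot path above, the per-sample gain computation is roughly the following (Unknown10 and Unknown14 appear to mark the lower and upper knee boundaries):

    level  = 10 * log10( inputMovingAverage.Update(meanSquare(x), InputGain) )   // input level in dB
    gainDb = (1 / Ratio - 1) * (level - Threshold)                               // above the knee (level >= Unknown14)
    gainDb = -(level - Unknown10)^2 * CompressorGainReduction                    // inside the knee
    gain   = DecibelToLinearExtended(gainDb)

The linear gain is then smoothed by a second exponential moving average whose coefficient switches to AttackCoefficient when the gain is being reduced and to ReleaseCoefficient when it recovers, and the smoothed value scales every channel together with OutputGain.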

View file

@ -0,0 +1,30 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CopyMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CopyMixBuffer;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public CopyMixBufferCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
}
public void Process(CommandList context)
{
context.CopyBuffer(OutputBufferIndex, InputBufferIndex);
}
}
}

View file

@ -0,0 +1,108 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DataSourceVersion2Command : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong ExtraParameter { get; }
public ulong ExtraParameterSize { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public SampleFormat SampleFormat { get; }
public SampleRateConversionQuality SrcQuality { get; }
public DataSourceVersion2Command(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
SampleFormat = serverState.SampleFormat;
SrcQuality = serverState.SrcQuality;
CommandType = GetCommandTypeBySampleFormat(SampleFormat);
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(2);
}
if (SampleFormat == SampleFormat.Adpcm)
{
ExtraParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
ExtraParameterSize = serverState.DataSourceStateAddressInfo.Size;
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
private static CommandType GetCommandTypeBySampleFormat(SampleFormat sampleFormat)
{
switch (sampleFormat)
{
case SampleFormat.Adpcm:
return CommandType.AdpcmDataSourceVersion2;
case SampleFormat.PcmInt16:
return CommandType.PcmInt16DataSourceVersion2;
case SampleFormat.PcmFloat:
return CommandType.PcmFloatDataSourceVersion2;
default:
throw new NotImplementedException($"{sampleFormat}");
}
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = ExtraParameter,
ExtraParameterSize = ExtraParameterSize,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
SrcQuality = SrcQuality
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,280 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using Ryujinx.Audio.Renderer.Utils.Math;
using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DelayCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Delay;
public uint EstimatedProcessingTime { get; set; }
public DelayParameter Parameter => _parameter;
public Memory<DelayState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private DelayParameter _parameter;
private const int FixedPointPrecision = 14;
public DelayCommand(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
DataSourceHelper.RemapLegacyChannelEffectMappingToChannelResourceMapping(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapLegacyChannelEffectMappingToChannelResourceMapping(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayMono(ref DelayState state, float* outputBuffer, float* inputBuffer, uint sampleCount)
{
const ushort channelCount = 1;
float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
float input = inputBuffer[i] * 64;
float delayLineValue = state.DelayLines[0].Read();
float temp = input * inGain + delayLineValue * feedbackGain;
state.UpdateLowPassFilter(ref temp, channelCount);
outputBuffer[i] = (input * dryGain + delayLineValue * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayStereo(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 2;
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix2x2 delayFeedback = new Matrix2x2(delayFeedbackBaseGain, delayFeedbackCrossGain,
delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector2 channelInput = new Vector2
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
};
Vector2 delayLineValues = new Vector2()
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
};
Vector2 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector2, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayQuadraphonic(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 4;
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix4x4 delayFeedback = new Matrix4x4(delayFeedbackBaseGain, delayFeedbackCrossGain, delayFeedbackCrossGain, 0.0f,
delayFeedbackCrossGain, delayFeedbackBaseGain, 0.0f, delayFeedbackCrossGain,
delayFeedbackCrossGain, 0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain,
0.0f, delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector4 channelInput = new Vector4
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
Z = *((float*)inputBuffers[2] + i) * 64,
W = *((float*)inputBuffers[3] + i) * 64
};
Vector4 delayLineValues = new Vector4()
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
Z = state.DelayLines[2].Read(),
W = state.DelayLines[3].Read()
};
Vector4 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector4, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
*((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
*((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelaySurround(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 6;
float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix6x6 delayFeedback = new Matrix6x6(delayFeedbackBaseGain, 0.0f, delayFeedbackCrossGain, 0.0f, delayFeedbackCrossGain, 0.0f,
0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain, 0.0f, 0.0f, delayFeedbackCrossGain,
delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain, 0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, feedbackGain, 0.0f, 0.0f,
delayFeedbackCrossGain, 0.0f, 0.0f, 0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain,
0.0f, delayFeedbackCrossGain, 0.0f, 0.0f, delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector6 channelInput = new Vector6
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
Z = *((float*)inputBuffers[2] + i) * 64,
W = *((float*)inputBuffers[3] + i) * 64,
V = *((float*)inputBuffers[4] + i) * 64,
U = *((float*)inputBuffers[5] + i) * 64
};
Vector6 delayLineValues = new Vector6
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
Z = state.DelayLines[2].Read(),
W = state.DelayLines[3].Read(),
V = state.DelayLines[4].Read(),
U = state.DelayLines[5].Read()
};
Vector6 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector6, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
*((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
*((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
*((float*)outputBuffers[4] + i) = (channelInput.V * dryGain + delayLineValues.V * outGain) / 64;
*((float*)outputBuffers[5] + i) = (channelInput.U * dryGain + delayLineValues.U * outGain) / 64;
}
}
private unsafe void ProcessDelay(CommandList context, ref DelayState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessDelayMono(ref state, (float*)outputBuffers[0], (float*)inputBuffers[0], context.SampleCount);
break;
case 2:
ProcessDelayStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessDelayQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessDelaySurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref DelayState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == UsageState.Invalid)
{
state = new DelayState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessDelay(context, ref state);
}
}
}
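
For the stereo case above, the delay-line feedback applied each sample is (b = DelayFeedbackBaseGain, c = DelayFeedbackCrossGain, as read from ProcessDelayStereo):

    [ newL ]   [ b  c ]   [ delayL ]            [ inL ]
    [ newR ] = [ c  b ] * [ delayR ]  + inGain * [ inR ]

The fed-back vector is handed to state.UpdateLowPassFilter, which presumably filters it and writes it back into the delay lines (that part lives in DelayState and is not shown here), while the output mix is out = (in * dryGain + delay * outGain) / 64, the division undoing the * 64 headroom scaling applied to the input. The quadraphonic and 5.1 paths use the same structure with 4x4 and 6x6 feedback matrices.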

View file

@ -0,0 +1,92 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopForMixBuffersCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopForMixBuffers;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferOffset { get; }
public uint MixBufferCount { get; }
public float Decay { get; }
public Memory<float> DepopBuffer { get; }
public DepopForMixBuffersCommand(Memory<float> depopBuffer, uint bufferOffset, uint mixBufferCount, int nodeId, uint sampleRate)
{
Enabled = true;
NodeId = nodeId;
MixBufferOffset = bufferOffset;
MixBufferCount = mixBufferCount;
DepopBuffer = depopBuffer;
if (sampleRate == 48000)
{
Decay = 0.962189f;
}
else // if (sampleRate == 32000)
{
Decay = 0.943695f;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe float ProcessDepopMix(float* buffer, float depopValue, uint sampleCount)
{
if (depopValue < 0)
{
depopValue = -depopValue;
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] -= depopValue;
}
return -depopValue;
}
else
{
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] += depopValue;
}
return depopValue;
}
}
public void Process(CommandList context)
{
Span<float> depopBuffer = DepopBuffer.Span;
uint bufferCount = Math.Min(MixBufferOffset + MixBufferCount, context.BufferCount);
for (int i = (int)MixBufferOffset; i < bufferCount; i++)
{
float depopValue = depopBuffer[i];
if (depopValue != 0)
{
unsafe
{
float* buffer = (float*)context.GetBufferPointer(i);
depopBuffer[i] = ProcessDepopMix(buffer, depopValue, context.SampleCount);
}
}
}
}
}
}
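
Each nonzero depop value is bled into its mix buffer as a geometric ramp: sample i receives Decay^(i+1) times the stored value, and whatever remains after the frame is written back to the depop buffer for the next frame. Assuming the usual 240-sample (5 ms) frame at 48 kHz, the residue after one frame is 0.962189^240, roughly 9.6e-5 of the original value, so a would-be click is spread over about one frame instead of being cut off abruptly.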

View file

@ -0,0 +1,57 @@
using Ryujinx.Audio.Renderer.Common;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopPrepareCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopPrepare;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] OutputBufferIndices { get; }
public Memory<VoiceUpdateState> State { get; }
public Memory<float> DepopBuffer { get; }
public DepopPrepareCommand(Memory<VoiceUpdateState> state, Memory<float> depopBuffer, uint mixBufferCount, uint bufferOffset, int nodeId, bool enabled)
{
Enabled = enabled;
NodeId = nodeId;
MixBufferCount = mixBufferCount;
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
for (int i = 0; i < Constants.MixBufferCountMax; i++)
{
OutputBufferIndices[i] = (ushort)(bufferOffset + i);
}
State = state;
DepopBuffer = depopBuffer;
}
public void Process(CommandList context)
{
ref VoiceUpdateState state = ref State.Span[0];
Span<float> depopBuffer = DepopBuffer.Span;
for (int i = 0; i < MixBufferCount; i++)
{
if (state.LastSamples[i] != 0)
{
depopBuffer[OutputBufferIndices[i]] += state.LastSamples[i];
state.LastSamples[i] = 0;
}
}
}
}
}

View file

@ -0,0 +1,91 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server.Sink;
using System;
using System.Runtime.CompilerServices;
using System.Text;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DeviceSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DeviceSink;
public uint EstimatedProcessingTime { get; set; }
public string DeviceName { get; }
public int SessionId { get; }
public uint InputCount { get; }
public ushort[] InputBufferIndices { get; }
public Memory<float> Buffers { get; }
public DeviceSinkCommand(uint bufferOffset, DeviceSink sink, int sessionId, Memory<float> buffers, int nodeId)
{
Enabled = true;
NodeId = nodeId;
DeviceName = Encoding.ASCII.GetString(sink.Parameter.DeviceName).TrimEnd('\0');
SessionId = sessionId;
InputCount = sink.Parameter.InputCount;
InputBufferIndices = new ushort[InputCount];
for (int i = 0; i < Math.Min(InputCount, Constants.ChannelCountMax); i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + sink.Parameter.Input[i]);
}
if (sink.UpsamplerState != null)
{
Buffers = sink.UpsamplerState.OutputBuffer;
}
else
{
Buffers = buffers;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private Span<float> GetBuffer(int index, int sampleCount)
{
return Buffers.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
IHardwareDevice device = context.OutputDevice;
if (device.GetSampleRate() == Constants.TargetSampleRate)
{
int channelCount = (int)device.GetChannelCount();
uint bufferCount = Math.Min(device.GetChannelCount(), InputCount);
const int sampleCount = Constants.TargetSampleCount;
short[] outputBuffer = new short[bufferCount * sampleCount];
for (int i = 0; i < bufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = GetBuffer(InputBufferIndices[i], sampleCount);
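// Interleave: sample j of channel i lands at outputBuffer[i + j * channelCount] as signed 16-bit PCM.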
for (int j = 0; j < sampleCount; j++)
{
outputBuffer[i + j * channelCount] = PcmHelper.Saturate(inputBuffer[j]);
}
}
device.AppendBuffer(outputBuffer, InputCount);
}
else
{
// TODO: support resampling for devices that only support a different sample rate
throw new NotImplementedException();
}
}
}
}

View file

@ -0,0 +1,68 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DownMixSurroundToStereoCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DownMixSurroundToStereo;
public uint EstimatedProcessingTime { get; set; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Coefficients { get; }
public DownMixSurroundToStereoCommand(uint bufferOffset, Span<byte> inputBufferOffset, Span<byte> outputBufferOffset, float[] downMixParameter, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Constants.VoiceChannelCountMax; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + outputBufferOffset[i]);
}
Coefficients = downMixParameter;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float DownMixSurroundToStereo(ReadOnlySpan<float> coefficients, float back, float lfe, float center, float front)
{
return FloatingPointHelper.RoundUp(coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front);
}
public void Process(CommandList context)
{
ReadOnlySpan<float> frontLeft = context.GetBuffer(InputBufferIndices[0]);
ReadOnlySpan<float> frontRight = context.GetBuffer(InputBufferIndices[1]);
ReadOnlySpan<float> frontCenter = context.GetBuffer(InputBufferIndices[2]);
ReadOnlySpan<float> lowFrequency = context.GetBuffer(InputBufferIndices[3]);
ReadOnlySpan<float> backLeft = context.GetBuffer(InputBufferIndices[4]);
ReadOnlySpan<float> backRight = context.GetBuffer(InputBufferIndices[5]);
Span<float> stereoLeft = context.GetBuffer(OutputBufferIndices[0]);
Span<float> stereoRight = context.GetBuffer(OutputBufferIndices[1]);
for (int i = 0; i < context.SampleCount; i++)
{
stereoLeft[i] = DownMixSurroundToStereo(Coefficients, backLeft[i], lowFrequency[i], frontCenter[i], frontLeft[i]);
stereoRight[i] = DownMixSurroundToStereo(Coefficients, backRight[i], lowFrequency[i], frontCenter[i], frontRight[i]);
}
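// The center, LFE and back output buffers are unused after the down-mix; clear them.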
context.ClearBuffer(OutputBufferIndices[2]);
context.ClearBuffer(OutputBufferIndices[3]);
context.ClearBuffer(OutputBufferIndices[4]);
context.ClearBuffer(OutputBufferIndices[5]);
}
}
}

View file

@ -0,0 +1,62 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class GroupedBiquadFilterCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.GroupedBiquadFilter;
public uint EstimatedProcessingTime { get; set; }
private BiquadFilterParameter[] _parameters;
private Memory<BiquadFilterState> _biquadFilterStates;
private int _inputBufferIndex;
private int _outputBufferIndex;
private bool[] _isInitialized;
public GroupedBiquadFilterCommand(int baseIndex, ReadOnlySpan<BiquadFilterParameter> filters, Memory<BiquadFilterState> biquadFilterStateMemory, int inputBufferOffset, int outputBufferOffset, ReadOnlySpan<bool> isInitialized, int nodeId)
{
_parameters = filters.ToArray();
_biquadFilterStates = biquadFilterStateMemory;
_inputBufferIndex = baseIndex + inputBufferOffset;
_outputBufferIndex = baseIndex + outputBufferOffset;
_isInitialized = isInitialized.ToArray();
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
Span<BiquadFilterState> states = _biquadFilterStates.Span;
ReadOnlySpan<float> inputBuffer = context.GetBuffer(_inputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(_outputBufferIndex);
for (int i = 0; i < _parameters.Length; i++)
{
if (!_isInitialized[i])
{
states[i] = new BiquadFilterState();
}
}
// NOTE: Nintendo only implements single and double biquad filters, with no generic path, even though the command definition suggests one could exist.
// As such, we currently only implement a generic path for the double biquad case, for simplicity.
if (_parameters.Length == 1)
{
BiquadFilterHelper.ProcessBiquadFilter(ref _parameters[0], ref states[0], outputBuffer, inputBuffer, context.SampleCount);
}
else
{
BiquadFilterHelper.ProcessBiquadFilter(_parameters, states, outputBuffer, inputBuffer, context.SampleCount);
}
}
}
}

View file

@ -0,0 +1,20 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public interface ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public uint EstimatedProcessingTime { get; }
public void Process(CommandList context);
public bool ShouldMeter()
{
return false;
}
}
}

View file

@ -0,0 +1,144 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class LimiterCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.LimiterVersion1;
public uint EstimatedProcessingTime { get; set; }
public LimiterParameter Parameter => _parameter;
public Memory<LimiterState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private LimiterParameter _parameter;
public LimiterCommandVersion1(uint bufferOffset, LimiterParameter parameter, Memory<LimiterState> state, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref LimiterState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new LimiterState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessLimiter(context, ref state);
}
private unsafe void ProcessLimiter(CommandList context, ref LimiterState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
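// Per sample: follow the input envelope with attack/release smoothing, derive the attenuation needed to keep it under the threshold, smooth that gain, and apply it to a delayed (look-ahead) copy of the input.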
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
float rawInputSample = *((float*)inputBuffers[channelIndex] + sampleIndex);
float inputSample = (rawInputSample / short.MaxValue) * Parameter.InputGain;
float sampleInputMax = Math.Abs(inputSample);
float inputCoefficient = Parameter.ReleaseCoefficient;
if (sampleInputMax > state.DetectorAverage[channelIndex].Read())
{
inputCoefficient = Parameter.AttackCoefficient;
}
float detectorValue = state.DetectorAverage[channelIndex].Update(sampleInputMax, inputCoefficient);
float attenuation = 1.0f;
if (detectorValue > Parameter.Threshold)
{
attenuation = Parameter.Threshold / detectorValue;
}
float outputCoefficient = Parameter.ReleaseCoefficient;
if (state.CompressionGainAverage[channelIndex].Read() > attenuation)
{
outputCoefficient = Parameter.AttackCoefficient;
}
float compressionGain = state.CompressionGainAverage[channelIndex].Update(attenuation, outputCoefficient);
ref float delayedSample = ref state.DelayedSampleBuffer[channelIndex * Parameter.DelayBufferSampleCountMax + state.DelayedSampleBufferPosition[channelIndex]];
float outputSample = delayedSample * compressionGain * Parameter.OutputGain;
*((float*)outputBuffers[channelIndex] + sampleIndex) = outputSample * short.MaxValue;
delayedSample = inputSample;
state.DelayedSampleBufferPosition[channelIndex]++;
while (state.DelayedSampleBufferPosition[channelIndex] >= Parameter.DelayBufferSampleCountMin)
{
state.DelayedSampleBufferPosition[channelIndex] -= Parameter.DelayBufferSampleCountMin;
}
}
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}

View file

@ -0,0 +1,163 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class LimiterCommandVersion2 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.LimiterVersion2;
public uint EstimatedProcessingTime { get; set; }
public LimiterParameter Parameter => _parameter;
public Memory<LimiterState> State { get; }
public Memory<EffectResultState> ResultState { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private LimiterParameter _parameter;
public LimiterCommandVersion2(uint bufferOffset, LimiterParameter parameter, Memory<LimiterState> state, Memory<EffectResultState> resultState, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
ResultState = resultState;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref LimiterState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new LimiterState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessLimiter(context, ref state);
}
private unsafe void ProcessLimiter(CommandList context, ref LimiterState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
if (!ResultState.IsEmpty && Parameter.StatisticsReset)
{
ref LimiterStatistics statistics = ref MemoryMarshal.Cast<byte, LimiterStatistics>(ResultState.Span[0].SpecificData)[0];
statistics.Reset();
}
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
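// Same envelope-follower limiter as version 1, with the addition of per-channel statistics (peak input, minimum compression gain) written to the result state when present.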
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
float rawInputSample = *((float*)inputBuffers[channelIndex] + sampleIndex);
float inputSample = (rawInputSample / short.MaxValue) * Parameter.InputGain;
float sampleInputMax = Math.Abs(inputSample);
float inputCoefficient = Parameter.ReleaseCoefficient;
if (sampleInputMax > state.DetectorAverage[channelIndex].Read())
{
inputCoefficient = Parameter.AttackCoefficient;
}
float detectorValue = state.DetectorAverage[channelIndex].Update(sampleInputMax, inputCoefficient);
float attenuation = 1.0f;
if (detectorValue > Parameter.Threshold)
{
attenuation = Parameter.Threshold / detectorValue;
}
float outputCoefficient = Parameter.ReleaseCoefficient;
if (state.CompressionGainAverage[channelIndex].Read() > attenuation)
{
outputCoefficient = Parameter.AttackCoefficient;
}
float compressionGain = state.CompressionGainAverage[channelIndex].Update(attenuation, outputCoefficient);
ref float delayedSample = ref state.DelayedSampleBuffer[channelIndex * Parameter.DelayBufferSampleCountMax + state.DelayedSampleBufferPosition[channelIndex]];
float outputSample = delayedSample * compressionGain * Parameter.OutputGain;
*((float*)outputBuffers[channelIndex] + sampleIndex) = outputSample * short.MaxValue;
delayedSample = inputSample;
state.DelayedSampleBufferPosition[channelIndex]++;
while (state.DelayedSampleBufferPosition[channelIndex] >= Parameter.DelayBufferSampleCountMin)
{
state.DelayedSampleBufferPosition[channelIndex] -= Parameter.DelayBufferSampleCountMin;
}
if (!ResultState.IsEmpty)
{
ref LimiterStatistics statistics = ref MemoryMarshal.Cast<byte, LimiterStatistics>(ResultState.Span[0].SpecificData)[0];
statistics.InputMax[channelIndex] = Math.Max(statistics.InputMax[channelIndex], sampleInputMax);
statistics.CompressionGainMin[channelIndex] = Math.Min(statistics.CompressionGainMin[channelIndex], compressionGain);
}
}
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}

View file

@ -0,0 +1,137 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Mix;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public MixCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId, float volume)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume = volume;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixAvx(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputMix);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputMix);
int sisdStart = inputVec.Length * 8;
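// Vectorized path: accumulate ceil(input * volume) eight samples at a time; Avx.Ceiling mirrors the scalar MultiplyRoundUp. The loop below handles the tail that does not fill a full vector.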
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Add(outputVec[i], Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixSse41(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputMix);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputMix);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse.Add(outputVec[i], Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixAdvSimd(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputMix);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputMix);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.Add(outputVec[i], AdvSimd.Ceiling(AdvSimd.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixSlowPath(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
for (int i = 0; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMix(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
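// Pick the widest SIMD implementation available at runtime, falling back to the scalar path.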
if (Avx.IsSupported)
{
ProcessMixAvx(outputMix, inputMix);
}
else if (Sse41.IsSupported)
{
ProcessMixSse41(outputMix, inputMix);
}
else if (AdvSimd.IsSupported)
{
ProcessMixAdvSimd(outputMix, inputMix);
}
else
{
ProcessMixSlowPath(outputMix, inputMix);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessMix(outputBuffer, inputBuffer);
}
}
}

View file

@ -0,0 +1,68 @@
using Ryujinx.Audio.Renderer.Common;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRamp;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public int LastSampleIndex { get; }
public MixRampCommand(float volume0, float volume1, uint inputBufferIndex, uint outputBufferIndex, int lastSampleIndex, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume0 = volume0;
Volume1 = volume1;
State = state;
LastSampleIndex = lastSampleIndex;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float ProcessMixRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
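// Linearly ramp the volume from Volume0 to Volume1 across the frame while accumulating into the output; the last scaled sample is returned so the caller can store it in the voice state.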
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
State.Span[0].LastSamples[LastSampleIndex] = ProcessMixRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,91 @@
using Ryujinx.Audio.Renderer.Common;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampGroupedCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRampGrouped;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Volume0 { get; }
public float[] Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public MixRampGroupedCommand(uint mixBufferCount, uint inputBufferIndex, uint outputBufferIndex, Span<float> volume0, Span<float> volume1, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
MixBufferCount = mixBufferCount;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.MixBufferCountMax];
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
Volume0 = new float[Constants.MixBufferCountMax];
Volume1 = new float[Constants.MixBufferCountMax];
for (int i = 0; i < mixBufferCount; i++)
{
InputBufferIndices[i] = (ushort)inputBufferIndex;
OutputBufferIndices[i] = (ushort)(outputBufferIndex + i);
Volume0[i] = volume0[i];
Volume1[i] = volume1[i];
}
State = state;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float ProcessMixRampGrouped(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, float volume0, float volume1, int sampleCount)
{
float ramp = (volume1 - volume0) / sampleCount;
float volume = volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
for (int i = 0; i < MixBufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndices[i]);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndices[i]);
float volume0 = Volume0[i];
float volume1 = Volume1[i];
ref VoiceUpdateState state = ref State.Span[0];
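// A ramp with both endpoints at zero contributes nothing, so skip the work and record a zero last sample.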
if (volume0 != 0 || volume1 != 0)
{
state.LastSamples[i] = ProcessMixRampGrouped(outputBuffer, inputBuffer, volume0, volume1, (int)context.SampleCount);
}
else
{
state.LastSamples[i] = 0;
}
}
}
}
}

View file

@ -0,0 +1,74 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmFloatDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmFloatDataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmFloatDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
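// Describe the wave buffers and let DataSourceHelper decode, pitch-shift and resample them into the output mix buffer.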
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmFloat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,74 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmInt16DataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmInt16DataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmInt16DataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmInt16,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,47 @@
using Ryujinx.Audio.Renderer.Server.Performance;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PerformanceCommand : ICommand
{
public enum Type
{
Invalid,
Start,
End
}
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Performance;
public uint EstimatedProcessingTime { get; set; }
public PerformanceEntryAddresses PerformanceEntryAddresses { get; }
public Type PerformanceType { get; set; }
public PerformanceCommand(ref PerformanceEntryAddresses performanceEntryAddresses, Type performanceType, int nodeId)
{
Enabled = true;
PerformanceEntryAddresses = performanceEntryAddresses;
PerformanceType = performanceType;
NodeId = nodeId;
}
public void Process(CommandList context)
{
if (PerformanceType == Type.Start)
{
PerformanceEntryAddresses.SetStartTime(context.GetTimeElapsedSinceDspStartedProcessing());
}
else if (PerformanceType == Type.End)
{
PerformanceEntryAddresses.SetProcessingTime(context.GetTimeElapsedSinceDspStartedProcessing());
PerformanceEntryAddresses.IncrementEntryCount();
}
}
}
}

View file

@ -0,0 +1,254 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class Reverb3dCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[20] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[1] { 0 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[20] { 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[2] { 0, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[20] { 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0, 0, 0, 0, 3, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[40] { 4, 5, 0, 5, 0, 5, 1, 5, 1, 5, 1, 5, 1, 5, 2, 5, 2, 5, 2, 5, 1, 5, 1, 5, 1, 5, 0, 5, 0, 5, 0, 5, 0, 5, 3, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[40] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[6] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb3d;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public Reverb3dParameter Parameter => _parameter;
public Memory<Reverb3dState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private Reverb3dParameter _parameter;
public Reverb3dCommand(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
// NOTE: For now we do the opposite of what Nintendo does, to preserve the previous behaviour.
// TODO: Update the reverb 3D processing and remove this in favour of RemapLegacyChannelEffectMappingToChannelResourceMapping.
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dMono(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableMono, TargetEarlyDelayLineIndicesTableMono, TargetOutputFeedbackIndicesTableMono);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dStereo(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableStereo, TargetEarlyDelayLineIndicesTableStereo, TargetOutputFeedbackIndicesTableStereo);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dQuadraphonic(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableQuadraphonic, TargetEarlyDelayLineIndicesTableQuadraphonic, TargetOutputFeedbackIndicesTableQuadraphonic);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dSurround(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableSurround, TargetEarlyDelayLineIndicesTableSurround, TargetOutputFeedbackIndicesTableSurround);
}
private unsafe void ProcessReverb3dGeneric(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable)
{
const int delayLineSampleIndexOffset = 1;
bool isMono = Parameter.ChannelCount == 1;
bool isSurround = Parameter.ChannelCount == 6;
Span<float> outputValues = stackalloc float[Constants.ChannelCountMax];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
Span<float> feedbackValues = stackalloc float[4];
Span<float> feedbackOutputValues = stackalloc float[4];
Span<float> values = stackalloc float[4];
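// Per sample: gather early reflections from the tapped pre-delay line, feed the smoothed input back into it, run a four-line feedback delay network for the late reverb, then mix early, late and dry signals into the outputs.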
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.Fill(0);
float tapOut = state.PreDelayLine.TapUnsafe(state.ReflectionDelayTime, delayLineSampleIndexOffset);
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tempTapOut = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], delayLineSampleIndexOffset);
outputValues[outputIndex] += tempTapOut * state.EarlyGain[earlyDelayIndex];
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex);
targetPreDelayValue += channelInput[channelIndex];
}
for (int i = 0; i < Parameter.ChannelCount; i++)
{
outputValues[i] *= state.EarlyReflectionsGain;
}
state.PreviousPreDelayValue = (targetPreDelayValue * state.TargetPreDelayGain) + (state.PreviousPreDelayValue * state.PreviousPreDelayGain);
state.PreDelayLine.Update(state.PreviousPreDelayValue);
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
float fdnValue = state.FdnDelayLines[i].Read();
float feedbackOutputValue = fdnValue * state.DecayDirectFdnGain[i] + state.PreviousFeedbackOutputDecayed[i];
state.PreviousFeedbackOutputDecayed[i] = (fdnValue * state.DecayCurrentFdnGain[i]) + (feedbackOutputValue * state.DecayCurrentOutputGain[i]);
feedbackOutputValues[i] = feedbackOutputValue;
}
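// Mix the four FDN outputs through a fixed feedback matrix before they re-enter the decay delay lines.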
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.DecayDelays1.Length; i++)
{
float temp = state.DecayDelays1[i].Update(tapOut * state.LateReverbGain + feedbackValues[i]);
values[i] = state.DecayDelays2[i].Update(temp);
state.FdnDelayLines[i].Update(values[i]);
}
for (int channelIndex = 0; channelIndex < targetOutputFeedbackIndicesTable.Length; channelIndex++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[channelIndex];
if (targetOutputFeedbackIndex >= 0)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = (outputValues[channelIndex] + values[targetOutputFeedbackIndex] + channelInput[channelIndex] * state.DryGain);
}
}
if (isMono)
{
*((float*)outputBuffers[0] + sampleIndex) += values[1];
}
if (isSurround)
{
*((float*)outputBuffers[4] + sampleIndex) += (outputValues[4] + state.FrontCenterDelayLine.Update((values[2] - values[3]) * 0.5f) + channelInput[4] * state.DryGain);
}
}
}
public void ProcessReverb3d(CommandList context, ref Reverb3dState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverb3dMono(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverb3dStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverb3dQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverb3dSurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref Reverb3dState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.ParameterStatus == UsageState.Invalid)
{
state = new Reverb3dState(ref _parameter, WorkBuffer);
}
else if (Parameter.ParameterStatus == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb3d(context, ref state);
}
}
}

View file

@ -0,0 +1,279 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ReverbCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[10] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableMono = new int[4] { 0, 0, 0, 0 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[10] { 0, 0, 1, 1, 0, 1, 0, 0, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableStereo = new int[4] { 0, 0, 1, 1 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[4] { 2, 0, 3, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[10] { 0, 0, 1, 1, 0, 1, 2, 2, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[20] { 0, 5, 0, 5, 1, 5, 1, 5, 4, 5, 4, 5, 2, 5, 2, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[20] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9 };
private static readonly int[] OutputIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, 4, 5 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb;
public uint EstimatedProcessingTime { get; set; }
public ReverbParameter Parameter => _parameter;
public Memory<ReverbState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsLongSizePreDelaySupported { get; }
public bool IsEffectEnabled { get; }
private ReverbParameter _parameter;
private const int FixedPointPrecision = 14;
public ReverbCommand(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, ulong workBuffer, int nodeId, bool isLongSizePreDelaySupported, bool newEffectChannelMappingSupported)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
IsLongSizePreDelaySupported = isLongSizePreDelaySupported;
// NOTE: For now we do the opposite of what Nintendo does, to preserve the previous behaviour.
// TODO: Update the reverb processing and remove this in favour of RemapLegacyChannelEffectMappingToChannelResourceMapping.
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbMono(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableMono,
TargetEarlyDelayLineIndicesTableMono,
TargetOutputFeedbackIndicesTableMono,
OutputIndicesTableMono);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbStereo(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableStereo,
TargetEarlyDelayLineIndicesTableStereo,
TargetOutputFeedbackIndicesTableStereo,
OutputIndicesTableStereo);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbQuadraphonic(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableQuadraphonic,
TargetEarlyDelayLineIndicesTableQuadraphonic,
TargetOutputFeedbackIndicesTableQuadraphonic,
OutputIndicesTableQuadraphonic);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbSurround(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableSurround,
TargetEarlyDelayLineIndicesTableSurround,
TargetOutputFeedbackIndicesTableSurround,
OutputIndicesTableSurround);
}
private unsafe void ProcessReverbGeneric(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable, ReadOnlySpan<int> outputIndicesTable)
{
bool isSurround = Parameter.ChannelCount == 6;
float reverbGain = FixedPointHelper.ToFloat(Parameter.ReverbGain, FixedPointPrecision);
float lateGain = FixedPointHelper.ToFloat(Parameter.LateGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
Span<float> outputValues = stackalloc float[Constants.ChannelCountMax];
Span<float> feedbackValues = stackalloc float[4];
Span<float> feedbackOutputValues = stackalloc float[4];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
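// Per sample: accumulate early reflections from the pre-delay taps, generate the late reverb with a four-line feedback delay network, and mix with the dry signal. Inputs are scaled by 64 before processing and scaled back on output.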
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.Fill(0);
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tapOutput = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], 0);
outputValues[outputIndex] += tapOutput * state.EarlyGain[earlyDelayIndex];
}
if (isSurround)
{
outputValues[5] *= 0.2f;
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex) * 64;
targetPreDelayValue += channelInput[channelIndex] * reverbGain;
}
state.PreDelayLine.Update(targetPreDelayValue);
float lateValue = state.PreDelayLine.Tap(state.PreDelayLineDelayTime) * lateGain;
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.FdnDelayLines[i].Read() * state.HighFrequencyDecayDirectGain[i] + state.PreviousFeedbackOutput[i] * state.HighFrequencyDecayPreviousGain[i];
state.PreviousFeedbackOutput[i] = feedbackOutputValues[i];
}
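// Mix the four FDN outputs through a fixed feedback matrix before feeding the decay delay lines.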
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.DecayDelays[i].Update(feedbackValues[i] + lateValue);
state.FdnDelayLines[i].Update(feedbackOutputValues[i]);
}
for (int i = 0; i < targetOutputFeedbackIndicesTable.Length; i++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[i];
int outputIndex = outputIndicesTable[i];
if (targetOutputFeedbackIndex >= 0)
{
outputValues[outputIndex] += feedbackOutputValues[targetOutputFeedbackIndex];
}
}
if (isSurround)
{
outputValues[4] += state.FrontCenterDelayLine.Update((feedbackOutputValues[2] - feedbackOutputValues[3]) * 0.5f);
}
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = (outputValues[channelIndex] * outGain + channelInput[channelIndex] * dryGain) / 64;
}
}
}
private void ProcessReverb(CommandList context, ref ReverbState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverbMono(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverbStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverbQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverbSurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref ReverbState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new ReverbState(ref _parameter, WorkBuffer, IsLongSizePreDelaySupported);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb(context, ref state);
}
}
}

View file

@ -0,0 +1,70 @@
using Ryujinx.Audio.Renderer.Server.Upsampler;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class UpsampleCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Upsample;
public uint EstimatedProcessingTime { get; set; }
public uint BufferCount { get; }
public uint InputBufferIndex { get; }
public uint InputSampleCount { get; }
public uint InputSampleRate { get; }
public UpsamplerState UpsamplerInfo { get; }
public Memory<float> OutBuffer { get; }
public UpsampleCommand(uint bufferOffset, UpsamplerState info, uint inputCount, Span<byte> inputBufferOffset, uint bufferCount, uint sampleCount, uint sampleRate, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = 0;
OutBuffer = info.OutputBuffer;
BufferCount = bufferCount;
InputSampleCount = sampleCount;
InputSampleRate = sampleRate;
info.SourceSampleCount = inputCount;
info.InputBufferIndices = new ushort[inputCount];
for (int i = 0; i < inputCount; i++)
{
info.InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
}
if (info.BufferStates?.Length != (int)inputCount)
{
// Only reallocate when the input count changed, so existing buffer states are kept where possible.
info.BufferStates = new UpsamplerBufferState[(int)inputCount];
}
UpsamplerInfo = info;
}
private Span<float> GetBuffer(int index, int sampleCount)
{
return UpsamplerInfo.OutputBuffer.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
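// Upsample each input mix buffer into the upsampler state's output buffer, keeping per-buffer filter state across frames.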
uint bufferCount = Math.Min(BufferCount, UpsamplerInfo.SourceSampleCount);
for (int i = 0; i < bufferCount; i++)
{
Span<float> inputBuffer = context.GetBuffer(UpsamplerInfo.InputBufferIndices[i]);
Span<float> outputBuffer = GetBuffer(UpsamplerInfo.InputBufferIndices[i], (int)UpsamplerInfo.SampleCount);
UpsamplerHelper.Upsample(outputBuffer, inputBuffer, (int)UpsamplerInfo.SampleCount, (int)InputSampleCount, ref UpsamplerInfo.BufferStates[i]);
}
}
}
}

View file

@ -0,0 +1,137 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Volume;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public VolumeCommand(float volume, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume = volume;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeAvx(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputBuffer);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputBuffer);
int sisdStart = inputVec.Length * 8;
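// Vectorized path: eight samples per iteration, with Avx.Ceiling matching the scalar round-up multiply; the loop below handles the remaining tail.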
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeSse41(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputBuffer);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputBuffer);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeAdvSimd(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputBuffer);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputBuffer);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.Ceiling(AdvSimd.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolume(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
if (Avx.IsSupported)
{
ProcessVolumeAvx(outputBuffer, inputBuffer);
}
else if (Sse41.IsSupported)
{
ProcessVolumeSse41(outputBuffer, inputBuffer);
}
else if (AdvSimd.IsSupported)
{
ProcessVolumeAdvSimd(outputBuffer, inputBuffer);
}
else
{
ProcessVolumeSlowPath(outputBuffer, inputBuffer);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeSlowPath(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
for (int i = 0; i < outputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolume(outputBuffer, inputBuffer);
}
}
}

View file

@ -0,0 +1,56 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.VolumeRamp;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public VolumeRampCommand(float volume0, float volume1, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume0 = volume0;
Volume1 = volume1;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
for (int i = 0; i < sampleCount; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
volume += ramp;
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolumeRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}

View file

@ -0,0 +1,466 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class DataSourceHelper
{
private const int FixedPointPrecision = 15;
public struct WaveBufferInformation
{
public uint SourceSampleRate;
public float Pitch;
public ulong ExtraParameter;
public ulong ExtraParameterSize;
public int ChannelIndex;
public int ChannelCount;
public DecodingBehaviour DecodingBehaviour;
public SampleRateConversionQuality SrcQuality;
public SampleFormat SampleFormat;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPitchLimitBySrcQuality(SampleRateConversionQuality quality)
{
return quality switch
{
SampleRateConversionQuality.Default or SampleRateConversionQuality.Low => 4,
SampleRateConversionQuality.High => 8,
_ => throw new ArgumentException(quality.ToString()),
};
}
public static void ProcessWaveBuffers(IVirtualMemoryManager memoryManager, Span<float> outputBuffer, ref WaveBufferInformation info, Span<WaveBuffer> wavebuffers, ref VoiceUpdateState voiceState, uint targetSampleRate, int sampleCount)
{
const int tempBufferSize = 0x3F00;
Span<short> tempBuffer = stackalloc short[tempBufferSize];
float sampleRateRatio = (float)info.SourceSampleRate / targetSampleRate * info.Pitch;
float fraction = voiceState.Fraction;
int waveBufferIndex = (int)voiceState.WaveBufferIndex;
ulong playedSampleCount = voiceState.PlayedSampleCount;
int offset = voiceState.Offset;
uint waveBufferConsumed = voiceState.WaveBufferConsumed;
int pitchMaxLength = GetPitchLimitBySrcQuality(info.SrcQuality);
int totalNeededSize = (int)MathF.Truncate(fraction + sampleRateRatio * sampleCount);
if (totalNeededSize + pitchMaxLength <= tempBufferSize && totalNeededSize >= 0)
{
int sourceSampleCountToProcess = sampleCount;
int maxSampleCountPerIteration = Math.Min((int)MathF.Truncate((tempBufferSize - fraction) / sampleRateRatio), sampleCount);
bool isStarving = false;
int i = 0;
while (i < sourceSampleCountToProcess)
{
int tempBufferIndex = 0;
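// When pitch/sample rate conversion is enabled, prepend the previous iteration's history samples so the resampler filter sees continuous input.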
if (!info.DecodingBehaviour.HasFlag(DecodingBehaviour.SkipPitchAndSampleRateConversion))
{
voiceState.Pitch.AsSpan().Slice(0, pitchMaxLength).CopyTo(tempBuffer);
tempBufferIndex += pitchMaxLength;
}
int sampleCountToProcess = Math.Min(sourceSampleCountToProcess, maxSampleCountPerIteration);
int y = 0;
int sampleCountToDecode = (int)MathF.Truncate(fraction + sampleRateRatio * sampleCountToProcess);
while (y < sampleCountToDecode)
{
if (waveBufferIndex >= Constants.VoiceWaveBufferCount)
{
waveBufferIndex = 0;
playedSampleCount = 0;
}
if (!voiceState.IsWaveBufferValid[waveBufferIndex])
{
isStarving = true;
break;
}
ref WaveBuffer waveBuffer = ref wavebuffers[waveBufferIndex];
if (offset == 0 && info.SampleFormat == SampleFormat.Adpcm && waveBuffer.Context != 0)
{
voiceState.LoopContext = memoryManager.Read<AdpcmLoopContext>(waveBuffer.Context);
}
Span<short> tempSpan = tempBuffer.Slice(tempBufferIndex + y);
int decodedSampleCount = -1;
int targetSampleStartOffset;
int targetSampleEndOffset;
if (voiceState.LoopCount > 0 && waveBuffer.LoopStartSampleOffset != 0 && waveBuffer.LoopEndSampleOffset != 0 && waveBuffer.LoopStartSampleOffset <= waveBuffer.LoopEndSampleOffset)
{
targetSampleStartOffset = (int)waveBuffer.LoopStartSampleOffset;
targetSampleEndOffset = (int)waveBuffer.LoopEndSampleOffset;
}
else
{
targetSampleStartOffset = (int)waveBuffer.StartSampleOffset;
targetSampleEndOffset = (int)waveBuffer.EndSampleOffset;
}
int targetWaveBufferSampleCount = targetSampleEndOffset - targetSampleStartOffset;
switch (info.SampleFormat)
{
case SampleFormat.Adpcm:
ReadOnlySpan<byte> waveBufferAdpcm = ReadOnlySpan<byte>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
// TODO: We are possibly copying a lot of unneeded data here; we should only take what we need.
waveBufferAdpcm = memoryManager.GetSpan(waveBuffer.Buffer, (int)waveBuffer.BufferSize);
}
ReadOnlySpan<short> coefficients = MemoryMarshal.Cast<byte, short>(memoryManager.GetSpan(info.ExtraParameter, (int)info.ExtraParameterSize));
decodedSampleCount = AdpcmHelper.Decode(tempSpan, waveBufferAdpcm, targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y, coefficients, ref voiceState.LoopContext);
break;
case SampleFormat.PcmInt16:
ReadOnlySpan<short> waveBufferPcm16 = ReadOnlySpan<short>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
ulong bufferOffset = waveBuffer.Buffer + PcmHelper.GetBufferOffset<short>(targetSampleStartOffset, offset, info.ChannelCount);
int bufferSize = PcmHelper.GetBufferSize<short>(targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y) * info.ChannelCount;
waveBufferPcm16 = MemoryMarshal.Cast<byte, short>(memoryManager.GetSpan(bufferOffset, bufferSize));
}
decodedSampleCount = PcmHelper.Decode(tempSpan, waveBufferPcm16, targetSampleStartOffset, targetSampleEndOffset, info.ChannelIndex, info.ChannelCount);
break;
case SampleFormat.PcmFloat:
ReadOnlySpan<float> waveBufferPcmFloat = ReadOnlySpan<float>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
ulong bufferOffset = waveBuffer.Buffer + PcmHelper.GetBufferOffset<float>(targetSampleStartOffset, offset, info.ChannelCount);
int bufferSize = PcmHelper.GetBufferSize<float>(targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y) * info.ChannelCount;
waveBufferPcmFloat = MemoryMarshal.Cast<byte, float>(memoryManager.GetSpan(bufferOffset, bufferSize));
}
decodedSampleCount = PcmHelper.Decode(tempSpan, waveBufferPcmFloat, targetSampleStartOffset, targetSampleEndOffset, info.ChannelIndex, info.ChannelCount);
break;
default:
Logger.Error?.Print(LogClass.AudioRenderer, $"Unsupported sample format {info.SampleFormat}");
break;
}
Debug.Assert(decodedSampleCount <= sampleCountToDecode);
if (decodedSampleCount < 0)
{
Logger.Warning?.Print(LogClass.AudioRenderer, "Decoding failed, skipping WaveBuffer");
voiceState.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
decodedSampleCount = 0;
}
y += decodedSampleCount;
offset += decodedSampleCount;
playedSampleCount += (uint)decodedSampleCount;
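// Current wave buffer exhausted (or no progress was made): handle looping or advance to the next buffer.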
if (offset >= targetWaveBufferSampleCount || decodedSampleCount == 0)
{
offset = 0;
if (waveBuffer.Looping)
{
voiceState.LoopCount++;
if (waveBuffer.LoopCount >= 0)
{
if (decodedSampleCount == 0 || voiceState.LoopCount > waveBuffer.LoopCount)
{
voiceState.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
}
}
if (decodedSampleCount == 0)
{
isStarving = true;
break;
}
if (info.DecodingBehaviour.HasFlag(DecodingBehaviour.PlayedSampleCountResetWhenLooping))
{
playedSampleCount = 0;
}
}
else
{
voiceState.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
}
}
}
Span<int> outputSpanInt = MemoryMarshal.Cast<float, int>(outputBuffer.Slice(i));
if (info.DecodingBehaviour.HasFlag(DecodingBehaviour.SkipPitchAndSampleRateConversion))
{
for (int j = 0; j < y; j++)
{
outputBuffer[j] = tempBuffer[j];
}
}
else
{
Span<short> tempSpan = tempBuffer.Slice(tempBufferIndex + y);
tempSpan.Slice(0, sampleCountToDecode - y).Fill(0);
ToFloat(outputBuffer, outputSpanInt, sampleCountToProcess);
ResamplerHelper.Resample(outputBuffer, tempBuffer, sampleRateRatio, ref fraction, sampleCountToProcess, info.SrcQuality, y != sourceSampleCountToProcess || info.Pitch != 1.0f);
tempBuffer.Slice(sampleCountToDecode, pitchMaxLength).CopyTo(voiceState.Pitch.AsSpan());
}
i += sampleCountToProcess;
}
Debug.Assert(sourceSampleCountToProcess == i || !isStarving);
voiceState.WaveBufferConsumed = waveBufferConsumed;
voiceState.Offset = offset;
voiceState.PlayedSampleCount = playedSampleCount;
voiceState.WaveBufferIndex = (uint)waveBufferIndex;
voiceState.Fraction = fraction;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void ToFloatAvx(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
ReadOnlySpan<Vector256<int>> inputVec = MemoryMarshal.Cast<int, Vector256<int>>(input);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(output);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.ConvertToVector256Single(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void ToFloatSse2(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
ReadOnlySpan<Vector128<int>> inputVec = MemoryMarshal.Cast<int, Vector128<int>>(input);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse2.ConvertToVector128Single(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void ToFloatAdvSimd(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
ReadOnlySpan<Vector128<int>> inputVec = MemoryMarshal.Cast<int, Vector128<int>>(input);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.ConvertToSingle(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToFloatSlow(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
for (int i = 0; i < sampleCount; i++)
{
output[i] = input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToFloat(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
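// Pick the widest SIMD implementation available, falling back to the scalar loop.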
if (Avx.IsSupported)
{
ToFloatAvx(output, input, sampleCount);
}
else if (Sse2.IsSupported)
{
ToFloatSse2(output, input, sampleCount);
}
else if (AdvSimd.IsSupported)
{
ToFloatAdvSimd(output, input, sampleCount);
}
else
{
ToFloatSlow(output, input, sampleCount);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToIntAvx(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(input);
Span<Vector256<int>> outputVec = MemoryMarshal.Cast<int, Vector256<int>>(output);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.ConvertToVector256Int32(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToIntSse2(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(input);
Span<Vector128<int>> outputVec = MemoryMarshal.Cast<int, Vector128<int>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse2.ConvertToVector128Int32(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToIntAdvSimd(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(input);
Span<Vector128<int>> outputVec = MemoryMarshal.Cast<int, Vector128<int>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.ConvertToInt32RoundToZero(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToIntSlow(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
for (int i = 0; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToInt(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
if (Avx.IsSupported)
{
ToIntAvx(output, input, sampleCount);
}
else if (Sse2.IsSupported)
{
ToIntSse2(output, input, sampleCount);
}
else if (AdvSimd.IsSupported)
{
ToIntAdvSimd(output, input, sampleCount);
}
else
{
ToIntSlow(output, input, sampleCount);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void RemapLegacyChannelEffectMappingToChannelResourceMapping(bool isSupported, Span<ushort> bufferIndices)
{
if (!isSupported && bufferIndices.Length == 6)
{
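// Reorder the legacy 5.1 layout (FL, FR, BL, BR, FC, LFE) into FL, FR, FC, LFE, BL, BR.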
ushort backLeft = bufferIndices[2];
ushort backRight = bufferIndices[3];
ushort frontCenter = bufferIndices[4];
ushort lowFrequency = bufferIndices[5];
bufferIndices[2] = frontCenter;
bufferIndices[3] = lowFrequency;
bufferIndices[4] = backLeft;
bufferIndices[5] = backRight;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void RemapChannelResourceMappingToLegacy(bool isSupported, Span<ushort> bufferIndices)
{
if (isSupported && bufferIndices.Length == 6)
{
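// Inverse of the remapping above: move the center/LFE pair back behind the back channels for the legacy layout.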
ushort frontCenter = bufferIndices[2];
ushort lowFrequency = bufferIndices[3];
ushort backLeft = bufferIndices[4];
ushort backRight = bufferIndices[5];
bufferIndices[2] = backLeft;
bufferIndices[3] = backRight;
bufferIndices[4] = frontCenter;
bufferIndices[5] = lowFrequency;
}
}
}
}

View file

@@ -0,0 +1,52 @@
namespace Ryujinx.Audio.Renderer.Dsp.Effect
{
public class DecayDelay : IDelayLine
{
private readonly IDelayLine _delayLine;
public uint CurrentSampleCount => _delayLine.CurrentSampleCount;
public uint SampleCountMax => _delayLine.SampleCountMax;
private float _decayRate;
public DecayDelay(IDelayLine delayLine)
{
_decayRate = 0.0f;
_delayLine = delayLine;
}
public void SetDecayRate(float decayRate)
{
_decayRate = decayRate;
}
public float Update(float value)
{
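// All-pass style decay: the scaled delayed sample is subtracted on the way into the delay line and added back on the way out.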
float delayLineValue = _delayLine.Read();
float processedValue = value - (_decayRate * delayLineValue);
return _delayLine.Update(processedValue) + processedValue * _decayRate;
}
public void SetDelay(float delayTime)
{
_delayLine.SetDelay(delayTime);
}
public float Read()
{
return _delayLine.Read();
}
public float TapUnsafe(uint sampleIndex, int offset)
{
return _delayLine.TapUnsafe(sampleIndex, offset);
}
public float Tap(uint sampleIndex)
{
return _delayLine.Tap(sampleIndex);
}
}
}

View file

@@ -0,0 +1,78 @@
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Effect
{
public class DelayLine : IDelayLine
{
private float[] _workBuffer;
private uint _sampleRate;
private uint _currentSampleIndex;
private uint _lastSampleIndex;
public uint CurrentSampleCount { get; private set; }
public uint SampleCountMax { get; private set; }
public DelayLine(uint sampleRate, float delayTimeMax)
{
_sampleRate = sampleRate;
SampleCountMax = IDelayLine.GetSampleCount(_sampleRate, delayTimeMax);
_workBuffer = new float[SampleCountMax + 1];
SetDelay(delayTimeMax);
}
private void ConfigureDelay(uint targetSampleCount)
{
CurrentSampleCount = Math.Min(SampleCountMax, targetSampleCount);
_currentSampleIndex = 0;
if (CurrentSampleCount == 0)
{
_lastSampleIndex = 0;
}
else
{
_lastSampleIndex = CurrentSampleCount - 1;
}
}
public void SetDelay(float delayTime)
{
ConfigureDelay(IDelayLine.GetSampleCount(_sampleRate, delayTime));
}
public float Read()
{
return _workBuffer[_currentSampleIndex];
}
public float Update(float value)
{
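// Read the oldest sample, overwrite it with the new value, then wrap the index at the end of the configured delay.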
float output = Read();
_workBuffer[_currentSampleIndex++] = value;
if (_currentSampleIndex >= _lastSampleIndex)
{
_currentSampleIndex = 0;
}
return output;
}
public float TapUnsafe(uint sampleIndex, int offset)
{
return IDelayLine.Tap(_workBuffer, (int)_currentSampleIndex, (int)sampleIndex + offset, (int)CurrentSampleCount);
}
public float Tap(uint sampleIndex)
{
if (sampleIndex >= CurrentSampleCount)
{
sampleIndex = CurrentSampleCount - 1;
}
return TapUnsafe(sampleIndex, -1);
}
}
}

View file

@@ -0,0 +1,76 @@
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Effect
{
public class DelayLine3d : IDelayLine
{
private float[] _workBuffer;
private uint _sampleRate;
private uint _currentSampleIndex;
private uint _lastSampleIndex;
public uint CurrentSampleCount { get; private set; }
public uint SampleCountMax { get; private set; }
public DelayLine3d(uint sampleRate, float delayTimeMax)
{
_sampleRate = sampleRate;
SampleCountMax = IDelayLine.GetSampleCount(_sampleRate, delayTimeMax);
_workBuffer = new float[SampleCountMax + 1];
SetDelay(delayTimeMax);
}
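// Read (_currentSampleIndex) and write (_lastSampleIndex) positions are tracked separately, so the delay can be reconfigured without clearing the buffer.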
private void ConfigureDelay(uint targetSampleCount)
{
if (SampleCountMax >= targetSampleCount)
{
CurrentSampleCount = targetSampleCount;
_lastSampleIndex = (_currentSampleIndex + targetSampleCount) % (SampleCountMax + 1);
}
}
public void SetDelay(float delayTime)
{
ConfigureDelay(IDelayLine.GetSampleCount(_sampleRate, delayTime));
}
public float Read()
{
return _workBuffer[_currentSampleIndex];
}
public float Update(float value)
{
Debug.Assert(!float.IsNaN(value) && !float.IsInfinity(value));
_workBuffer[_lastSampleIndex++] = value;
float output = Read();
_currentSampleIndex++;
if (_currentSampleIndex >= SampleCountMax)
{
_currentSampleIndex = 0;
}
if (_lastSampleIndex >= SampleCountMax)
{
_lastSampleIndex = 0;
}
return output;
}
public float TapUnsafe(uint sampleIndex, int offset)
{
return IDelayLine.Tap(_workBuffer, (int)_lastSampleIndex, (int)sampleIndex + offset, (int)SampleCountMax + 1);
}
public float Tap(uint sampleIndex)
{
return TapUnsafe(sampleIndex, -1);
}
}
}

View file

@@ -0,0 +1,26 @@
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Effect
{
public struct ExponentialMovingAverage
{
private float _mean;
public ExponentialMovingAverage(float mean)
{
_mean = mean;
}
public float Read()
{
return _mean;
}
public float Update(float value, float alpha)
{
_mean += alpha * (value - _mean);
return _mean;
}
}
}

View file

@@ -0,0 +1,37 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Effect
{
public interface IDelayLine
{
uint CurrentSampleCount { get; }
uint SampleCountMax { get; }
void SetDelay(float delayTime);
float Read();
float Update(float value);
float TapUnsafe(uint sampleIndex, int offset);
float Tap(uint sampleIndex);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Tap(Span<float> workBuffer, int baseIndex, int sampleIndex, int delaySampleCount)
{
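// Step back sampleIndex positions from baseIndex, wrapping around the circular work buffer.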
int targetIndex = baseIndex - sampleIndex;
if (targetIndex < 0)
{
targetIndex += delaySampleCount;
}
return workBuffer[targetIndex];
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint GetSampleCount(uint sampleRate, float delayTime)
{
return (uint)MathF.Round(sampleRate * delayTime);
}
}
}

View file

@@ -0,0 +1,39 @@
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class FixedPointHelper
{
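// Helpers for fixed-point (Qn) values; qBits is the number of fractional bits.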
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ToInt(long value, int qBits)
{
return (int)(value >> qBits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float ToFloat(long value, int qBits)
{
return (float)value / (1 << qBits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float ConvertFloat(float value, int qBits)
{
return value / (1 << qBits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ToFixed(float value, int qBits)
{
return (int)(value * (1 << qBits));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int RoundUpAndToInt(long value, int qBits)
{
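// Adding half of one integer step before the shift rounds to the nearest integer instead of truncating.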
int half = 1 << (qBits - 1);
return ToInt(value + half, qBits);
}
}
}

View file

@@ -0,0 +1,115 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class FloatingPointHelper
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float MultiplyRoundDown(float a, float b)
{
return RoundDown(a * b);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float RoundDown(float a)
{
return MathF.Round(a, MidpointRounding.ToZero);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float RoundUp(float a)
{
return MathF.Round(a, MidpointRounding.AwayFromZero);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float MultiplyRoundUp(float a, float b)
{
return RoundUp(a * b);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Pow10(float x)
{
// NOTE: Nintendo implementation uses Q15 and a LUT for this, we don't.
// As such, we support the same ranges as Nintendo to avoid unexpected behaviours.
if (x >= 0.0f)
{
return 1.0f;
}
else if (x <= -5.3f)
{
return 0.0f;
}
return MathF.Pow(10, x);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Log10(float x)
{
// NOTE: Nintendo uses an approximation of log10, we don't.
// As such, we support the same ranges as Nintendo to avoid unexpected behaviours.
return MathF.Log10(MathF.Max(x, 1.0e-10f));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float MeanSquare(ReadOnlySpan<float> inputs)
{
float res = 0.0f;
foreach (float input in inputs)
{
res += (input * input);
}
res /= inputs.Length;
return res;
}
/// <summary>
/// Map decibel to linear.
/// </summary>
/// <param name="db">The decibel value to convert</param>
/// <returns>Converted linear value</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float DecibelToLinear(float db)
{
return MathF.Pow(10.0f, db / 20.0f);
}
/// <summary>
/// Map decibel to linear in [0, 2] range.
/// </summary>
/// <param name="db">The decibel value to convert</param>
/// <returns>Converted linear value in [0, 2] range</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float DecibelToLinearExtended(float db)
{
float tmp = MathF.Log2(DecibelToLinear(db));
return MathF.Truncate(tmp) + MathF.Pow(2.0f, tmp - MathF.Truncate(tmp));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float DegreesToRadians(float degrees)
{
return degrees * MathF.PI / 180.0f;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Cos(float value)
{
return MathF.Cos(DegreesToRadians(value));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float Sin(float value)
{
return MathF.Sin(DegreesToRadians(value));
}
}
}

View file

@@ -0,0 +1,130 @@
using System;
using System.Numerics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class PcmHelper
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetCountToDecode(int startSampleOffset, int endSampleOffset, int offset, int count)
{
return Math.Min(count, endSampleOffset - startSampleOffset - offset);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static ulong GetBufferOffset<T>(int startSampleOffset, int offset, int channelCount) where T : unmanaged
{
return (ulong)(Unsafe.SizeOf<T>() * channelCount * (startSampleOffset + offset));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetBufferSize<T>(int startSampleOffset, int endSampleOffset, int offset, int count) where T : unmanaged
{
return GetCountToDecode(startSampleOffset, endSampleOffset, offset, count) * Unsafe.SizeOf<T>();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float ConvertSampleToPcmFloat(short sample)
{
return (float)sample / short.MaxValue;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static short ConvertSampleToPcmInt16(float sample)
{
return Saturate(sample * short.MaxValue);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ConvertSampleToPcm8(Span<sbyte> output, ReadOnlySpan<short> input)
{
for (int i = 0; i < input.Length; i++)
{
// Output most significant byte
output[i] = (sbyte)(input[i] >> 8);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ConvertSampleToPcm24(Span<byte> output, ReadOnlySpan<short> input)
{
for (int i = 0; i < input.Length; i++)
{
output[i * 3 + 2] = (byte)(input[i] >> 8);
output[i * 3 + 1] = (byte)(input[i] & 0xff);
output[i * 3 + 0] = 0;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ConvertSampleToPcm32(Span<int> output, ReadOnlySpan<short> input)
{
for (int i = 0; i < input.Length; i++)
{
output[i] = ((int)input[i]) << 16;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ConvertSampleToPcmFloat(Span<float> output, ReadOnlySpan<short> input)
{
for (int i = 0; i < input.Length; i++)
{
output[i] = ConvertSampleToPcmFloat(input[i]);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int Decode(Span<short> output, ReadOnlySpan<short> input, int startSampleOffset, int endSampleOffset, int channelIndex, int channelCount)
{
if (input.IsEmpty || endSampleOffset < startSampleOffset)
{
return 0;
}
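// De-interleave: copy only the requested channel out of the interleaved input samples.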
int decodedCount = input.Length / channelCount;
for (int i = 0; i < decodedCount; i++)
{
output[i] = input[i * channelCount + channelIndex];
}
return decodedCount;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int Decode(Span<short> output, ReadOnlySpan<float> input, int startSampleOffset, int endSampleOffset, int channelIndex, int channelCount)
{
if (input.IsEmpty || endSampleOffset < startSampleOffset)
{
return 0;
}
int decodedCount = input.Length / channelCount;
for (int i = 0; i < decodedCount; i++)
{
output[i] = ConvertSampleToPcmInt16(input[i * channelCount + channelIndex]);
}
return decodedCount;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static short Saturate(float value)
{
if (value > short.MaxValue)
{
return short.MaxValue;
}
if (value < short.MinValue)
{
return short.MinValue;
}
return (short)value;
}
}
}

View file

@@ -0,0 +1,604 @@
using System;
using System.Linq;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class ResamplerHelper
{
#region "Default Quality Lookup Tables"
private static short[] _normalCurveLut0 = new short[]
{
6600, 19426, 6722, 3, 6479, 19424, 6845, 9, 6359, 19419, 6968, 15, 6239, 19412, 7093, 22,
6121, 19403, 7219, 28, 6004, 19391, 7345, 34, 5888, 19377, 7472, 41, 5773, 19361, 7600, 48,
5659, 19342, 7728, 55, 5546, 19321, 7857, 62, 5434, 19298, 7987, 69, 5323, 19273, 8118, 77,
5213, 19245, 8249, 84, 5104, 19215, 8381, 92, 4997, 19183, 8513, 101, 4890, 19148, 8646, 109,
4785, 19112, 8780, 118, 4681, 19073, 8914, 127, 4579, 19031, 9048, 137, 4477, 18988, 9183, 147,
4377, 18942, 9318, 157, 4277, 18895, 9454, 168, 4179, 18845, 9590, 179, 4083, 18793, 9726, 190,
3987, 18738, 9863, 202, 3893, 18682, 10000, 215, 3800, 18624, 10137, 228, 3709, 18563, 10274, 241,
3618, 18500, 10411, 255, 3529, 18436, 10549, 270, 3441, 18369, 10687, 285, 3355, 18300, 10824, 300,
3269, 18230, 10962, 317, 3186, 18157, 11100, 334, 3103, 18082, 11238, 351, 3022, 18006, 11375, 369,
2942, 17927, 11513, 388, 2863, 17847, 11650, 408, 2785, 17765, 11788, 428, 2709, 17681, 11925, 449,
2635, 17595, 12062, 471, 2561, 17507, 12198, 494, 2489, 17418, 12334, 517, 2418, 17327, 12470, 541,
2348, 17234, 12606, 566, 2280, 17140, 12741, 592, 2213, 17044, 12876, 619, 2147, 16946, 13010, 647,
2083, 16846, 13144, 675, 2020, 16745, 13277, 704, 1958, 16643, 13409, 735, 1897, 16539, 13541, 766,
1838, 16434, 13673, 798, 1780, 16327, 13803, 832, 1723, 16218, 13933, 866, 1667, 16109, 14062, 901,
1613, 15998, 14191, 937, 1560, 15885, 14318, 975, 1508, 15772, 14445, 1013, 1457, 15657, 14571, 1052,
1407, 15540, 14695, 1093, 1359, 15423, 14819, 1134, 1312, 15304, 14942, 1177, 1266, 15185, 15064, 1221,
1221, 15064, 15185, 1266, 1177, 14942, 15304, 1312, 1134, 14819, 15423, 1359, 1093, 14695, 15540, 1407,
1052, 14571, 15657, 1457, 1013, 14445, 15772, 1508, 975, 14318, 15885, 1560, 937, 14191, 15998, 1613,
901, 14062, 16109, 1667, 866, 13933, 16218, 1723, 832, 13803, 16327, 1780, 798, 13673, 16434, 1838,
766, 13541, 16539, 1897, 735, 13409, 16643, 1958, 704, 13277, 16745, 2020, 675, 13144, 16846, 2083,
647, 13010, 16946, 2147, 619, 12876, 17044, 2213, 592, 12741, 17140, 2280, 566, 12606, 17234, 2348,
541, 12470, 17327, 2418, 517, 12334, 17418, 2489, 494, 12198, 17507, 2561, 471, 12062, 17595, 2635,
449, 11925, 17681, 2709, 428, 11788, 17765, 2785, 408, 11650, 17847, 2863, 388, 11513, 17927, 2942,
369, 11375, 18006, 3022, 351, 11238, 18082, 3103, 334, 11100, 18157, 3186, 317, 10962, 18230, 3269,
300, 10824, 18300, 3355, 285, 10687, 18369, 3441, 270, 10549, 18436, 3529, 255, 10411, 18500, 3618,
241, 10274, 18563, 3709, 228, 10137, 18624, 3800, 215, 10000, 18682, 3893, 202, 9863, 18738, 3987,
190, 9726, 18793, 4083, 179, 9590, 18845, 4179, 168, 9454, 18895, 4277, 157, 9318, 18942, 4377,
147, 9183, 18988, 4477, 137, 9048, 19031, 4579, 127, 8914, 19073, 4681, 118, 8780, 19112, 4785,
109, 8646, 19148, 4890, 101, 8513, 19183, 4997, 92, 8381, 19215, 5104, 84, 8249, 19245, 5213,
77, 8118, 19273, 5323, 69, 7987, 19298, 5434, 62, 7857, 19321, 5546, 55, 7728, 19342, 5659,
48, 7600, 19361, 5773, 41, 7472, 19377, 5888, 34, 7345, 19391, 6004, 28, 7219, 19403, 6121,
22, 7093, 19412, 6239, 15, 6968, 19419, 6359, 9, 6845, 19424, 6479, 3, 6722, 19426, 6600
};
private static short[] _normalCurveLut1 = new short[]
{
-68, 32639, 69, -5, -200, 32630, 212, -15, -328, 32613, 359, -26, -450, 32586, 512, -36,
-568, 32551, 669, -47, -680, 32507, 832, -58, -788, 32454, 1000, -69, -891, 32393, 1174, -80,
-990, 32323, 1352, -92, -1084, 32244, 1536, -103, -1173, 32157, 1724, -115, -1258, 32061, 1919, -128,
-1338, 31956, 2118, -140, -1414, 31844, 2322, -153, -1486, 31723, 2532, -167, -1554, 31593, 2747, -180,
-1617, 31456, 2967, -194, -1676, 31310, 3192, -209, -1732, 31157, 3422, -224, -1783, 30995, 3657, -240,
-1830, 30826, 3897, -256, -1874, 30649, 4143, -272, -1914, 30464, 4393, -289, -1951, 30272, 4648, -307,
-1984, 30072, 4908, -325, -2014, 29866, 5172, -343, -2040, 29652, 5442, -362, -2063, 29431, 5716, -382,
-2083, 29203, 5994, -403, -2100, 28968, 6277, -424, -2114, 28727, 6565, -445, -2125, 28480, 6857, -468,
-2133, 28226, 7153, -490, -2139, 27966, 7453, -514, -2142, 27700, 7758, -538, -2142, 27428, 8066, -563,
-2141, 27151, 8378, -588, -2136, 26867, 8694, -614, -2130, 26579, 9013, -641, -2121, 26285, 9336, -668,
-2111, 25987, 9663, -696, -2098, 25683, 9993, -724, -2084, 25375, 10326, -753, -2067, 25063, 10662, -783,
-2049, 24746, 11000, -813, -2030, 24425, 11342, -844, -2009, 24100, 11686, -875, -1986, 23771, 12033, -907,
-1962, 23438, 12382, -939, -1937, 23103, 12733, -972, -1911, 22764, 13086, -1005, -1883, 22422, 13441, -1039,
-1855, 22077, 13798, -1072, -1825, 21729, 14156, -1107, -1795, 21380, 14516, -1141, -1764, 21027, 14877, -1176,
-1732, 20673, 15239, -1211, -1700, 20317, 15602, -1246, -1667, 19959, 15965, -1282, -1633, 19600, 16329, -1317,
-1599, 19239, 16694, -1353, -1564, 18878, 17058, -1388, -1530, 18515, 17423, -1424, -1495, 18151, 17787, -1459,
-1459, 17787, 18151, -1495, -1424, 17423, 18515, -1530, -1388, 17058, 18878, -1564, -1353, 16694, 19239, -1599,
-1317, 16329, 19600, -1633, -1282, 15965, 19959, -1667, -1246, 15602, 20317, -1700, -1211, 15239, 20673, -1732,
-1176, 14877, 21027, -1764, -1141, 14516, 21380, -1795, -1107, 14156, 21729, -1825, -1072, 13798, 22077, -1855,
-1039, 13441, 22422, -1883, -1005, 13086, 22764, -1911, -972, 12733, 23103, -1937, -939, 12382, 23438, -1962,
-907, 12033, 23771, -1986, -875, 11686, 24100, -2009, -844, 11342, 24425, -2030, -813, 11000, 24746, -2049,
-783, 10662, 25063, -2067, -753, 10326, 25375, -2084, -724, 9993, 25683, -2098, -696, 9663, 25987, -2111,
-668, 9336, 26285, -2121, -641, 9013, 26579, -2130, -614, 8694, 26867, -2136, -588, 8378, 27151, -2141,
-563, 8066, 27428, -2142, -538, 7758, 27700, -2142, -514, 7453, 27966, -2139, -490, 7153, 28226, -2133,
-468, 6857, 28480, -2125, -445, 6565, 28727, -2114, -424, 6277, 28968, -2100, -403, 5994, 29203, -2083,
-382, 5716, 29431, -2063, -362, 5442, 29652, -2040, -343, 5172, 29866, -2014, -325, 4908, 30072, -1984,
-307, 4648, 30272, -1951, -289, 4393, 30464, -1914, -272, 4143, 30649, -1874, -256, 3897, 30826, -1830,
-240, 3657, 30995, -1783, -224, 3422, 31157, -1732, -209, 3192, 31310, -1676, -194, 2967, 31456, -1617,
-180, 2747, 31593, -1554, -167, 2532, 31723, -1486, -153, 2322, 31844, -1414, -140, 2118, 31956, -1338,
-128, 1919, 32061, -1258, -115, 1724, 32157, -1173, -103, 1536, 32244, -1084, -92, 1352, 32323, -990,
-80, 1174, 32393, -891, -69, 1000, 32454, -788, -58, 832, 32507, -680, -47, 669, 32551, -568,
-36, 512, 32586, -450, -26, 359, 32613, -328, -15, 212, 32630, -200, -5, 69, 32639, -68
};
private static short[] _normalCurveLut2 = new short[]
{
3195, 26287, 3329, -32, 3064, 26281, 3467, -34, 2936, 26270, 3608, -38, 2811, 26253, 3751, -42,
2688, 26230, 3897, -46, 2568, 26202, 4046, -50, 2451, 26169, 4199, -54, 2338, 26130, 4354, -58,
2227, 26085, 4512, -63, 2120, 26035, 4673, -67, 2015, 25980, 4837, -72, 1912, 25919, 5004, -76,
1813, 25852, 5174, -81, 1716, 25780, 5347, -87, 1622, 25704, 5522, -92, 1531, 25621, 5701, -98,
1442, 25533, 5882, -103, 1357, 25440, 6066, -109, 1274, 25342, 6253, -115, 1193, 25239, 6442, -121,
1115, 25131, 6635, -127, 1040, 25018, 6830, -133, 967, 24899, 7027, -140, 897, 24776, 7227, -146,
829, 24648, 7430, -153, 764, 24516, 7635, -159, 701, 24379, 7842, -166, 641, 24237, 8052, -174,
583, 24091, 8264, -181, 526, 23940, 8478, -187, 472, 23785, 8695, -194, 420, 23626, 8914, -202,
371, 23462, 9135, -209, 324, 23295, 9358, -215, 279, 23123, 9583, -222, 236, 22948, 9809, -230,
194, 22769, 10038, -237, 154, 22586, 10269, -243, 117, 22399, 10501, -250, 81, 22208, 10735, -258,
47, 22015, 10970, -265, 15, 21818, 11206, -271, -16, 21618, 11444, -277, -44, 21415, 11684, -283,
-71, 21208, 11924, -290, -97, 20999, 12166, -296, -121, 20786, 12409, -302, -143, 20571, 12653, -306,
-163, 20354, 12898, -311, -183, 20134, 13143, -316, -201, 19911, 13389, -321, -218, 19686, 13635, -325,
-234, 19459, 13882, -328, -248, 19230, 14130, -332, -261, 18998, 14377, -335, -273, 18765, 14625, -337,
-284, 18531, 14873, -339, -294, 18295, 15121, -341, -302, 18057, 15369, -341, -310, 17817, 15617, -341,
-317, 17577, 15864, -340, -323, 17335, 16111, -340, -328, 17092, 16357, -338, -332, 16848, 16603, -336,
-336, 16603, 16848, -332, -338, 16357, 17092, -328, -340, 16111, 17335, -323, -340, 15864, 17577, -317,
-341, 15617, 17817, -310, -341, 15369, 18057, -302, -341, 15121, 18295, -294, -339, 14873, 18531, -284,
-337, 14625, 18765, -273, -335, 14377, 18998, -261, -332, 14130, 19230, -248, -328, 13882, 19459, -234,
-325, 13635, 19686, -218, -321, 13389, 19911, -201, -316, 13143, 20134, -183, -311, 12898, 20354, -163,
-306, 12653, 20571, -143, -302, 12409, 20786, -121, -296, 12166, 20999, -97, -290, 11924, 21208, -71,
-283, 11684, 21415, -44, -277, 11444, 21618, -16, -271, 11206, 21818, 15, -265, 10970, 22015, 47,
-258, 10735, 22208, 81, -250, 10501, 22399, 117, -243, 10269, 22586, 154, -237, 10038, 22769, 194,
-230, 9809, 22948, 236, -222, 9583, 23123, 279, -215, 9358, 23295, 324, -209, 9135, 23462, 371,
-202, 8914, 23626, 420, -194, 8695, 23785, 472, -187, 8478, 23940, 526, -181, 8264, 24091, 583,
-174, 8052, 24237, 641, -166, 7842, 24379, 701, -159, 7635, 24516, 764, -153, 7430, 24648, 829,
-146, 7227, 24776, 897, -140, 7027, 24899, 967, -133, 6830, 25018, 1040, -127, 6635, 25131, 1115,
-121, 6442, 25239, 1193, -115, 6253, 25342, 1274, -109, 6066, 25440, 1357, -103, 5882, 25533, 1442,
-98, 5701, 25621, 1531, -92, 5522, 25704, 1622, -87, 5347, 25780, 1716, -81, 5174, 25852, 1813,
-76, 5004, 25919, 1912, -72, 4837, 25980, 2015, -67, 4673, 26035, 2120, -63, 4512, 26085, 2227,
-58, 4354, 26130, 2338, -54, 4199, 26169, 2451, -50, 4046, 26202, 2568, -46, 3897, 26230, 2688,
-42, 3751, 26253, 2811, -38, 3608, 26270, 2936, -34, 3467, 26281, 3064, -32, 3329, 26287, 3195
};
#endregion
#region "High Quality Lookup Tables"
private static short[] _highCurveLut0 = new short[]
{
-582, -23, 8740, 16386, 8833, 8, -590, 0, -573, -54, 8647, 16385, 8925, 40, -598, -1,
-565, -84, 8555, 16383, 9018, 72, -606, -1, -557, -113, 8462, 16379, 9110, 105, -614, -2,
-549, -142, 8370, 16375, 9203, 139, -622, -2, -541, -170, 8277, 16369, 9295, 173, -630, -3,
-533, -198, 8185, 16362, 9387, 208, -638, -4, -525, -225, 8093, 16354, 9480, 244, -646, -5,
-516, -251, 8000, 16344, 9572, 280, -654, -5, -508, -277, 7908, 16334, 9664, 317, -662, -6,
-500, -302, 7816, 16322, 9756, 355, -670, -7, -492, -327, 7724, 16310, 9847, 393, -678, -8,
-484, -351, 7632, 16296, 9939, 432, -686, -9, -476, -374, 7540, 16281, 10030, 471, -694, -10,
-468, -397, 7449, 16265, 10121, 511, -702, -11, -460, -419, 7357, 16247, 10212, 552, -709, -13,
-452, -441, 7266, 16229, 10303, 593, -717, -14, -445, -462, 7175, 16209, 10394, 635, -724, -15,
-437, -483, 7084, 16189, 10484, 678, -732, -16, -429, -503, 6994, 16167, 10574, 722, -739, -18,
-421, -523, 6903, 16144, 10664, 766, -747, -19, -414, -542, 6813, 16120, 10754, 810, -754, -21,
-406, -560, 6723, 16095, 10843, 856, -761, -22, -398, -578, 6633, 16068, 10932, 902, -768, -24,
-391, -596, 6544, 16041, 11021, 949, -775, -26, -383, -612, 6454, 16012, 11109, 996, -782, -27,
-376, -629, 6366, 15983, 11197, 1044, -789, -29, -368, -645, 6277, 15952, 11285, 1093, -796, -31,
-361, -660, 6189, 15920, 11372, 1142, -802, -33, -354, -675, 6100, 15887, 11459, 1192, -809, -35,
-347, -689, 6013, 15853, 11546, 1243, -815, -37, -339, -703, 5925, 15818, 11632, 1294, -821, -39,
-332, -717, 5838, 15782, 11718, 1346, -827, -41, -325, -730, 5751, 15745, 11803, 1399, -833, -43,
-318, -742, 5665, 15707, 11888, 1452, -839, -46, -312, -754, 5579, 15668, 11973, 1506, -845, -48,
-305, -766, 5493, 15627, 12057, 1561, -850, -50, -298, -777, 5408, 15586, 12140, 1616, -855, -53,
-291, -787, 5323, 15544, 12224, 1672, -861, -56, -285, -798, 5239, 15500, 12306, 1729, -866, -58,
-278, -807, 5155, 15456, 12388, 1786, -871, -61, -272, -817, 5071, 15410, 12470, 1844, -875, -64,
-265, -826, 4988, 15364, 12551, 1902, -880, -67, -259, -834, 4905, 15317, 12631, 1962, -884, -70,
-253, -842, 4823, 15268, 12711, 2022, -888, -73, -247, -850, 4741, 15219, 12790, 2082, -892, -76,
-241, -857, 4659, 15168, 12869, 2143, -896, -79, -235, -864, 4578, 15117, 12947, 2205, -899, -82,
-229, -870, 4498, 15065, 13025, 2267, -903, -85, -223, -876, 4417, 15012, 13102, 2331, -906, -89,
-217, -882, 4338, 14958, 13178, 2394, -909, -92, -211, -887, 4259, 14903, 13254, 2459, -911, -96,
-206, -892, 4180, 14847, 13329, 2523, -914, -100, -200, -896, 4102, 14790, 13403, 2589, -916, -103,
-195, -900, 4024, 14732, 13477, 2655, -918, -107, -190, -904, 3947, 14673, 13550, 2722, -919, -111,
-184, -908, 3871, 14614, 13622, 2789, -921, -115, -179, -911, 3795, 14553, 13693, 2857, -922, -119,
-174, -913, 3719, 14492, 13764, 2926, -923, -123, -169, -916, 3644, 14430, 13834, 2995, -923, -127,
-164, -918, 3570, 14367, 13904, 3065, -924, -132, -159, -920, 3496, 14303, 13972, 3136, -924, -136,
-154, -921, 3423, 14239, 14040, 3207, -924, -140, -150, -922, 3350, 14173, 14107, 3278, -923, -145,
-145, -923, 3278, 14107, 14173, 3350, -922, -150, -140, -924, 3207, 14040, 14239, 3423, -921, -154,
-136, -924, 3136, 13972, 14303, 3496, -920, -159, -132, -924, 3065, 13904, 14367, 3570, -918, -164,
-127, -923, 2995, 13834, 14430, 3644, -916, -169, -123, -923, 2926, 13764, 14492, 3719, -913, -174,
-119, -922, 2857, 13693, 14553, 3795, -911, -179, -115, -921, 2789, 13622, 14614, 3871, -908, -184,
-111, -919, 2722, 13550, 14673, 3947, -904, -190, -107, -918, 2655, 13477, 14732, 4024, -900, -195,
-103, -916, 2589, 13403, 14790, 4102, -896, -200, -100, -914, 2523, 13329, 14847, 4180, -892, -206,
-96, -911, 2459, 13254, 14903, 4259, -887, -211, -92, -909, 2394, 13178, 14958, 4338, -882, -217,
-89, -906, 2331, 13102, 15012, 4417, -876, -223, -85, -903, 2267, 13025, 15065, 4498, -870, -229,
-82, -899, 2205, 12947, 15117, 4578, -864, -235, -79, -896, 2143, 12869, 15168, 4659, -857, -241,
-76, -892, 2082, 12790, 15219, 4741, -850, -247, -73, -888, 2022, 12711, 15268, 4823, -842, -253,
-70, -884, 1962, 12631, 15317, 4905, -834, -259, -67, -880, 1902, 12551, 15364, 4988, -826, -265,
-64, -875, 1844, 12470, 15410, 5071, -817, -272, -61, -871, 1786, 12388, 15456, 5155, -807, -278,
-58, -866, 1729, 12306, 15500, 5239, -798, -285, -56, -861, 1672, 12224, 15544, 5323, -787, -291,
-53, -855, 1616, 12140, 15586, 5408, -777, -298, -50, -850, 1561, 12057, 15627, 5493, -766, -305,
-48, -845, 1506, 11973, 15668, 5579, -754, -312, -46, -839, 1452, 11888, 15707, 5665, -742, -318,
-43, -833, 1399, 11803, 15745, 5751, -730, -325, -41, -827, 1346, 11718, 15782, 5838, -717, -332,
-39, -821, 1294, 11632, 15818, 5925, -703, -339, -37, -815, 1243, 11546, 15853, 6013, -689, -347,
-35, -809, 1192, 11459, 15887, 6100, -675, -354, -33, -802, 1142, 11372, 15920, 6189, -660, -361,
-31, -796, 1093, 11285, 15952, 6277, -645, -368, -29, -789, 1044, 11197, 15983, 6366, -629, -376,
-27, -782, 996, 11109, 16012, 6454, -612, -383, -26, -775, 949, 11021, 16041, 6544, -596, -391,
-24, -768, 902, 10932, 16068, 6633, -578, -398, -22, -761, 856, 10843, 16095, 6723, -560, -406,
-21, -754, 810, 10754, 16120, 6813, -542, -414, -19, -747, 766, 10664, 16144, 6903, -523, -421,
-18, -739, 722, 10574, 16167, 6994, -503, -429, -16, -732, 678, 10484, 16189, 7084, -483, -437,
-15, -724, 635, 10394, 16209, 7175, -462, -445, -14, -717, 593, 10303, 16229, 7266, -441, -452,
-13, -709, 552, 10212, 16247, 7357, -419, -460, -11, -702, 511, 10121, 16265, 7449, -397, -468,
-10, -694, 471, 10030, 16281, 7540, -374, -476, -9, -686, 432, 9939, 16296, 7632, -351, -484,
-8, -678, 393, 9847, 16310, 7724, -327, -492, -7, -670, 355, 9756, 16322, 7816, -302, -500,
-6, -662, 317, 9664, 16334, 7908, -277, -508, -5, -654, 280, 9572, 16344, 8000, -251, -516,
-5, -646, 244, 9480, 16354, 8093, -225, -525, -4, -638, 208, 9387, 16362, 8185, -198, -533,
-3, -630, 173, 9295, 16369, 8277, -170, -541, -2, -622, 139, 9203, 16375, 8370, -142, -549,
-2, -614, 105, 9110, 16379, 8462, -113, -557, -1, -606, 72, 9018, 16383, 8555, -84, -565,
-1, -598, 40, 8925, 16385, 8647, -54, -573, 0, -590, 8, 8833, 16386, 8740, -23, -582,
};
private static short[] _highCurveLut1 = new short[]
{
-12, 47, -134, 32767, 81, -16, 2, 0, -26, 108, -345, 32760, 301, -79, 17, -1,
-40, 168, -552, 32745, 526, -144, 32, -2, -53, 226, -753, 32723, 755, -210, 47, -3,
-66, 284, -950, 32694, 989, -277, 63, -5, -78, 340, -1143, 32658, 1226, -346, 79, -6,
-90, 394, -1331, 32615, 1469, -415, 96, -8, -101, 447, -1514, 32564, 1715, -486, 113, -9,
-112, 499, -1692, 32506, 1966, -557, 130, -11, -123, 550, -1865, 32441, 2221, -630, 148, -13,
-133, 599, -2034, 32369, 2480, -703, 166, -14, -143, 646, -2198, 32290, 2743, -778, 185, -16,
-152, 693, -2357, 32204, 3010, -853, 204, -18, -162, 738, -2512, 32110, 3281, -929, 223, -20,
-170, 781, -2662, 32010, 3555, -1007, 242, -23, -178, 823, -2807, 31903, 3834, -1084, 262, -25,
-186, 864, -2947, 31789, 4116, -1163, 282, -27, -194, 903, -3082, 31668, 4403, -1242, 303, -30,
-201, 940, -3213, 31540, 4692, -1322, 323, -32, -207, 977, -3339, 31406, 4985, -1403, 344, -35,
-214, 1011, -3460, 31265, 5282, -1484, 365, -37, -220, 1045, -3577, 31117, 5582, -1566, 387, -40,
-225, 1077, -3688, 30963, 5885, -1648, 409, -43, -230, 1107, -3796, 30802, 6191, -1730, 431, -46,
-235, 1136, -3898, 30635, 6501, -1813, 453, -49, -240, 1164, -3996, 30462, 6813, -1896, 475, -52,
-244, 1190, -4089, 30282, 7128, -1980, 498, -55, -247, 1215, -4178, 30097, 7446, -2064, 520, -58,
-251, 1239, -4262, 29905, 7767, -2148, 543, -62, -254, 1261, -4342, 29707, 8091, -2231, 566, -65,
-257, 1281, -4417, 29503, 8416, -2315, 589, -69, -259, 1301, -4488, 29293, 8745, -2399, 613, -72,
-261, 1319, -4555, 29078, 9075, -2483, 636, -76, -263, 1336, -4617, 28857, 9408, -2567, 659, -80,
-265, 1351, -4674, 28631, 9743, -2651, 683, -83, -266, 1365, -4728, 28399, 10080, -2734, 706, -87,
-267, 1378, -4777, 28161, 10418, -2817, 730, -91, -267, 1389, -4822, 27919, 10759, -2899, 753, -95,
-268, 1400, -4863, 27671, 11100, -2981, 777, -99, -268, 1409, -4900, 27418, 11444, -3063, 800, -103,
-268, 1416, -4933, 27161, 11789, -3144, 824, -107, -267, 1423, -4962, 26898, 12135, -3224, 847, -112,
-267, 1428, -4987, 26631, 12482, -3303, 870, -116, -266, 1433, -5008, 26359, 12830, -3382, 893, -120,
-265, 1436, -5026, 26083, 13179, -3460, 916, -125, -264, 1438, -5039, 25802, 13529, -3537, 939, -129,
-262, 1438, -5049, 25517, 13880, -3613, 962, -133, -260, 1438, -5055, 25228, 14231, -3687, 984, -138,
-258, 1437, -5058, 24935, 14582, -3761, 1006, -142, -256, 1435, -5058, 24639, 14934, -3833, 1028, -147,
-254, 1431, -5053, 24338, 15286, -3904, 1049, -151, -252, 1427, -5046, 24034, 15638, -3974, 1071, -155,
-249, 1422, -5035, 23726, 15989, -4042, 1091, -160, -246, 1416, -5021, 23415, 16341, -4109, 1112, -164,
-243, 1408, -5004, 23101, 16691, -4174, 1132, -169, -240, 1400, -4984, 22783, 17042, -4237, 1152, -173,
-237, 1392, -4960, 22463, 17392, -4299, 1171, -178, -234, 1382, -4934, 22140, 17740, -4358, 1190, -182,
-230, 1371, -4905, 21814, 18088, -4416, 1209, -186, -227, 1360, -4873, 21485, 18435, -4472, 1226, -191,
-223, 1348, -4839, 21154, 18781, -4526, 1244, -195, -219, 1335, -4801, 20821, 19125, -4578, 1260, -199,
-215, 1321, -4761, 20486, 19468, -4627, 1277, -203, -211, 1307, -4719, 20148, 19809, -4674, 1292, -207,
-207, 1292, -4674, 19809, 20148, -4719, 1307, -211, -203, 1277, -4627, 19468, 20486, -4761, 1321, -215,
-199, 1260, -4578, 19125, 20821, -4801, 1335, -219, -195, 1244, -4526, 18781, 21154, -4839, 1348, -223,
-191, 1226, -4472, 18435, 21485, -4873, 1360, -227, -186, 1209, -4416, 18088, 21814, -4905, 1371, -230,
-182, 1190, -4358, 17740, 22140, -4934, 1382, -234, -178, 1171, -4299, 17392, 22463, -4960, 1392, -237,
-173, 1152, -4237, 17042, 22783, -4984, 1400, -240, -169, 1132, -4174, 16691, 23101, -5004, 1408, -243,
-164, 1112, -4109, 16341, 23415, -5021, 1416, -246, -160, 1091, -4042, 15989, 23726, -5035, 1422, -249,
-155, 1071, -3974, 15638, 24034, -5046, 1427, -252, -151, 1049, -3904, 15286, 24338, -5053, 1431, -254,
-147, 1028, -3833, 14934, 24639, -5058, 1435, -256, -142, 1006, -3761, 14582, 24935, -5058, 1437, -258,
-138, 984, -3687, 14231, 25228, -5055, 1438, -260, -133, 962, -3613, 13880, 25517, -5049, 1438, -262,
-129, 939, -3537, 13529, 25802, -5039, 1438, -264, -125, 916, -3460, 13179, 26083, -5026, 1436, -265,
-120, 893, -3382, 12830, 26359, -5008, 1433, -266, -116, 870, -3303, 12482, 26631, -4987, 1428, -267,
-112, 847, -3224, 12135, 26898, -4962, 1423, -267, -107, 824, -3144, 11789, 27161, -4933, 1416, -268,
-103, 800, -3063, 11444, 27418, -4900, 1409, -268, -99, 777, -2981, 11100, 27671, -4863, 1400, -268,
-95, 753, -2899, 10759, 27919, -4822, 1389, -267, -91, 730, -2817, 10418, 28161, -4777, 1378, -267,
-87, 706, -2734, 10080, 28399, -4728, 1365, -266, -83, 683, -2651, 9743, 28631, -4674, 1351, -265,
-80, 659, -2567, 9408, 28857, -4617, 1336, -263, -76, 636, -2483, 9075, 29078, -4555, 1319, -261,
-72, 613, -2399, 8745, 29293, -4488, 1301, -259, -69, 589, -2315, 8416, 29503, -4417, 1281, -257,
-65, 566, -2231, 8091, 29707, -4342, 1261, -254, -62, 543, -2148, 7767, 29905, -4262, 1239, -251,
-58, 520, -2064, 7446, 30097, -4178, 1215, -247, -55, 498, -1980, 7128, 30282, -4089, 1190, -244,
-52, 475, -1896, 6813, 30462, -3996, 1164, -240, -49, 453, -1813, 6501, 30635, -3898, 1136, -235,
-46, 431, -1730, 6191, 30802, -3796, 1107, -230, -43, 409, -1648, 5885, 30963, -3688, 1077, -225,
-40, 387, -1566, 5582, 31117, -3577, 1045, -220, -37, 365, -1484, 5282, 31265, -3460, 1011, -214,
-35, 344, -1403, 4985, 31406, -3339, 977, -207, -32, 323, -1322, 4692, 31540, -3213, 940, -201,
-30, 303, -1242, 4403, 31668, -3082, 903, -194, -27, 282, -1163, 4116, 31789, -2947, 864, -186,
-25, 262, -1084, 3834, 31903, -2807, 823, -178, -23, 242, -1007, 3555, 32010, -2662, 781, -170,
-20, 223, -929, 3281, 32110, -2512, 738, -162, -18, 204, -853, 3010, 32204, -2357, 693, -152,
-16, 185, -778, 2743, 32290, -2198, 646, -143, -14, 166, -703, 2480, 32369, -2034, 599, -133,
-13, 148, -630, 2221, 32441, -1865, 550, -123, -11, 130, -557, 1966, 32506, -1692, 499, -112,
-9, 113, -486, 1715, 32564, -1514, 447, -101, -8, 96, -415, 1469, 32615, -1331, 394, -90,
-6, 79, -346, 1226, 32658, -1143, 340, -78, -5, 63, -277, 989, 32694, -950, 284, -66,
-3, 47, -210, 755, 32723, -753, 226, -53, -2, 32, -144, 526, 32745, -552, 168, -40,
-1, 17, -79, 301, 32760, -345, 108, -26, 0, 2, -16, 81, 32767, -134, 47, -12,
};
private static short[] _highCurveLut2 = new short[]
{
418, -2538, 6118, 24615, 6298, -2563, 417, 0, 420, -2512, 5939, 24611, 6479, -2588, 415, 1,
421, -2485, 5761, 24605, 6662, -2612, 412, 2, 422, -2458, 5585, 24595, 6846, -2635, 409, 3,
423, -2430, 5410, 24582, 7030, -2658, 406, 4, 423, -2402, 5236, 24565, 7216, -2680, 403, 5,
423, -2373, 5064, 24546, 7403, -2701, 399, 6, 423, -2343, 4893, 24523, 7591, -2721, 395, 7,
423, -2313, 4724, 24496, 7780, -2741, 391, 8, 422, -2283, 4556, 24467, 7970, -2759, 386, 9,
421, -2252, 4390, 24434, 8161, -2777, 381, 11, 420, -2221, 4225, 24398, 8353, -2794, 376, 12,
419, -2190, 4062, 24359, 8545, -2810, 370, 14, 418, -2158, 3900, 24316, 8739, -2825, 364, 15,
416, -2126, 3740, 24271, 8933, -2839, 358, 17, 414, -2093, 3582, 24222, 9127, -2851, 351, 19,
412, -2060, 3425, 24170, 9323, -2863, 344, 21, 410, -2027, 3270, 24115, 9519, -2874, 336, 22,
407, -1993, 3117, 24056, 9715, -2884, 328, 24, 404, -1960, 2966, 23995, 9912, -2893, 319, 26,
402, -1926, 2816, 23930, 10110, -2900, 311, 29, 398, -1892, 2668, 23863, 10308, -2907, 301, 31,
395, -1858, 2522, 23792, 10506, -2912, 292, 33, 392, -1823, 2378, 23718, 10705, -2916, 282, 35,
389, -1789, 2235, 23641, 10904, -2919, 271, 38, 385, -1754, 2095, 23561, 11103, -2920, 261, 40,
381, -1719, 1956, 23478, 11303, -2921, 249, 43, 377, -1684, 1819, 23393, 11502, -2920, 238, 45,
373, -1649, 1684, 23304, 11702, -2917, 225, 48, 369, -1615, 1551, 23212, 11902, -2914, 213, 51,
365, -1580, 1420, 23118, 12102, -2909, 200, 54, 361, -1545, 1291, 23020, 12302, -2902, 186, 57,
356, -1510, 1163, 22920, 12502, -2895, 173, 60, 352, -1475, 1038, 22817, 12702, -2885, 158, 63,
347, -1440, 915, 22711, 12901, -2875, 143, 66, 342, -1405, 793, 22602, 13101, -2863, 128, 69,
338, -1370, 674, 22491, 13300, -2849, 113, 73, 333, -1335, 557, 22377, 13499, -2834, 97, 76,
328, -1301, 441, 22260, 13698, -2817, 80, 80, 323, -1266, 328, 22141, 13896, -2799, 63, 83,
318, -1232, 217, 22019, 14094, -2779, 46, 87, 313, -1197, 107, 21894, 14291, -2758, 28, 91,
307, -1163, 0, 21767, 14488, -2735, 9, 95, 302, -1129, -105, 21637, 14684, -2710, -9, 98,
297, -1096, -208, 21506, 14879, -2684, -29, 102, 292, -1062, -310, 21371, 15074, -2656, -48, 106,
286, -1029, -409, 21234, 15268, -2626, -69, 111, 281, -996, -506, 21095, 15461, -2595, -89, 115,
276, -963, -601, 20954, 15654, -2562, -110, 119, 270, -930, -694, 20810, 15846, -2527, -132, 123,
265, -898, -785, 20664, 16036, -2490, -154, 128, 260, -866, -874, 20516, 16226, -2452, -176, 132,
254, -834, -961, 20366, 16415, -2411, -199, 137, 249, -803, -1046, 20213, 16602, -2369, -222, 141,
243, -771, -1129, 20059, 16789, -2326, -246, 146, 238, -740, -1209, 19902, 16974, -2280, -270, 151,
233, -710, -1288, 19744, 17158, -2232, -294, 156, 227, -680, -1365, 19583, 17341, -2183, -319, 160,
222, -650, -1440, 19421, 17523, -2132, -345, 165, 217, -620, -1513, 19257, 17703, -2079, -370, 170,
211, -591, -1583, 19091, 17882, -2023, -396, 175, 206, -562, -1652, 18923, 18059, -1966, -423, 180,
201, -533, -1719, 18754, 18235, -1907, -450, 185, 196, -505, -1784, 18582, 18410, -1847, -477, 191,
191, -477, -1847, 18410, 18582, -1784, -505, 196, 185, -450, -1907, 18235, 18754, -1719, -533, 201,
180, -423, -1966, 18059, 18923, -1652, -562, 206, 175, -396, -2023, 17882, 19091, -1583, -591, 211,
170, -370, -2079, 17703, 19257, -1513, -620, 217, 165, -345, -2132, 17523, 19421, -1440, -650, 222,
160, -319, -2183, 17341, 19583, -1365, -680, 227, 156, -294, -2232, 17158, 19744, -1288, -710, 233,
151, -270, -2280, 16974, 19902, -1209, -740, 238, 146, -246, -2326, 16789, 20059, -1129, -771, 243,
141, -222, -2369, 16602, 20213, -1046, -803, 249, 137, -199, -2411, 16415, 20366, -961, -834, 254,
132, -176, -2452, 16226, 20516, -874, -866, 260, 128, -154, -2490, 16036, 20664, -785, -898, 265,
123, -132, -2527, 15846, 20810, -694, -930, 270, 119, -110, -2562, 15654, 20954, -601, -963, 276,
115, -89, -2595, 15461, 21095, -506, -996, 281, 111, -69, -2626, 15268, 21234, -409, -1029, 286,
106, -48, -2656, 15074, 21371, -310, -1062, 292, 102, -29, -2684, 14879, 21506, -208, -1096, 297,
98, -9, -2710, 14684, 21637, -105, -1129, 302, 95, 9, -2735, 14488, 21767, 0, -1163, 307,
91, 28, -2758, 14291, 21894, 107, -1197, 313, 87, 46, -2779, 14094, 22019, 217, -1232, 318,
83, 63, -2799, 13896, 22141, 328, -1266, 323, 80, 80, -2817, 13698, 22260, 441, -1301, 328,
76, 97, -2834, 13499, 22377, 557, -1335, 333, 73, 113, -2849, 13300, 22491, 674, -1370, 338,
69, 128, -2863, 13101, 22602, 793, -1405, 342, 66, 143, -2875, 12901, 22711, 915, -1440, 347,
63, 158, -2885, 12702, 22817, 1038, -1475, 352, 60, 173, -2895, 12502, 22920, 1163, -1510, 356,
57, 186, -2902, 12302, 23020, 1291, -1545, 361, 54, 200, -2909, 12102, 23118, 1420, -1580, 365,
51, 213, -2914, 11902, 23212, 1551, -1615, 369, 48, 225, -2917, 11702, 23304, 1684, -1649, 373,
45, 238, -2920, 11502, 23393, 1819, -1684, 377, 43, 249, -2921, 11303, 23478, 1956, -1719, 381,
40, 261, -2920, 11103, 23561, 2095, -1754, 385, 38, 271, -2919, 10904, 23641, 2235, -1789, 389,
35, 282, -2916, 10705, 23718, 2378, -1823, 392, 33, 292, -2912, 10506, 23792, 2522, -1858, 395,
31, 301, -2907, 10308, 23863, 2668, -1892, 398, 29, 311, -2900, 10110, 23930, 2816, -1926, 402,
26, 319, -2893, 9912, 23995, 2966, -1960, 404, 24, 328, -2884, 9715, 24056, 3117, -1993, 407,
22, 336, -2874, 9519, 24115, 3270, -2027, 410, 21, 344, -2863, 9323, 24170, 3425, -2060, 412,
19, 351, -2851, 9127, 24222, 3582, -2093, 414, 17, 358, -2839, 8933, 24271, 3740, -2126, 416,
15, 364, -2825, 8739, 24316, 3900, -2158, 418, 14, 370, -2810, 8545, 24359, 4062, -2190, 419,
12, 376, -2794, 8353, 24398, 4225, -2221, 420, 11, 381, -2777, 8161, 24434, 4390, -2252, 421,
9, 386, -2759, 7970, 24467, 4556, -2283, 422, 8, 391, -2741, 7780, 24496, 4724, -2313, 423,
7, 395, -2721, 7591, 24523, 4893, -2343, 423, 6, 399, -2701, 7403, 24546, 5064, -2373, 423,
5, 403, -2680, 7216, 24565, 5236, -2402, 423, 4, 406, -2658, 7030, 24582, 5410, -2430, 423,
3, 409, -2635, 6846, 24595, 5585, -2458, 422, 2, 412, -2612, 6662, 24605, 5761, -2485, 421,
1, 415, -2588, 6479, 24611, 5939, -2512, 420, 0, 417, -2563, 6298, 24615, 6118, -2538, 418,
};
#endregion
private static float[] _normalCurveLut0F;
private static float[] _normalCurveLut1F;
private static float[] _normalCurveLut2F;
private static float[] _highCurveLut0F;
private static float[] _highCurveLut1F;
private static float[] _highCurveLut2F;
static ResamplerHelper()
{
_normalCurveLut0F = _normalCurveLut0.Select(x => x / 32768f).ToArray();
_normalCurveLut1F = _normalCurveLut1.Select(x => x / 32768f).ToArray();
_normalCurveLut2F = _normalCurveLut2.Select(x => x / 32768f).ToArray();
_highCurveLut0F = _highCurveLut0.Select(x => x / 32768f).ToArray();
_highCurveLut1F = _highCurveLut1.Select(x => x / 32768f).ToArray();
_highCurveLut2F = _highCurveLut2.Select(x => x / 32768f).ToArray();
}
private const int FixedPointPrecision = 15;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Resample(Span<float> outputBuffer, ReadOnlySpan<short> inputBuffer, float ratio, ref float fraction, int sampleCount, SampleRateConversionQuality srcQuality, bool needPitch)
{
switch (srcQuality)
{
case SampleRateConversionQuality.Default:
ResampleDefaultQuality(outputBuffer, inputBuffer, ratio, ref fraction, sampleCount, needPitch);
break;
case SampleRateConversionQuality.Low:
ResampleLowQuality(outputBuffer, inputBuffer, ratio, ref fraction, sampleCount);
break;
case SampleRateConversionQuality.High:
ResampleHighQuality(outputBuffer, inputBuffer, ratio, ref fraction, sampleCount);
break;
default:
throw new NotImplementedException($"{srcQuality}");
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<float> GetDefaultParameter(float ratio)
{
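// Select the filter curve from the conversion ratio: up-sampling, heavy down-sampling (ratio above ~1.33), or moderate down-sampling.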
if (ratio <= 1.0f)
{
return _normalCurveLut1F;
}
else if (ratio > 1.333313f)
{
return _normalCurveLut0F;
}
return _normalCurveLut2F;
}
private unsafe static void ResampleDefaultQuality(Span<float> outputBuffer, ReadOnlySpan<short> inputBuffer, float ratio, ref float fraction, int sampleCount, bool needPitch)
{
ReadOnlySpan<float> parameters = GetDefaultParameter(ratio);
int inputBufferIndex = 0, i = 0;
// TODO: REV8 fast path (when needPitch == false the input index progression is constant + we need SIMD)
if (Sse41.IsSupported)
{
if (ratio == 1f)
{
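// 1:1 ratio fast path: a single set of filter coefficients (phase 0) is applied to every group of four samples.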
fixed (short* pInput = inputBuffer)
fixed (float* pOutput = outputBuffer, pParameters = parameters)
{
Vector128<float> parameter = Sse.LoadVector128(pParameters);
for (; i < (sampleCount & ~3); i += 4)
{
Vector128<int> intInput0 = Sse41.ConvertToVector128Int32(pInput + (uint)i);
Vector128<int> intInput1 = Sse41.ConvertToVector128Int32(pInput + (uint)i + 1);
Vector128<int> intInput2 = Sse41.ConvertToVector128Int32(pInput + (uint)i + 2);
Vector128<int> intInput3 = Sse41.ConvertToVector128Int32(pInput + (uint)i + 3);
Vector128<float> input0 = Sse2.ConvertToVector128Single(intInput0);
Vector128<float> input1 = Sse2.ConvertToVector128Single(intInput1);
Vector128<float> input2 = Sse2.ConvertToVector128Single(intInput2);
Vector128<float> input3 = Sse2.ConvertToVector128Single(intInput3);
Vector128<float> mix0 = Sse.Multiply(input0, parameter);
Vector128<float> mix1 = Sse.Multiply(input1, parameter);
Vector128<float> mix2 = Sse.Multiply(input2, parameter);
Vector128<float> mix3 = Sse.Multiply(input3, parameter);
Vector128<float> mix01 = Sse3.HorizontalAdd(mix0, mix1);
Vector128<float> mix23 = Sse3.HorizontalAdd(mix2, mix3);
Vector128<float> mix0123 = Sse3.HorizontalAdd(mix01, mix23);
Sse.Store(pOutput + (uint)i, Sse41.RoundToNearestInteger(mix0123));
}
}
inputBufferIndex = i;
}
else
{
fixed (short* pInput = inputBuffer)
fixed (float* pOutput = outputBuffer, pParameters = parameters)
{
for (; i < (sampleCount & ~3); i += 4)
{
uint baseIndex0 = (uint)(fraction * 128) * 4;
uint inputIndex0 = (uint)inputBufferIndex;
fraction += ratio;
uint baseIndex1 = ((uint)(fraction * 128) & 127) * 4;
uint inputIndex1 = (uint)inputBufferIndex + (uint)fraction;
fraction += ratio;
uint baseIndex2 = ((uint)(fraction * 128) & 127) * 4;
uint inputIndex2 = (uint)inputBufferIndex + (uint)fraction;
fraction += ratio;
uint baseIndex3 = ((uint)(fraction * 128) & 127) * 4;
uint inputIndex3 = (uint)inputBufferIndex + (uint)fraction;
fraction += ratio;
inputBufferIndex += (int)fraction;
// Only keep lower part (safe as fraction isn't supposed to be negative)
fraction -= (int)fraction;
Vector128<float> parameter0 = Sse.LoadVector128(pParameters + baseIndex0);
Vector128<float> parameter1 = Sse.LoadVector128(pParameters + baseIndex1);
Vector128<float> parameter2 = Sse.LoadVector128(pParameters + baseIndex2);
Vector128<float> parameter3 = Sse.LoadVector128(pParameters + baseIndex3);
Vector128<int> intInput0 = Sse41.ConvertToVector128Int32(pInput + inputIndex0);
Vector128<int> intInput1 = Sse41.ConvertToVector128Int32(pInput + inputIndex1);
Vector128<int> intInput2 = Sse41.ConvertToVector128Int32(pInput + inputIndex2);
Vector128<int> intInput3 = Sse41.ConvertToVector128Int32(pInput + inputIndex3);
Vector128<float> input0 = Sse2.ConvertToVector128Single(intInput0);
Vector128<float> input1 = Sse2.ConvertToVector128Single(intInput1);
Vector128<float> input2 = Sse2.ConvertToVector128Single(intInput2);
Vector128<float> input3 = Sse2.ConvertToVector128Single(intInput3);
Vector128<float> mix0 = Sse.Multiply(input0, parameter0);
Vector128<float> mix1 = Sse.Multiply(input1, parameter1);
Vector128<float> mix2 = Sse.Multiply(input2, parameter2);
Vector128<float> mix3 = Sse.Multiply(input3, parameter3);
Vector128<float> mix01 = Sse3.HorizontalAdd(mix0, mix1);
Vector128<float> mix23 = Sse3.HorizontalAdd(mix2, mix3);
Vector128<float> mix0123 = Sse3.HorizontalAdd(mix01, mix23);
Sse.Store(pOutput + (uint)i, Sse41.RoundToNearestInteger(mix0123));
}
}
}
}
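// Scalar path, also used for the tail samples left over by the vectorized loops.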
for (; i < sampleCount; i++)
{
int baseIndex = (int)(fraction * 128) * 4;
ReadOnlySpan<float> parameter = parameters.Slice(baseIndex, 4);
ReadOnlySpan<short> currentInput = inputBuffer.Slice(inputBufferIndex, 4);
outputBuffer[i] = (float)Math.Round(currentInput[0] * parameter[0] +
currentInput[1] * parameter[1] +
currentInput[2] * parameter[2] +
currentInput[3] * parameter[3]);
fraction += ratio;
inputBufferIndex += (int)fraction;
// Only keep lower part (safe as fraction isn't supposed to be negative)
fraction -= (int)fraction;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<float> GetHighParameter(float ratio)
{
if (ratio <= 1.0f)
{
return _highCurveLut1F;
}
else if (ratio > 1.333313f)
{
return _highCurveLut0F;
}
return _highCurveLut2F;
}
private static unsafe void ResampleHighQuality(Span<float> outputBuffer, ReadOnlySpan<short> inputBuffer, float ratio, ref float fraction, int sampleCount)
{
ReadOnlySpan<float> parameters = GetHighParameter(ratio);
int inputBufferIndex = 0;
if (Avx2.IsSupported)
{
// Fast path; assumes 256-bit vectors for simplicity because the filter is 8 taps
fixed (short* pInput = inputBuffer)
fixed (float* pParameters = parameters)
{
for (int i = 0; i < sampleCount; i++)
{
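// fraction is quantized to 128 phases and each phase stores 8 filter taps, hence the * 8 stride.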
int baseIndex = (int)(fraction * 128) * 8;
Vector256<int> intInput = Avx2.ConvertToVector256Int32(pInput + inputBufferIndex);
Vector256<float> floatInput = Avx.ConvertToVector256Single(intInput);
Vector256<float> parameter = Avx.LoadVector256(pParameters + baseIndex);
Vector256<float> dp = Avx.DotProduct(floatInput, parameter, control: 0xFF);
// vdpps operates on each 128-bit lane independently (two 4-element dot products), so the two lane results have to be summed
outputBuffer[i] = (float)Math.Round(dp[0] + dp[4]);
fraction += ratio;
inputBufferIndex += (int)MathF.Truncate(fraction);
fraction -= (int)fraction;
}
}
}
else
{
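// Scalar fallback: the same 128-phase, 8-tap filter evaluated without intrinsics.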
for (int i = 0; i < sampleCount; i++)
{
int baseIndex = (int)(fraction * 128) * 8;
ReadOnlySpan<float> parameter = parameters.Slice(baseIndex, 8);
ReadOnlySpan<short> currentInput = inputBuffer.Slice(inputBufferIndex, 8);
outputBuffer[i] = (float)Math.Round(currentInput[0] * parameter[0] +
currentInput[1] * parameter[1] +
currentInput[2] * parameter[2] +
currentInput[3] * parameter[3] +
currentInput[4] * parameter[4] +
currentInput[5] * parameter[5] +
currentInput[6] * parameter[6] +
currentInput[7] * parameter[7]);
fraction += ratio;
inputBufferIndex += (int)MathF.Truncate(fraction);
fraction -= (int)fraction;
}
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ResampleLowQuality(Span<float> outputBuffer, ReadOnlySpan<short> inputBuffer, float ratio, ref float fraction, int sampleCount)
{
int inputBufferIndex = 0;
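// Cheapest path: output samples are taken directly from the input, with no interpolation or band-limiting filter.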
for (int i = 0; i < sampleCount; i++)
{
int outputData = inputBuffer[inputBufferIndex];
if (fraction > 0.5f)
{
outputData = inputBuffer[inputBufferIndex + 1];
}
outputBuffer[i] = outputData;
fraction += ratio;
inputBufferIndex += (int)MathF.Truncate(fraction);
fraction -= (int)fraction;
}
}
}
}

View file

@ -0,0 +1,12 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
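/// <summary>
/// ADPCM loop context: predictor/scale value and the two previous decoded samples (history).
/// </summary>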
[StructLayout(LayoutKind.Sequential, Pack = 1, Size = 6)]
public struct AdpcmLoopContext
{
public short PredScale;
public short History0;
public short History1;
}
}

View file

@ -0,0 +1,74 @@
using Ryujinx.Memory;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
[StructLayout(LayoutKind.Sequential, Pack = 1, Size = 0x80)]
public struct AuxiliaryBufferHeader
{
[StructLayout(LayoutKind.Sequential, Pack = 1, Size = 0x40)]
public struct AuxiliaryBufferInfo
{
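// Mirrors the auxiliary buffer info block shared with the guest; the static helpers below
// read and write these fields in place through the guest's virtual memory.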
private const uint ReadOffsetPosition = 0x0;
private const uint WriteOffsetPosition = 0x4;
private const uint LostSampleCountPosition = 0x8;
private const uint TotalSampleCountPosition = 0xC;
public uint ReadOffset;
public uint WriteOffset;
public uint LostSampleCount;
public uint TotalSampleCount;
private unsafe fixed uint _unknown[12];
public static uint GetReadOffset(IVirtualMemoryManager manager, ulong bufferAddress)
{
return manager.Read<uint>(bufferAddress + ReadOffsetPosition);
}
public static uint GetWriteOffset(IVirtualMemoryManager manager, ulong bufferAddress)
{
return manager.Read<uint>(bufferAddress + WriteOffsetPosition);
}
public static uint GetLostSampleCount(IVirtualMemoryManager manager, ulong bufferAddress)
{
return manager.Read<uint>(bufferAddress + LostSampleCountPosition);
}
public static uint GetTotalSampleCount(IVirtualMemoryManager manager, ulong bufferAddress)
{
return manager.Read<uint>(bufferAddress + TotalSampleCountPosition);
}
public static void SetReadOffset(IVirtualMemoryManager manager, ulong bufferAddress, uint value)
{
manager.Write(bufferAddress + ReadOffsetPosition, value);
}
public static void SetWriteOffset(IVirtualMemoryManager manager, ulong bufferAddress, uint value)
{
manager.Write(bufferAddress + WriteOffsetPosition, value);
}
public static void SetLostSampleCount(IVirtualMemoryManager manager, ulong bufferAddress, uint value)
{
manager.Write(bufferAddress + LostSampleCountPosition, value);
}
public static void SetTotalSampleCount(IVirtualMemoryManager manager, ulong bufferAddress, uint value)
{
manager.Write(bufferAddress + TotalSampleCountPosition, value);
}
public static void Reset(IVirtualMemoryManager manager, ulong bufferAddress)
{
// NOTE: Lost sample count is never reset, since REV10.
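// A single 64-bit write at offset 0x0 clears both ReadOffset and WriteOffset at once.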
manager.Write(bufferAddress + ReadOffsetPosition, 0UL);
manager.Write(bufferAddress + TotalSampleCountPosition, 0);
}
}
public AuxiliaryBufferInfo CpuBufferInfo;
public AuxiliaryBufferInfo DspBufferInfo;
}
}

View file

@ -0,0 +1,13 @@
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
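/// <summary>
/// Delay-line state carried between updates by the biquad filter implementations.
/// </summary>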
[StructLayout(LayoutKind.Sequential, Pack = 1, Size = 0x10)]
public struct BiquadFilterState
{
public float State0;
public float State1;
public float State2;
public float State3;
}
}

View file

@ -0,0 +1,51 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Parameter.Effect;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
public class CompressorState
{
public ExponentialMovingAverage InputMovingAverage;
public float Unknown4;
public ExponentialMovingAverage CompressionGainAverage;
public float CompressorGainReduction;
public float Unknown10;
public float Unknown14;
public float PreviousCompressionEmaAlpha;
public float MakeupGain;
public float OutputGain;
public CompressorState(ref CompressorParameter parameter)
{
InputMovingAverage = new ExponentialMovingAverage(0.0f);
Unknown4 = 1.0f;
CompressionGainAverage = new ExponentialMovingAverage(1.0f);
UpdateParameter(ref parameter);
}
public void UpdateParameter(ref CompressorParameter parameter)
{
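// Precompute the static constants used by the per-sample compressor path
// (inverse ratio, threshold window, makeup gain and output gain).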
float threshold = parameter.Threshold;
float ratio = 1.0f / parameter.Ratio;
float attackCoefficient = parameter.AttackCoefficient;
float makeupGain;
if (parameter.MakeupGainEnabled)
{
makeupGain = (threshold * 0.5f * (ratio - 1.0f)) - 3.0f;
}
else
{
makeupGain = 0.0f;
}
PreviousCompressionEmaAlpha = attackCoefficient;
MakeupGain = makeupGain;
CompressorGainReduction = (1.0f - ratio) / Constants.ChannelCountMax;
Unknown10 = threshold - 1.5f;
Unknown14 = threshold + 1.5f;
OutputGain = FloatingPointHelper.DecibelToLinearExtended(parameter.OutputGain + makeupGain);
}
}
}

View file

@ -0,0 +1,67 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
public class DelayState
{
public DelayLine[] DelayLines { get; }
public float[] LowPassZ { get; set; }
public float FeedbackGain { get; private set; }
public float DelayFeedbackBaseGain { get; private set; }
public float DelayFeedbackCrossGain { get; private set; }
public float LowPassFeedbackGain { get; private set; }
public float LowPassBaseGain { get; private set; }
private const int FixedPointPrecision = 14;
public DelayState(ref DelayParameter parameter, ulong workBuffer)
{
DelayLines = new DelayLine[parameter.ChannelCount];
LowPassZ = new float[parameter.ChannelCount];
uint sampleRate = (uint)FixedPointHelper.ToInt(parameter.SampleRate, FixedPointPrecision) / 1000;
for (int i = 0; i < DelayLines.Length; i++)
{
DelayLines[i] = new DelayLine(sampleRate, parameter.DelayTimeMax);
DelayLines[i].SetDelay(parameter.DelayTime);
}
UpdateParameter(ref parameter);
}
public void UpdateParameter(ref DelayParameter parameter)
{
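// The feedback gain is scaled down slightly (0.98) to keep the feedback network stable;
// the channel spread splits it between the straight and cross-channel feedback terms,
// with the cross term halved for 4 and 6 channel layouts.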
FeedbackGain = FixedPointHelper.ToFloat(parameter.FeedbackGain, FixedPointPrecision) * 0.98f;
float channelSpread = FixedPointHelper.ToFloat(parameter.ChannelSpread, FixedPointPrecision);
DelayFeedbackBaseGain = (1.0f - channelSpread) * FeedbackGain;
if (parameter.ChannelCount == 4 || parameter.ChannelCount == 6)
{
DelayFeedbackCrossGain = channelSpread * 0.5f * FeedbackGain;
}
else
{
DelayFeedbackCrossGain = channelSpread * FeedbackGain;
}
LowPassFeedbackGain = 0.95f * FixedPointHelper.ToFloat(parameter.LowPassAmount, FixedPointPrecision);
LowPassBaseGain = 1.0f - LowPassFeedbackGain;
}
public void UpdateLowPassFilter(ref float tempRawRef, uint channelCount)
{
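// One-pole low-pass per channel: blend the previous filtered value with the incoming sample,
// then push the result into that channel's delay line.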
for (int i = 0; i < channelCount; i++)
{
float lowPassResult = LowPassFeedbackGain * LowPassZ[i] + Unsafe.Add(ref tempRawRef, i) * LowPassBaseGain;
LowPassZ[i] = lowPassResult;
DelayLines[i].Update(lowPassResult);
}
}
}
}

View file

@ -0,0 +1,31 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
public class LimiterState
{
public ExponentialMovingAverage[] DetectorAverage;
public ExponentialMovingAverage[] CompressionGainAverage;
public float[] DelayedSampleBuffer;
public int[] DelayedSampleBufferPosition;
public LimiterState(ref LimiterParameter parameter, ulong workBuffer)
{
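// Start from a neutral state: detector averages at zero, gain averages at unity, delay buffers cleared.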
DetectorAverage = new ExponentialMovingAverage[parameter.ChannelCount];
CompressionGainAverage = new ExponentialMovingAverage[parameter.ChannelCount];
DelayedSampleBuffer = new float[parameter.ChannelCount * parameter.DelayBufferSampleCountMax];
DelayedSampleBufferPosition = new int[parameter.ChannelCount];
DetectorAverage.AsSpan().Fill(new ExponentialMovingAverage(0.0f));
CompressionGainAverage.AsSpan().Fill(new ExponentialMovingAverage(1.0f));
DelayedSampleBufferPosition.AsSpan().Fill(0);
DelayedSampleBuffer.AsSpan().Fill(0.0f);
UpdateParameter(ref parameter);
}
public void UpdateParameter(ref LimiterParameter parameter) { }
}
}

View file

@ -0,0 +1,119 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
public class Reverb3dState
{
private readonly float[] FdnDelayMinTimes = new float[4] { 5.0f, 6.0f, 13.0f, 14.0f };
private readonly float[] FdnDelayMaxTimes = new float[4] { 45.704f, 82.782f, 149.94f, 271.58f };
private readonly float[] DecayDelayMaxTimes1 = new float[4] { 17.0f, 13.0f, 9.0f, 7.0f };
private readonly float[] DecayDelayMaxTimes2 = new float[4] { 19.0f, 11.0f, 10.0f, 6.0f };
private readonly float[] EarlyDelayTimes = new float[20] { 0.017136f, 0.059154f, 0.16173f, 0.39019f, 0.42526f, 0.45541f, 0.68974f, 0.74591f, 0.83384f, 0.8595f, 0.0f, 0.075024f, 0.16879f, 0.2999f, 0.33744f, 0.3719f, 0.59901f, 0.71674f, 0.81786f, 0.85166f };
public readonly float[] EarlyGain = new float[20] { 0.67096f, 0.61027f, 1.0f, 0.35680f, 0.68361f, 0.65978f, 0.51939f, 0.24712f, 0.45945f, 0.45021f, 0.64196f, 0.54879f, 0.92925f, 0.38270f, 0.72867f, 0.69794f, 0.5464f, 0.24563f, 0.45214f, 0.44042f };
public IDelayLine[] FdnDelayLines { get; }
public DecayDelay[] DecayDelays1 { get; }
public DecayDelay[] DecayDelays2 { get; }
public IDelayLine PreDelayLine { get; }
public IDelayLine FrontCenterDelayLine { get; }
public float DryGain { get; private set; }
public uint[] EarlyDelayTime { get; private set; }
public float PreviousPreDelayValue { get; set; }
public float PreviousPreDelayGain { get; private set; }
public float TargetPreDelayGain { get; private set; }
public float EarlyReflectionsGain { get; private set; }
public float LateReverbGain { get; private set; }
public uint ReflectionDelayTime { get; private set; }
public float EchoLateReverbDecay { get; private set; }
public float[] DecayDirectFdnGain { get; private set; }
public float[] DecayCurrentFdnGain { get; private set; }
public float[] DecayCurrentOutputGain { get; private set; }
public float[] PreviousFeedbackOutputDecayed { get; private set; }
public Reverb3dState(ref Reverb3dParameter parameter, ulong workBuffer)
{
FdnDelayLines = new IDelayLine[4];
DecayDelays1 = new DecayDelay[4];
DecayDelays2 = new DecayDelay[4];
DecayDirectFdnGain = new float[4];
DecayCurrentFdnGain = new float[4];
DecayCurrentOutputGain = new float[4];
PreviousFeedbackOutputDecayed = new float[4];
uint sampleRate = parameter.SampleRate / 1000;
for (int i = 0; i < 4; i++)
{
FdnDelayLines[i] = new DelayLine3d(sampleRate, FdnDelayMaxTimes[i]);
DecayDelays1[i] = new DecayDelay(new DelayLine3d(sampleRate, DecayDelayMaxTimes1[i]));
DecayDelays2[i] = new DecayDelay(new DelayLine3d(sampleRate, DecayDelayMaxTimes2[i]));
}
PreDelayLine = new DelayLine3d(sampleRate, 400);
FrontCenterDelayLine = new DelayLine3d(sampleRate, 5);
UpdateParameter(ref parameter);
}
public void UpdateParameter(ref Reverb3dParameter parameter)
{
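// The room/reflections/reverb gains are treated as millibel values; Pow10(x / 2000) converts them to linear amplitude.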
uint sampleRate = parameter.SampleRate / 1000;
EarlyDelayTime = new uint[20];
DryGain = parameter.DryGain;
PreviousFeedbackOutputDecayed.AsSpan().Fill(0);
PreviousPreDelayValue = 0;
EarlyReflectionsGain = FloatingPointHelper.Pow10(Math.Min(parameter.RoomGain + parameter.ReflectionsGain, 5000.0f) / 2000.0f);
LateReverbGain = FloatingPointHelper.Pow10(Math.Min(parameter.RoomGain + parameter.ReverbGain, 5000.0f) / 2000.0f);
float highFrequencyRoomGain = FloatingPointHelper.Pow10(parameter.RoomHf / 2000.0f);
if (highFrequencyRoomGain < 1.0f)
{
float tempA = 1.0f - highFrequencyRoomGain;
float tempB = 2.0f - ((2.0f * highFrequencyRoomGain) * FloatingPointHelper.Cos(256.0f * parameter.HfReference / parameter.SampleRate));
float tempC = MathF.Sqrt(MathF.Pow(tempB, 2) - (4.0f * (1.0f - highFrequencyRoomGain) * (1.0f - highFrequencyRoomGain)));
PreviousPreDelayGain = (tempB - tempC) / (2.0f * tempA);
TargetPreDelayGain = 1.0f - PreviousPreDelayGain;
}
else
{
PreviousPreDelayGain = 0.0f;
TargetPreDelayGain = 1.0f;
}
ReflectionDelayTime = IDelayLine.GetSampleCount(sampleRate, 1000.0f * (parameter.ReflectionDelay + parameter.ReverbDelayTime));
EchoLateReverbDecay = 0.6f * parameter.Diffusion * 0.01f;
for (int i = 0; i < FdnDelayLines.Length; i++)
{
FdnDelayLines[i].SetDelay(FdnDelayMinTimes[i] + (parameter.Density / 100 * (FdnDelayMaxTimes[i] - FdnDelayMinTimes[i])));
uint tempSampleCount = FdnDelayLines[i].CurrentSampleCount + DecayDelays1[i].CurrentSampleCount + DecayDelays2[i].CurrentSampleCount;
float tempA = (-60.0f * tempSampleCount) / (parameter.DecayTime * parameter.SampleRate);
float tempB = tempA / parameter.HfDecayRatio;
float tempC = FloatingPointHelper.Cos(128.0f * 0.5f * parameter.HfReference / parameter.SampleRate) / FloatingPointHelper.Sin(128.0f * 0.5f * parameter.HfReference / parameter.SampleRate);
float tempD = FloatingPointHelper.Pow10((tempB - tempA) / 40.0f);
float tempE = FloatingPointHelper.Pow10((tempB + tempA) / 40.0f) * 0.7071f;
DecayDirectFdnGain[i] = tempE * ((tempD * tempC) + 1.0f) / (tempC + tempD);
DecayCurrentFdnGain[i] = tempE * (1.0f - (tempD * tempC)) / (tempC + tempD);
DecayCurrentOutputGain[i] = (tempC - tempD) / (tempC + tempD);
DecayDelays1[i].SetDecayRate(EchoLateReverbDecay);
DecayDelays2[i].SetDecayRate(EchoLateReverbDecay * -0.9f);
}
for (int i = 0; i < EarlyDelayTime.Length; i++)
{
uint sampleCount = Math.Min(IDelayLine.GetSampleCount(sampleRate, (parameter.ReflectionDelay * 1000.0f) + (EarlyDelayTimes[i] * 1000.0f * ((parameter.ReverbDelayTime * 0.9998f) + 0.02f))), PreDelayLine.SampleCountMax);
EarlyDelayTime[i] = sampleCount;
}
}
}
}

View file

@ -0,0 +1,204 @@
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.State
{
public class ReverbState
{
private static readonly float[] FdnDelayTimes = new float[20]
{
// Room
53.953247f, 79.192566f, 116.238770f, 130.615295f,
// Hall
53.953247f, 79.192566f, 116.238770f, 170.615295f,
// Plate
5f, 10f, 5f, 10f,
// Cathedral
47.03f, 71f, 103f, 170f,
// Max delay (identical to Hall, which has the highest values)
53.953247f, 79.192566f, 116.238770f, 170.615295f,
};
private static readonly float[] DecayDelayTimes = new float[20]
{
// Room
7f, 9f, 13f, 17f,
// Hall
7f, 9f, 13f, 17f,
// Plate (no decay)
1f, 1f, 1f, 1f,
// Cathedral
7f, 7f, 13f, 9f,
// Max delay (identical to Hall, which has the highest values)
7f, 9f, 13f, 17f,
};
private static readonly float[] EarlyDelayTimes = new float[50]
{
// Room
0.0f, 3.5f, 2.8f, 3.9f, 2.7f, 13.4f, 7.9f, 8.4f, 9.9f, 12.0f,
// Chamber
0.0f, 11.8f, 5.5f, 11.2f, 10.4f, 38.1f, 22.2f, 29.6f, 21.2f, 24.8f,
// Hall
0.0f, 41.5f, 20.5f, 41.3f, 0.0f, 29.5f, 33.8f, 45.2f, 46.8f, 0.0f,
// Cathedral
33.1f, 43.3f, 22.8f, 37.9f, 14.9f, 35.3f, 17.9f, 34.2f, 0.0f, 43.3f,
// Disabled
0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f
};
private static readonly float[] EarlyGainBase = new float[50]
{
// Room
0.70f, 0.68f, 0.70f, 0.68f, 0.70f, 0.68f, 0.70f, 0.68f, 0.68f, 0.68f,
// Chamber
0.70f, 0.68f, 0.70f, 0.68f, 0.70f, 0.68f, 0.68f, 0.68f, 0.68f, 0.68f,
// Hall
0.50f, 0.70f, 0.70f, 0.68f, 0.50f, 0.68f, 0.68f, 0.70f, 0.68f, 0.00f,
// Cathedral
0.93f, 0.92f, 0.87f, 0.86f, 0.94f, 0.81f, 0.80f, 0.77f, 0.76f, 0.65f,
// Disabled
0.00f, 0.00f, 0.00f, 0.00f, 0.00f, 0.00f, 0.00f, 0.00f, 0.00f, 0.00f
};
private static readonly float[] PreDelayTimes = new float[5]
{
// Room
12.5f,
// Chamber
40.0f,
// Hall
50.0f,
// Cathedral
50.0f,
// Disabled
0.0f
};
public DelayLine[] FdnDelayLines { get; }
public DecayDelay[] DecayDelays { get; }
public DelayLine PreDelayLine { get; }
public DelayLine FrontCenterDelayLine { get; }
public uint[] EarlyDelayTime { get; }
public float[] EarlyGain { get; }
public uint PreDelayLineDelayTime { get; private set; }
public float[] HighFrequencyDecayDirectGain { get; }
public float[] HighFrequencyDecayPreviousGain { get; }
public float[] PreviousFeedbackOutput { get; }
public const int EarlyModeCount = 10;
private const int FixedPointPrecision = 14;
private ReadOnlySpan<float> GetFdnDelayTimesByLateMode(ReverbLateMode lateMode)
{
return FdnDelayTimes.AsSpan((int)lateMode * 4, 4);
}
private ReadOnlySpan<float> GetDecayDelayTimesByLateMode(ReverbLateMode lateMode)
{
return DecayDelayTimes.AsSpan((int)lateMode * 4, 4);
}
public ReverbState(ref ReverbParameter parameter, ulong workBuffer, bool isLongSizePreDelaySupported)
{
FdnDelayLines = new DelayLine[4];
DecayDelays = new DecayDelay[4];
EarlyDelayTime = new uint[EarlyModeCount];
EarlyGain = new float[EarlyModeCount];
HighFrequencyDecayDirectGain = new float[4];
HighFrequencyDecayPreviousGain = new float[4];
PreviousFeedbackOutput = new float[4];
ReadOnlySpan<float> fdnDelayTimes = GetFdnDelayTimesByLateMode(ReverbLateMode.Limit);
ReadOnlySpan<float> decayDelayTimes = GetDecayDelayTimesByLateMode(ReverbLateMode.Limit);
uint sampleRate = (uint)FixedPointHelper.ToFloat((uint)parameter.SampleRate, FixedPointPrecision);
for (int i = 0; i < 4; i++)
{
FdnDelayLines[i] = new DelayLine(sampleRate, fdnDelayTimes[i]);
DecayDelays[i] = new DecayDelay(new DelayLine(sampleRate, decayDelayTimes[i]));
}
float preDelayTimeMax = 150.0f;
if (isLongSizePreDelaySupported)
{
preDelayTimeMax = 350.0f;
}
PreDelayLine = new DelayLine(sampleRate, preDelayTimeMax);
FrontCenterDelayLine = new DelayLine(sampleRate, 5.0f);
UpdateParameter(ref parameter);
}
public void UpdateParameter(ref ReverbParameter parameter)
{
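// Parameters arrive as 14-bit fixed point; convert them once here, then derive the
// per-FDN-line decay and damping coefficients below.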
uint sampleRate = (uint)FixedPointHelper.ToFloat((uint)parameter.SampleRate, FixedPointPrecision);
float preDelayTimeInMilliseconds = FixedPointHelper.ToFloat(parameter.PreDelayTime, FixedPointPrecision);
float earlyGain = FixedPointHelper.ToFloat(parameter.EarlyGain, FixedPointPrecision);
float coloration = FixedPointHelper.ToFloat(parameter.Coloration, FixedPointPrecision);
float decayTime = FixedPointHelper.ToFloat(parameter.DecayTime, FixedPointPrecision);
for (int i = 0; i < 10; i++)
{
EarlyDelayTime[i] = Math.Min(IDelayLine.GetSampleCount(sampleRate, EarlyDelayTimes[i] + preDelayTimeInMilliseconds), PreDelayLine.SampleCountMax) + 1;
EarlyGain[i] = EarlyGainBase[i] * earlyGain;
}
if (parameter.ChannelCount == 2)
{
EarlyGain[4] = EarlyGain[4] * 0.5f;
EarlyGain[5] = EarlyGain[5] * 0.5f;
}
PreDelayLineDelayTime = Math.Min(IDelayLine.GetSampleCount(sampleRate, PreDelayTimes[(int)parameter.EarlyMode] + preDelayTimeInMilliseconds), PreDelayLine.SampleCountMax);
ReadOnlySpan<float> fdnDelayTimes = GetFdnDelayTimesByLateMode(parameter.LateMode);
ReadOnlySpan<float> decayDelayTimes = GetDecayDelayTimesByLateMode(parameter.LateMode);
float highFrequencyDecayRatio = FixedPointHelper.ToFloat(parameter.HighFrequencyDecayRatio, FixedPointPrecision);
float highFrequencyUnknownValue = FloatingPointHelper.Cos(1280.0f / sampleRate);
for (int i = 0; i < 4; i++)
{
FdnDelayLines[i].SetDelay(fdnDelayTimes[i]);
DecayDelays[i].SetDelay(decayDelayTimes[i]);
float tempA = -3 * (DecayDelays[i].CurrentSampleCount + FdnDelayLines[i].CurrentSampleCount);
float tempB = tempA / (decayTime * sampleRate);
float tempC;
float tempD;
if (highFrequencyDecayRatio < 0.995f)
{
float tempE = FloatingPointHelper.Pow10((((1.0f / highFrequencyDecayRatio) - 1.0f) * 2) / 100 * (tempB / 10));
float tempF = 1.0f - tempE;
float tempG = 2.0f - (tempE * 2 * highFrequencyUnknownValue);
float tempH = MathF.Sqrt((tempG * tempG) - (tempF * tempF * 4));
tempC = (tempG - tempH) / (tempF * 2);
tempD = 1.0f - tempC;
}
else
{
// High frequency decay ratio is close to 1.0, so no extra high frequency damping is applied
tempC = 0.0f;
tempD = 1.0f;
}
HighFrequencyDecayDirectGain[i] = FloatingPointHelper.Pow10(tempB / 1000) * tempD * 0.7071f;
HighFrequencyDecayPreviousGain[i] = tempC;
PreviousFeedbackOutput[i] = 0.0f;
DecayDelays[i].SetDecayRate(0.6f * (1.0f - coloration));
}
}
}
}

Some files were not shown because too many files have changed in this diff.