Move solution and projects to src

This commit is contained in:
TSR Berry 2023-04-08 01:22:00 +02:00 committed by Mary
parent cd124bda58
commit cee7121058
3466 changed files with 55 additions and 55 deletions

View file

@ -0,0 +1,26 @@
using Ryujinx.Audio.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
public static class BackendHelper
{
public static int GetSampleSize(SampleFormat format)
{
return format switch
{
SampleFormat.PcmInt8 => sizeof(byte),
SampleFormat.PcmInt16 => sizeof(ushort),
SampleFormat.PcmInt24 => 3,
SampleFormat.PcmInt32 => sizeof(int),
SampleFormat.PcmFloat => sizeof(float),
_ => throw new ArgumentException($"{format}"),
};
}
public static int GetSampleCount(SampleFormat format, int channelCount, int bufferSize)
{
return bufferSize / GetSampleSize(format) / channelCount;
}
}
}
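
A minimal usage sketch of the helper above (the 960-byte buffer size and the example class are hypothetical, and a Ryujinx.Audio project reference is assumed):

using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using System;

class BackendHelperExample
{
    static void Main()
    {
        // PCM16 samples are 2 bytes wide.
        int sampleSize = BackendHelper.GetSampleSize(SampleFormat.PcmInt16);

        // A hypothetical 960-byte stereo buffer holds 960 / 2 bytes / 2 channels = 240 samples per channel.
        int sampleCount = BackendHelper.GetSampleCount(SampleFormat.PcmInt16, channelCount: 2, bufferSize: 960);

        Console.WriteLine($"{sampleSize} bytes per sample, {sampleCount} samples per channel");
    }
}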

View file

@ -0,0 +1,166 @@
using Ryujinx.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
/// <summary>
/// A ring buffer that grows if data written to it is too big to fit.
/// </summary>
public class DynamicRingBuffer
{
private const int RingBufferAlignment = 2048;
private object _lock = new object();
private byte[] _buffer;
private int _size;
private int _headOffset;
private int _tailOffset;
public int Length => _size;
public DynamicRingBuffer(int initialCapacity = RingBufferAlignment)
{
_buffer = new byte[initialCapacity];
}
public void Clear()
{
_size = 0;
_headOffset = 0;
_tailOffset = 0;
}
public void Clear(int size)
{
lock (_lock)
{
if (size > _size)
{
size = _size;
}
if (size == 0)
{
return;
}
_headOffset = (_headOffset + size) % _buffer.Length;
_size -= size;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
}
}
private void SetCapacityLocked(int capacity)
{
byte[] buffer = new byte[capacity];
if (_size > 0)
{
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _size);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _buffer.Length - _headOffset);
Buffer.BlockCopy(_buffer, 0, buffer, _buffer.Length - _headOffset, _tailOffset);
}
}
_buffer = buffer;
_headOffset = 0;
_tailOffset = _size;
}
public void Write<T>(T[] buffer, int index, int count)
{
if (count == 0)
{
return;
}
lock (_lock)
{
if ((_size + count) > _buffer.Length)
{
SetCapacityLocked(BitUtils.AlignUp(_size + count, RingBufferAlignment));
}
if (_headOffset < _tailOffset)
{
int tailLength = _buffer.Length - _tailOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, tailLength);
Buffer.BlockCopy(buffer, index + tailLength, _buffer, 0, count - tailLength);
}
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
_size += count;
_tailOffset = (_tailOffset + count) % _buffer.Length;
}
}
public int Read<T>(T[] buffer, int index, int count)
{
lock (_lock)
{
if (count > _size)
{
count = _size;
}
if (count == 0)
{
return 0;
}
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
int tailLength = _buffer.Length - _headOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, tailLength);
Buffer.BlockCopy(_buffer, 0, buffer, index + tailLength, count - tailLength);
}
}
_size -= count;
_headOffset = (_headOffset + count) % _buffer.Length;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
return count;
}
}
}
}
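
A round-trip sketch for the ring buffer above, assuming byte[] data; index and count are byte offsets because the implementation copies with Buffer.BlockCopy:

using Ryujinx.Audio.Backends.Common;
using System;

class DynamicRingBufferExample
{
    static void Main()
    {
        DynamicRingBuffer ringBuffer = new DynamicRingBuffer();

        byte[] input = new byte[4096];
        new Random(42).NextBytes(input);

        // Writing more than the initial 2048-byte capacity makes SetCapacityLocked
        // grow the backing array to the next multiple of RingBufferAlignment.
        ringBuffer.Write(input, 0, input.Length);

        byte[] output = new byte[input.Length];
        int read = ringBuffer.Read(output, 0, output.Length);

        Console.WriteLine($"Read {read} bytes back, {ringBuffer.Length} bytes left in the buffer");
    }
}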

View file

@ -0,0 +1,79 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Backends.Common
{
public abstract class HardwareDeviceSessionOutputBase : IHardwareDeviceSession
{
public IVirtualMemoryManager MemoryManager { get; }
public SampleFormat RequestedSampleFormat { get; }
public uint RequestedSampleRate { get; }
public uint RequestedChannelCount { get; }
public HardwareDeviceSessionOutputBase(IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
MemoryManager = memoryManager;
RequestedSampleFormat = requestedSampleFormat;
RequestedSampleRate = requestedSampleRate;
RequestedChannelCount = requestedChannelCount;
}
private byte[] GetBufferSamples(AudioBuffer buffer)
{
if (buffer.DataPointer == 0)
{
return null;
}
byte[] data = new byte[buffer.DataSize];
MemoryManager.Read(buffer.DataPointer, data);
return data;
}
protected ulong GetSampleCount(AudioBuffer buffer)
{
return GetSampleCount((int)buffer.DataSize);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected ulong GetSampleCount(int dataSize)
{
return (ulong)BackendHelper.GetSampleCount(RequestedSampleFormat, (int)RequestedChannelCount, dataSize);
}
public abstract void Dispose();
public abstract void PrepareToClose();
public abstract void QueueBuffer(AudioBuffer buffer);
public abstract void SetVolume(float volume);
public abstract float GetVolume();
public abstract void Start();
public abstract void Stop();
public abstract ulong GetPlayedSampleCount();
public abstract bool WasBufferFullyConsumed(AudioBuffer buffer);
public virtual bool RegisterBuffer(AudioBuffer buffer)
{
return RegisterBuffer(buffer, GetBufferSamples(buffer));
}
public virtual bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (samples == null)
{
return false;
}
if (buffer.Data == null)
{
buffer.Data = samples;
}
return true;
}
public virtual void UnregisterBuffer(AudioBuffer buffer) { }
}
}
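
A hedged sketch of the buffer lifecycle this interface suggests; the ordering of RegisterBuffer, QueueBuffer, WasBufferFullyConsumed and UnregisterBuffer is inferred from the API shape shown here, not stated in this commit, and the polling loop is purely illustrative:

using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using System.Threading;

static class SessionLifecycleSketch
{
    // The session and the guest buffer are assumed to be provided by the caller.
    static void PlayOneBuffer(IHardwareDeviceSession session, AudioBuffer buffer)
    {
        session.Start();

        // Snapshot the guest samples (or reject a null pointer), then hand the buffer to the backend.
        if (session.RegisterBuffer(buffer))
        {
            session.QueueBuffer(buffer);

            // Wait until the backend reports the buffer as played before releasing it.
            while (!session.WasBufferFullyConsumed(buffer))
            {
                Thread.Sleep(1);
            }

            session.UnregisterBuffer(buffer);
        }

        session.Stop();
    }
}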

View file

@ -0,0 +1,186 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Backends.Dummy;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.CompatLayer
{
public class CompatLayerHardwareDeviceDriver : IHardwareDeviceDriver
{
private IHardwareDeviceDriver _realDriver;
public static bool IsSupported => true;
public CompatLayerHardwareDeviceDriver(IHardwareDeviceDriver realDevice)
{
_realDriver = realDevice;
}
public void Dispose()
{
_realDriver.Dispose();
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _realDriver.GetUpdateRequiredEvent();
}
public ManualResetEvent GetPauseEvent()
{
return _realDriver.GetPauseEvent();
}
private uint SelectHardwareChannelCount(uint targetChannelCount)
{
if (_realDriver.SupportsChannelCount(targetChannelCount))
{
return targetChannelCount;
}
return targetChannelCount switch
{
6 => SelectHardwareChannelCount(2),
2 => SelectHardwareChannelCount(1),
1 => throw new ArgumentException("No valid channel configuration found!"),
_ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}")
};
}
private SampleFormat SelectHardwareSampleFormat(SampleFormat targetSampleFormat)
{
if (_realDriver.SupportsSampleFormat(targetSampleFormat))
{
return targetSampleFormat;
}
// Attempt conversion from PCM16.
if (targetSampleFormat == SampleFormat.PcmInt16)
{
// Prefer PCM32 if we need to convert.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt32))
{
return SampleFormat.PcmInt32;
}
// If not supported, PCM float provides the best quality with a cost lower than PCM24.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmFloat))
{
return SampleFormat.PcmFloat;
}
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt24))
{
return SampleFormat.PcmInt24;
}
// If nothing is truly supported, attempt PCM8 at the cost of losing quality.
if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt8))
{
return SampleFormat.PcmInt8;
}
}
throw new ArgumentException("No valid sample format configuration found!");
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
{
if (channelCount == 0)
{
channelCount = 2;
}
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
volume = Math.Clamp(volume, 0, 1);
if (!_realDriver.SupportsDirection(direction))
{
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, "The selected audio backend doesn't support audio input, fallback to dummy...");
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
throw new NotImplementedException();
}
SampleFormat hardwareSampleFormat = SelectHardwareSampleFormat(sampleFormat);
uint hardwareChannelCount = SelectHardwareChannelCount(channelCount);
IHardwareDeviceSession realSession = _realDriver.OpenDeviceSession(direction, memoryManager, hardwareSampleFormat, sampleRate, hardwareChannelCount, volume);
if (hardwareChannelCount == channelCount && hardwareSampleFormat == sampleFormat)
{
return realSession;
}
if (hardwareSampleFormat != sampleFormat)
{
Logger.Warning?.Print(LogClass.Audio, $"{sampleFormat} isn't supported by the audio device, conversion to {hardwareSampleFormat} will happen.");
if (hardwareSampleFormat < sampleFormat)
{
Logger.Warning?.Print(LogClass.Audio, $"{hardwareSampleFormat} has lower quality than {sampleFormat}, expect some loss in audio fidelity.");
}
}
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, $"The selected audio backend doesn't support the requested audio input configuration, fallback to dummy...");
// TODO: We currently don't support audio input upsampling/downsampling, implement this.
realSession.Dispose();
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
// It must be a HardwareDeviceSessionOutputBase.
if (realSession is not HardwareDeviceSessionOutputBase realSessionOutputBase)
{
throw new InvalidOperationException($"Real driver session class type isn't based on {typeof(HardwareDeviceSessionOutputBase).Name}.");
}
// If we need to do post processing before sending to the hardware device, wrap around it.
return new CompatLayerHardwareDeviceSession(realSessionOutputBase, sampleFormat, channelCount);
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
// TODO: More formats.
return sampleFormat == SampleFormat.PcmInt16;
}
public bool SupportsSampleRate(uint sampleRate)
{
// TODO: More sample rates.
return sampleRate == Constants.TargetSampleRate;
}
public IHardwareDeviceDriver GetRealDeviceDriver()
{
return _realDriver;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Input || direction == Direction.Output;
}
}
}
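
A sketch of wiring the compat layer around a backend, using the dummy driver added later in this commit; the memory manager is assumed to be supplied by the caller, and 48000 Hz stands in for the usual target sample rate:

using Ryujinx.Audio.Backends.CompatLayer;
using Ryujinx.Audio.Backends.Dummy;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;

static class CompatLayerSketch
{
    static IHardwareDeviceSession OpenOutput(IVirtualMemoryManager memoryManager)
    {
        // Any IHardwareDeviceDriver can be wrapped; the dummy driver keeps the example self-contained.
        IHardwareDeviceDriver driver = new CompatLayerHardwareDeviceDriver(new DummyHardwareDeviceDriver());

        // If the real driver rejected 6 channels or PCM16, the compat layer would fall back to
        // stereo/mono and to PCM32/float/PCM24/PCM8 and wrap the session for conversion; the
        // dummy driver accepts both, so the real session is returned unchanged here.
        return driver.OpenDeviceSession(
            IHardwareDeviceDriver.Direction.Output,
            memoryManager,
            SampleFormat.PcmInt16,
            sampleRate: 48000,
            channelCount: 6,
            volume: 1.0f);
    }
}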

View file

@ -0,0 +1,162 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Dsp;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Backends.CompatLayer
{
class CompatLayerHardwareDeviceSession : HardwareDeviceSessionOutputBase
{
private HardwareDeviceSessionOutputBase _realSession;
private SampleFormat _userSampleFormat;
private uint _userChannelCount;
public CompatLayerHardwareDeviceSession(HardwareDeviceSessionOutputBase realSession, SampleFormat userSampleFormat, uint userChannelCount) : base(realSession.MemoryManager, realSession.RequestedSampleFormat, realSession.RequestedSampleRate, userChannelCount)
{
_realSession = realSession;
_userSampleFormat = userSampleFormat;
_userChannelCount = userChannelCount;
}
public override void Dispose()
{
_realSession.Dispose();
}
public override ulong GetPlayedSampleCount()
{
return _realSession.GetPlayedSampleCount();
}
public override float GetVolume()
{
return _realSession.GetVolume();
}
public override void PrepareToClose()
{
_realSession.PrepareToClose();
}
public override void QueueBuffer(AudioBuffer buffer)
{
SampleFormat realSampleFormat = _realSession.RequestedSampleFormat;
if (_userSampleFormat != realSampleFormat)
{
if (_userSampleFormat != SampleFormat.PcmInt16)
{
throw new NotImplementedException("Converting formats other than PCM16 is not supported.");
}
int userSampleCount = buffer.Data.Length / BackendHelper.GetSampleSize(_userSampleFormat);
ReadOnlySpan<short> samples = MemoryMarshal.Cast<byte, short>(buffer.Data);
byte[] convertedSamples = new byte[BackendHelper.GetSampleSize(realSampleFormat) * userSampleCount];
switch (realSampleFormat)
{
case SampleFormat.PcmInt8:
PcmHelper.ConvertSampleToPcm8(MemoryMarshal.Cast<byte, sbyte>(convertedSamples), samples);
break;
case SampleFormat.PcmInt24:
PcmHelper.ConvertSampleToPcm24(convertedSamples, samples);
break;
case SampleFormat.PcmInt32:
PcmHelper.ConvertSampleToPcm32(MemoryMarshal.Cast<byte, int>(convertedSamples), samples);
break;
case SampleFormat.PcmFloat:
PcmHelper.ConvertSampleToPcmFloat(MemoryMarshal.Cast<byte, float>(convertedSamples), samples);
break;
default:
throw new NotImplementedException($"Sample format conversion from {_userSampleFormat} to {realSampleFormat} not implemented.");
}
buffer.Data = convertedSamples;
}
_realSession.QueueBuffer(buffer);
}
public override bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (samples == null)
{
return false;
}
if (_userChannelCount != _realSession.RequestedChannelCount)
{
if (_userSampleFormat != SampleFormat.PcmInt16)
{
throw new NotImplementedException("Downmixing formats other than PCM16 is not supported.");
}
ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);
if (_userChannelCount == 6)
{
samplesPCM16 = Downmixing.DownMixSurroundToStereo(samplesPCM16);
if (_realSession.RequestedChannelCount == 1)
{
samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
}
}
else if (_userChannelCount == 2 && _realSession.RequestedChannelCount == 1)
{
samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
}
else
{
throw new NotImplementedException($"Downmixing from {_userChannelCount} to {_realSession.RequestedChannelCount} not implemented.");
}
samples = MemoryMarshal.Cast<short, byte>(samplesPCM16).ToArray();
}
AudioBuffer fakeBuffer = new AudioBuffer
{
BufferTag = buffer.BufferTag,
DataPointer = buffer.DataPointer,
DataSize = (ulong)samples.Length
};
bool result = _realSession.RegisterBuffer(fakeBuffer, samples);
if (result)
{
buffer.Data = fakeBuffer.Data;
buffer.DataSize = fakeBuffer.DataSize;
}
return result;
}
public override void SetVolume(float volume)
{
_realSession.SetVolume(volume);
}
public override void Start()
{
_realSession.Start();
}
public override void Stop()
{
_realSession.Stop();
}
public override void UnregisterBuffer(AudioBuffer buffer)
{
_realSession.UnregisterBuffer(buffer);
}
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return _realSession.WasBufferFullyConsumed(buffer);
}
}
}
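
A worked size accounting for the RegisterBuffer path above, using hypothetical numbers (96 frames of 5.1 PCM16 audio):

static class DownmixSizeSketch
{
    static void Main()
    {
        const int frames = 96; // hypothetical frame count

        // 5.1 PCM16 guest data: 96 frames * 6 channels * 2 bytes = 1152 bytes.
        int guestBytes = frames * 6 * sizeof(short);

        // After DownMixSurroundToStereo: 96 * 2 * 2 = 384 bytes. The fake buffer registered with
        // the real session reports this smaller DataSize while keeping the original BufferTag and
        // DataPointer from the guest buffer.
        int hostBytes = frames * 2 * sizeof(short);

        System.Console.WriteLine($"{guestBytes} guest bytes -> {hostBytes} host bytes after downmix");
    }
}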

View file

@ -0,0 +1,125 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Backends.CompatLayer
{
public static class Downmixing
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
private struct Channel51FormatPCM16
{
public short FrontLeft;
public short FrontRight;
public short FrontCenter;
public short LowFrequency;
public short BackLeft;
public short BackRight;
}
[StructLayout(LayoutKind.Sequential, Pack = 1)]
private struct ChannelStereoFormatPCM16
{
public short Left;
public short Right;
}
private const int Q15Bits = 16;
private const int RawQ15One = 1 << Q15Bits;
private const int RawQ15HalfOne = (int)(0.5f * RawQ15One);
private const int Minus3dBInQ15 = (int)(0.707f * RawQ15One);
private const int Minus6dBInQ15 = (int)(0.501f * RawQ15One);
private const int Minus12dBInQ15 = (int)(0.251f * RawQ15One);
private static readonly int[] DefaultSurroundToStereoCoefficients = new int[4]
{
RawQ15One,
Minus3dBInQ15,
Minus12dBInQ15,
Minus3dBInQ15
};
private static readonly int[] DefaultStereoToMonoCoefficients = new int[2]
{
Minus6dBInQ15,
Minus6dBInQ15
};
private const int SurroundChannelCount = 6;
private const int StereoChannelCount = 2;
private const int MonoChannelCount = 1;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<Channel51FormatPCM16> GetSurroundBuffer(ReadOnlySpan<short> data)
{
return MemoryMarshal.Cast<short, Channel51FormatPCM16>(data);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static ReadOnlySpan<ChannelStereoFormatPCM16> GetStereoBuffer(ReadOnlySpan<short> data)
{
return MemoryMarshal.Cast<short, ChannelStereoFormatPCM16>(data);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short DownMixStereoToMono(ReadOnlySpan<int> coefficients, short left, short right)
{
return (short)((left * coefficients[0] + right * coefficients[1]) >> Q15Bits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, short back, short lfe, short center, short front)
{
return (short)((coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front + RawQ15HalfOne) >> Q15Bits);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short[] DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
{
int samplePerChannelCount = data.Length / SurroundChannelCount;
short[] downmixedBuffer = new short[samplePerChannelCount * StereoChannelCount];
ReadOnlySpan<Channel51FormatPCM16> channels = GetSurroundBuffer(data);
for (int i = 0; i < samplePerChannelCount; i++)
{
Channel51FormatPCM16 channel = channels[i];
downmixedBuffer[i * 2] = DownMixSurroundToStereo(coefficients, channel.BackLeft, channel.LowFrequency, channel.FrontCenter, channel.FrontLeft);
downmixedBuffer[i * 2 + 1] = DownMixSurroundToStereo(coefficients, channel.BackRight, channel.LowFrequency, channel.FrontCenter, channel.FrontRight);
}
return downmixedBuffer;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short[] DownMixStereoToMono(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
{
int samplePerChannelCount = data.Length / StereoChannelCount;
short[] downmixedBuffer = new short[samplePerChannelCount * MonoChannelCount];
ReadOnlySpan<ChannelStereoFormatPCM16> channels = GetStereoBuffer(data);
for (int i = 0; i < samplePerChannelCount; i++)
{
ChannelStereoFormatPCM16 channel = channels[i];
downmixedBuffer[i] = DownMixStereoToMono(coefficients, channel.Left, channel.Right);
}
return downmixedBuffer;
}
public static short[] DownMixStereoToMono(ReadOnlySpan<short> data)
{
return DownMixStereoToMono(DefaultStereoToMonoCoefficients, data);
}
public static short[] DownMixSurroundToStereo(ReadOnlySpan<short> data)
{
return DownMixSurroundToStereo(DefaultSurroundToStereoCoefficients, data);
}
}
}
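
A small sketch of the public downmix entry points with hand-picked PCM16 samples; the expected values in the comments are approximations of the Q15 math above:

using Ryujinx.Audio.Backends.CompatLayer;
using System;

class DownmixingExample
{
    static void Main()
    {
        // Two interleaved stereo frames: (L, R) = (1000, 3000) and (-2000, -2000).
        short[] stereo = { 1000, 3000, -2000, -2000 };

        // Each mono sample is (L * c + R * c) >> 16 with c = Minus6dBInQ15 (~0.501),
        // i.e. about (1000 + 3000) * 0.501 and (-2000 - 2000) * 0.501.
        short[] mono = Downmixing.DownMixStereoToMono(stereo);

        // One 5.1 frame in struct order: FrontLeft, FrontRight, FrontCenter, LowFrequency, BackLeft, BackRight.
        short[] surround = { 1000, 1000, 500, 200, 300, 300 };
        short[] stereoFromSurround = Downmixing.DownMixSurroundToStereo(surround);

        Console.WriteLine(string.Join(", ", mono));
        Console.WriteLine(string.Join(", ", stereoFromSurround));
    }
}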

View file

@ -0,0 +1,89 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.Dummy
{
public class DummyHardwareDeviceDriver : IHardwareDeviceDriver
{
private ManualResetEvent _updateRequiredEvent;
private ManualResetEvent _pauseEvent;
public static bool IsSupported => true;
public DummyHardwareDeviceDriver()
{
_updateRequiredEvent = new ManualResetEvent(false);
_pauseEvent = new ManualResetEvent(true);
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
{
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
if (channelCount == 0)
{
channelCount = 2;
}
if (direction == Direction.Output)
{
return new DummyHardwareDeviceSessionOutput(this, memoryManager, sampleFormat, sampleRate, channelCount, volume);
}
else
{
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _updateRequiredEvent;
}
public ManualResetEvent GetPauseEvent()
{
return _pauseEvent;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// NOTE: The _updateRequiredEvent will be disposed somewhere else.
_pauseEvent.Dispose();
}
}
public bool SupportsSampleRate(uint sampleRate)
{
return true;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
return true;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Output || direction == Direction.Input;
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
}
}

View file

@ -0,0 +1,67 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System;
namespace Ryujinx.Audio.Backends.Dummy
{
class DummyHardwareDeviceSessionInput : IHardwareDeviceSession
{
private float _volume;
private IHardwareDeviceDriver _manager;
private IVirtualMemoryManager _memoryManager;
public DummyHardwareDeviceSessionInput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
_volume = 1.0f;
_manager = manager;
_memoryManager = memoryManager;
}
public void Dispose()
{
// Nothing to do.
}
public ulong GetPlayedSampleCount()
{
// Not implemented for input.
throw new NotSupportedException();
}
public float GetVolume()
{
return _volume;
}
public void PrepareToClose() { }
public void QueueBuffer(AudioBuffer buffer)
{
_memoryManager.Fill(buffer.DataPointer, buffer.DataSize, 0);
_manager.GetUpdateRequiredEvent().Set();
}
public bool RegisterBuffer(AudioBuffer buffer)
{
return buffer.DataPointer != 0;
}
public void SetVolume(float volume)
{
_volume = volume;
}
public void Start() { }
public void Stop() { }
public void UnregisterBuffer(AudioBuffer buffer) { }
public bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}

View file

@ -0,0 +1,62 @@
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
namespace Ryujinx.Audio.Backends.Dummy
{
internal class DummyHardwareDeviceSessionOutput : HardwareDeviceSessionOutputBase
{
private float _volume;
private IHardwareDeviceDriver _manager;
private ulong _playedSampleCount;
public DummyHardwareDeviceSessionOutput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount, float requestedVolume) : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
{
_volume = requestedVolume;
_manager = manager;
}
public override void Dispose()
{
// Nothing to do.
}
public override ulong GetPlayedSampleCount()
{
return Interlocked.Read(ref _playedSampleCount);
}
public override float GetVolume()
{
return _volume;
}
public override void PrepareToClose() { }
public override void QueueBuffer(AudioBuffer buffer)
{
Interlocked.Add(ref _playedSampleCount, GetSampleCount(buffer));
_manager.GetUpdateRequiredEvent().Set();
}
public override void SetVolume(float volume)
{
_volume = volume;
}
public override void Start() { }
public override void Stop() { }
public override void UnregisterBuffer(AudioBuffer buffer) { }
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}