mirror of https://github.com/Ryujinx/Ryujinx.git synced 2024-12-30 11:46:02 +00:00
Ryujinx/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceSession.cs
Last commit: Mary-nyan 1825bd87b4, 2022-07-25 15:46:33 -03:00
misc: Reformat Ryujinx.Audio with dotnet-format (#3485)

This is the first commit in a series of reformats across the codebase, as discussed internally some weeks ago.

Since this project isn't touched much, it shouldn't cause conflicts with any open PRs.

using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using System;
using System.Runtime.InteropServices;

namespace Ryujinx.Audio.Backends.CompatLayer
{
    class CompatLayerHardwareDeviceSession : HardwareDeviceSessionOutputBase
    {
        private HardwareDeviceSessionOutputBase _realSession;
        private uint _userChannelCount;

        public CompatLayerHardwareDeviceSession(HardwareDeviceSessionOutputBase realSession, uint userChannelCount) : base(realSession.MemoryManager, realSession.RequestedSampleFormat, realSession.RequestedSampleRate, userChannelCount)
        {
            _realSession = realSession;
            _userChannelCount = userChannelCount;
        }

        public override void Dispose()
        {
            _realSession.Dispose();
        }

        public override ulong GetPlayedSampleCount()
        {
            return _realSession.GetPlayedSampleCount();
        }

        public override float GetVolume()
        {
            return _realSession.GetVolume();
        }

        public override void PrepareToClose()
        {
            _realSession.PrepareToClose();
        }

        public override void QueueBuffer(AudioBuffer buffer)
        {
            _realSession.QueueBuffer(buffer);
        }

        public override bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
        {
            if (RequestedSampleFormat != SampleFormat.PcmInt16)
            {
                throw new NotImplementedException("Downmixing formats other than PCM16 is not supported.");
            }

            if (samples == null)
            {
                return false;
            }

            short[] downmixedBufferPCM16;

            ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);

            if (_userChannelCount == 6)
            {
                downmixedBufferPCM16 = Downmixing.DownMixSurroundToStereo(samplesPCM16);

                if (_realSession.RequestedChannelCount == 1)
                {
                    downmixedBufferPCM16 = Downmixing.DownMixStereoToMono(downmixedBufferPCM16);
                }
            }
            else if (_userChannelCount == 2 && _realSession.RequestedChannelCount == 1)
            {
                downmixedBufferPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
            }
            else
            {
                throw new NotImplementedException($"Downmixing from {_userChannelCount} to {_realSession.RequestedChannelCount} not implemented.");
            }

            byte[] downmixedBuffer = MemoryMarshal.Cast<short, byte>(downmixedBufferPCM16).ToArray();

            AudioBuffer fakeBuffer = new AudioBuffer
            {
                BufferTag = buffer.BufferTag,
                DataPointer = buffer.DataPointer,
                DataSize = (ulong)downmixedBuffer.Length
            };

            bool result = _realSession.RegisterBuffer(fakeBuffer, downmixedBuffer);

            if (result)
            {
                buffer.Data = fakeBuffer.Data;
                buffer.DataSize = fakeBuffer.DataSize;
            }

            return result;
        }

        public override void SetVolume(float volume)
        {
            _realSession.SetVolume(volume);
        }

        public override void Start()
        {
            _realSession.Start();
        }

        public override void Stop()
        {
            _realSession.Stop();
        }

        public override void UnregisterBuffer(AudioBuffer buffer)
        {
            _realSession.UnregisterBuffer(buffer);
        }

        public override bool WasBufferFullyConsumed(AudioBuffer buffer)
        {
            return _realSession.WasBufferFullyConsumed(buffer);
        }
    }
}
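
For context, the interesting work happens in RegisterBuffer: the caller's PCM16 byte buffer is reinterpreted as 16-bit samples, downmixed to the channel count the real backend requested, and re-registered as a smaller buffer. The standalone sketch below illustrates just that reinterpret, downmix, reinterpret round trip outside the emulator. It is a minimal example under stated assumptions: AverageStereoToMono is a hypothetical stand-in for Ryujinx's Downmixing.DownMixStereoToMono that simply averages each left/right pair, and may not match the coefficients the real helper uses.

using System;
using System.Runtime.InteropServices;

static class DownmixSketch
{
    // Hypothetical stand-in for Downmixing.DownMixStereoToMono: averages each L/R pair.
    // The real Ryujinx helper may apply different coefficients.
    static short[] AverageStereoToMono(ReadOnlySpan<short> stereo)
    {
        short[] mono = new short[stereo.Length / 2];

        for (int i = 0; i < mono.Length; i++)
        {
            mono[i] = (short)((stereo[i * 2] + stereo[i * 2 + 1]) / 2);
        }

        return mono;
    }

    static void Main()
    {
        // A PCM16 stereo buffer arrives as raw bytes (two interleaved channels).
        short[] interleaved = { 1000, 2000, -500, 500, 32000, -32000 };
        byte[] samples = MemoryMarshal.Cast<short, byte>(interleaved).ToArray();

        // Same steps as RegisterBuffer: bytes -> shorts, downmix, shorts -> bytes.
        ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);
        short[] monoPCM16 = AverageStereoToMono(samplesPCM16);
        byte[] downmixedBuffer = MemoryMarshal.Cast<short, byte>(monoPCM16).ToArray();

        Console.WriteLine($"{samples.Length} bytes of stereo -> {downmixedBuffer.Length} bytes of mono");
    }
}

The downmixed array is what would be handed to the real session as its sample data, while the original AudioBuffer keeps its tag and pointer so the guest-facing bookkeeping is unchanged, which mirrors how RegisterBuffer builds fakeBuffer above.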