Ryujinx/Ryujinx.Graphics.Nvdec/H264Decoder.cs
Commit 1402d8391d by gdkchan, 2022-03-23 17:09:32 -03:00
Support NVDEC H264 interlaced video decoding and VIC deinterlacing (#3225)

* Support NVDEC H264 interlaced video decoding and VIC deinterlacing
* Remove unused code

57 lines · 2.1 KiB · C#

using Ryujinx.Graphics.Nvdec.FFmpeg.H264;
using Ryujinx.Graphics.Nvdec.Image;
using Ryujinx.Graphics.Nvdec.Types.H264;
using Ryujinx.Graphics.Video;
using System;

namespace Ryujinx.Graphics.Nvdec
{
    static class H264Decoder
    {
        private const int MbSizeInPixels = 16;

        public static void Decode(NvdecDecoderContext context, ResourceManager rm, ref NvdecRegisters state)
        {
            PictureInfo pictureInfo = rm.Gmm.DeviceRead<PictureInfo>(state.SetPictureInfoOffset);
            H264PictureInfo info = pictureInfo.Convert();

            ReadOnlySpan<byte> bitstream = rm.Gmm.DeviceGetSpan(state.SetBitstreamOffset, (int)pictureInfo.BitstreamSize);

            int width = (int)pictureInfo.PicWidthInMbs * MbSizeInPixels;
            int height = (int)pictureInfo.PicHeightInMbs * MbSizeInPixels;

            int surfaceIndex = (int)pictureInfo.OutputSurfaceIndex;

            uint lumaOffset = state.SetSurfaceLumaOffset[surfaceIndex];
            uint chromaOffset = state.SetSurfaceChromaOffset[surfaceIndex];

            Decoder decoder = context.GetH264Decoder();

            ISurface outputSurface = rm.Cache.Get(decoder, 0, 0, width, height);

            if (decoder.Decode(ref info, outputSurface, bitstream))
            {
                if (outputSurface.Field == FrameField.Progressive)
                {
                    SurfaceWriter.Write(
                        rm.Gmm,
                        outputSurface,
                        lumaOffset + pictureInfo.LumaFrameOffset,
                        chromaOffset + pictureInfo.ChromaFrameOffset);
                }
                else
                {
                    SurfaceWriter.WriteInterlaced(
                        rm.Gmm,
                        outputSurface,
                        lumaOffset + pictureInfo.LumaTopFieldOffset,
                        chromaOffset + pictureInfo.ChromaTopFieldOffset,
                        lumaOffset + pictureInfo.LumaBottomFieldOffset,
                        chromaOffset + pictureInfo.ChromaBottomFieldOffset);
                }
            }

            rm.Cache.Put(outputSurface);
        }
    }
}
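
For context, below is a minimal sketch of how a caller might drive the decode entry point above: the guest is expected to have programmed the NVDEC register state (picture info offset, bitstream offset, output surface offsets) before the decode is submitted. The class name, its fields, and the hard-coded H264 path are assumptions made for illustration and are not part of this file; in the emulator itself the target codec is presumably selected from the register state rather than fixed.

// Hypothetical caller sketch (not part of this file): shows the calling
// convention of H264Decoder.Decode. Names and fields here are assumptions
// for illustration only.
namespace Ryujinx.Graphics.Nvdec
{
    class NvdecDispatchSketch
    {
        private readonly ResourceManager _rm;
        private readonly NvdecDecoderContext _h264Context;
        private NvdecRegisters _state;

        public NvdecDispatchSketch(ResourceManager rm, NvdecDecoderContext h264Context)
        {
            _rm = rm;
            _h264Context = h264Context;
        }

        public void DecodeH264()
        {
            // The register state is assumed to already describe one frame's
            // worth of work; Decode reads the picture info and bitstream from
            // guest memory and writes the decoded surface back out.
            H264Decoder.Decode(_h264Context, _rm, ref _state);
        }
    }
}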