From d82ccf8fb56c68af58c257ec91309b1d90d43bb8 Mon Sep 17 00:00:00 2001
From: Jason Dove <1695733+jasongdove@users.noreply.github.com>
Date: Fri, 13 Jun 2025 20:19:35 -0500
Subject: [PATCH] use hardware-accelerated tonemapping with qsv (#2034)

* add tonemap for qsv

* update changelog
---
 CHANGELOG.md                               |  1 +
 .../Artworks/Queries/GetArtwork.cs         | 12 +++----
 .../Filter/Qsv/TonemapQsvFilter.cs         | 12 +++++++
 .../Pipeline/PipelineBuilderFactory.cs     | 12 +++----
 .../Pipeline/QsvPipelineBuilder.cs         | 36 +++++++++++++++++--
 5 files changed, 59 insertions(+), 14 deletions(-)
 create mode 100644 ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1c475b91..c2877c9f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Add basic HDR transcoding support
   - VAAPI may use hardware-accelerated tone mapping (when opencl accel is also available)
   - NVIDIA may use hardware-accelerated tone mapping (when vulkan accel and libplacebo filter are also available)
+  - QSV may use hardware-accelerated tone mapping (when hardware decoding is used)
   - In all other cases, HDR content will use a software pipeline and the clip algorithm
 
 ### Changed
diff --git a/ErsatzTV.Application/Artworks/Queries/GetArtwork.cs b/ErsatzTV.Application/Artworks/Queries/GetArtwork.cs
index b59abd67..187369e0 100644
--- a/ErsatzTV.Application/Artworks/Queries/GetArtwork.cs
+++ b/ErsatzTV.Application/Artworks/Queries/GetArtwork.cs
@@ -1,6 +1,6 @@
-using ErsatzTV.Core;
-using ErsatzTV.Core.Domain;
-
-namespace ErsatzTV.Application.Artworks;
-
-public record GetArtwork(int Id) : IRequest<Either<BaseError, Artwork>>;
+using ErsatzTV.Core;
+using ErsatzTV.Core.Domain;
+
+namespace ErsatzTV.Application.Artworks;
+
+public record GetArtwork(int Id) : IRequest<Either<BaseError, Artwork>>;
diff --git a/ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs b/ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs
new file mode 100644
index 00000000..9fc05dca
--- /dev/null
+++ b/ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs
@@ -0,0 +1,12 @@
+namespace ErsatzTV.FFmpeg.Filter.Qsv;
+
+public class TonemapQsvFilter : BaseFilter
+{
+    public override string Filter => "vpp_qsv=tonemap=1";
+
+    public override FrameState NextState(FrameState currentState) =>
+        currentState with
+        {
+            FrameDataLocation = FrameDataLocation.Hardware
+        };
+}
diff --git a/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs b/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs
index bb6bf882..f910fa40 100644
--- a/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs
+++ b/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs
@@ -70,10 +70,10 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory
                 fontsFolder,
                 _logger),
 
-            // force software pipeline when content is HDR (and not VAAPI or NVENC)
-            _ when isHdrContent => new SoftwarePipelineBuilder(
+            HardwareAccelerationMode.Qsv when capabilities is not NoHardwareCapabilities => new QsvPipelineBuilder(
                 ffmpegCapabilities,
-                HardwareAccelerationMode.None,
+                capabilities,
+                hardwareAccelerationMode,
                 videoInputFile,
                 audioInputFile,
                 watermarkInputFile,
@@ -83,10 +83,10 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory
                 fontsFolder,
                 _logger),
 
-            HardwareAccelerationMode.Qsv when capabilities is not NoHardwareCapabilities => new QsvPipelineBuilder(
+            // force software pipeline when content is HDR (and not VAAPI or NVENC or QSV)
+            _ when isHdrContent => new SoftwarePipelineBuilder(
                 ffmpegCapabilities,
-                capabilities,
-                hardwareAccelerationMode,
+                HardwareAccelerationMode.None,
                 videoInputFile,
                 audioInputFile,
                 watermarkInputFile,
diff --git a/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs
index 637d89d9..a01f0254 100644
--- a/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs
+++ b/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs
@@ -76,7 +76,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
 
         pipelineSteps.Add(new QsvHardwareAccelerationOption(ffmpegState.VaapiDevice));
 
-        bool isHevcOrH264 = videoStream.Codec is VideoFormat.Hevc or VideoFormat.H264;
+        bool isHevcOrH264 = videoStream.Codec is /*VideoFormat.Hevc or*/ VideoFormat.H264;
         bool is10Bit = videoStream.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10;
 
         // 10-bit hevc/h264 qsv decoders have issues, so use software
@@ -169,6 +169,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
         }
 
         // _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
+        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
         currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
         // _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
         currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
@@ -272,7 +273,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
             IPixelFormat formatForDownload = pixelFormat;
 
             bool usesVppQsv =
-                videoInputFile.FilterSteps.Any(f => f is QsvFormatFilter or ScaleQsvFilter or DeinterlaceQsvFilter);
+                videoInputFile.FilterSteps.Any(f => f is QsvFormatFilter or ScaleQsvFilter or DeinterlaceQsvFilter or TonemapQsvFilter);
 
             // if we have no filters, check whether we need to convert pixel format
             // since qsv doesn't seem to like doing that at the encoder
@@ -638,4 +639,35 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
 
         return currentState;
     }
+
+    private static FrameState SetTonemap(
+        VideoInputFile videoInputFile,
+        VideoStream videoStream,
+        FFmpegState ffmpegState,
+        FrameState desiredState,
+        FrameState currentState)
+    {
+        if (videoStream.ColorParams.IsHdr)
+        {
+            foreach (IPixelFormat pixelFormat in desiredState.PixelFormat)
+            {
+                if (ffmpegState.DecoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv)
+                {
+                    var filter = new TonemapQsvFilter();
+                    currentState = filter.NextState(currentState);
+                    videoStream.ResetColorParams(ColorParams.Default);
+                    videoInputFile.FilterSteps.Add(filter);
+                }
+                else
+                {
+                    var filter = new TonemapFilter(currentState, pixelFormat);
+                    currentState = filter.NextState(currentState);
+                    videoStream.ResetColorParams(ColorParams.Default);
+                    videoInputFile.FilterSteps.Add(filter);
+                }
+            }
+        }
+
+        return currentState;
+    }
 }
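
Illustrative sketch (not part of the patch): the new TonemapQsvFilter only contributes the filter string "vpp_qsv=tonemap=1", which the QSV pipeline adds to the generated -vf chain when QSV hardware decoding is in use. A rough standalone ffmpeg command exercising the same hardware tone-mapping step could look like the line below; the input/output names are placeholders, a recent ffmpeg build with QSV support is assumed, and the commands ErsatzTV actually generates carry many more options:

    ffmpeg -hwaccel qsv -hwaccel_output_format qsv -c:v hevc_qsv -i hdr-input.mkv \
        -vf "vpp_qsv=tonemap=1" -c:v hevc_qsv sdr-output.mkv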