Browse Source

use hardware-accelerated tonemapping with qsv (#2034)

* add tonemap for qsv

* update changelog
pull/2036/head
Jason Dove 1 week ago committed by GitHub
parent
commit
d82ccf8fb5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 1
      CHANGELOG.md
  2. 12
      ErsatzTV.Application/Artworks/Queries/GetArtwork.cs
  3. 12
      ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs
  4. 12
      ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs
  5. 36
      ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs

1
CHANGELOG.md

@@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Add basic HDR transcoding support
- VAAPI may use hardware-accelerated tone mapping (when opencl accel is also available)
- NVIDIA may use hardware-accelerated tone mapping (when vulkan accel and libplacebo filter are also available)
- QSV may use hardware-accelerated tone mapping (when hardware decoding is used)
- In all other cases, HDR content will use a software pipeline and the clip algorithm
### Changed

12
ErsatzTV.Application/Artworks/Queries/GetArtwork.cs

@@ -1,6 +1,6 @@
using ErsatzTV.Core;
using ErsatzTV.Core.Domain;
namespace ErsatzTV.Application.Artworks;
public record GetArtwork(int Id) : IRequest<Either<BaseError, Artwork>>;
using ErsatzTV.Core;
using ErsatzTV.Core.Domain;
namespace ErsatzTV.Application.Artworks;
// Query for a single Artwork by database id; the handler resolves it to
// Either<BaseError, Artwork> (error side used when resolution fails).
public record GetArtwork(int Id) : IRequest<Either<BaseError, Artwork>>;

12
ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs

@@ -0,0 +1,12 @@
namespace ErsatzTV.FFmpeg.Filter.Qsv;

/// <summary>
/// Tone maps HDR frames to SDR on the QSV device using ffmpeg's vpp_qsv filter.
/// </summary>
public class TonemapQsvFilter : BaseFilter
{
    // vpp_qsv runs on the GPU; tonemap=1 enables its HDR-to-SDR conversion
    public override string Filter => "vpp_qsv=tonemap=1";

    public override FrameState NextState(FrameState currentState)
    {
        // frames remain in hardware (QSV) memory after this filter
        return currentState with { FrameDataLocation = FrameDataLocation.Hardware };
    }
}

12
ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs

@@ -70,10 +70,10 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory
fontsFolder,
_logger),
// force software pipeline when content is HDR (and not VAAPI or NVENC)
_ when isHdrContent => new SoftwarePipelineBuilder(
HardwareAccelerationMode.Qsv when capabilities is not NoHardwareCapabilities => new QsvPipelineBuilder(
ffmpegCapabilities,
HardwareAccelerationMode.None,
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
@@ -83,10 +83,10 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory
fontsFolder,
_logger),
HardwareAccelerationMode.Qsv when capabilities is not NoHardwareCapabilities => new QsvPipelineBuilder(
// force software pipeline when content is HDR (and not VAAPI or NVENC or QSV)
_ when isHdrContent => new SoftwarePipelineBuilder(
ffmpegCapabilities,
capabilities,
hardwareAccelerationMode,
HardwareAccelerationMode.None,
videoInputFile,
audioInputFile,
watermarkInputFile,

36
ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs

@@ -76,7 +76,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
pipelineSteps.Add(new QsvHardwareAccelerationOption(ffmpegState.VaapiDevice));
bool isHevcOrH264 = videoStream.Codec is VideoFormat.Hevc or VideoFormat.H264;
bool isHevcOrH264 = videoStream.Codec is /*VideoFormat.Hevc or*/ VideoFormat.H264;
bool is10Bit = videoStream.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10;
// 10-bit hevc/h264 qsv decoders have issues, so use software
@@ -169,6 +169,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
}
// _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
// _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
@@ -272,7 +273,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
IPixelFormat formatForDownload = pixelFormat;
bool usesVppQsv =
videoInputFile.FilterSteps.Any(f => f is QsvFormatFilter or ScaleQsvFilter or DeinterlaceQsvFilter);
videoInputFile.FilterSteps.Any(f => f is QsvFormatFilter or ScaleQsvFilter or DeinterlaceQsvFilter or TonemapQsvFilter);
// if we have no filters, check whether we need to convert pixel format
// since qsv doesn't seem to like doing that at the encoder
@@ -638,4 +639,35 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
return currentState;
}
/// <summary>
/// Adds a tone-mapping filter step for HDR content and returns the resulting frame state.
/// Uses the QSV hardware tonemap when decoding is done with QSV; otherwise falls back
/// to the software tonemap filter. SDR content passes through unchanged.
/// </summary>
private static FrameState SetTonemap(
    VideoInputFile videoInputFile,
    VideoStream videoStream,
    FFmpegState ffmpegState,
    FrameState desiredState,
    FrameState currentState)
{
    // nothing to do for SDR sources
    if (!videoStream.ColorParams.IsHdr)
    {
        return currentState;
    }

    // option-style foreach: body only runs when a desired pixel format is present
    foreach (IPixelFormat pixelFormat in desiredState.PixelFormat)
    {
        bool decodesWithQsv = ffmpegState.DecoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv;
        if (decodesWithQsv)
        {
            // decoded frames are already on the QSV device, so tone map there
            var hardwareTonemap = new TonemapQsvFilter();
            currentState = hardwareTonemap.NextState(currentState);
            videoStream.ResetColorParams(ColorParams.Default);
            videoInputFile.FilterSteps.Add(hardwareTonemap);
        }
        else
        {
            // software decode path: tone map on the CPU
            var softwareTonemap = new TonemapFilter(currentState, pixelFormat);
            currentState = softwareTonemap.NextState(currentState);
            videoStream.ResetColorParams(ColorParams.Default);
            videoInputFile.FilterSteps.Add(softwareTonemap);
        }
    }

    return currentState;
}
}

Loading…
Cancel
Save