
optimize tonemapping performance (#2037)

* add env var to disable vulkan

* tonemap after scaling

* vulkan tonemapping still needs to happen before scaling
Jason Dove committed 2 months ago (via GitHub)
commit 4a59dafe51
8 changed files:

1. CHANGELOG.md (5 changes)
2. ErsatzTV.FFmpeg/Filter/TonemapFilter.cs (2 changes)
3. ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (42 changes)
4. ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs (3 changes)
5. ErsatzTV.FFmpeg/Pipeline/PipelineContext.cs (3 changes)
6. ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (2 changes)
7. ErsatzTV.FFmpeg/Pipeline/SoftwarePipelineBuilder.cs (2 changes)
8. ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (4 changes)

CHANGELOG.md (5 changes)

@@ -26,8 +26,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - VAAPI may use hardware-accelerated tone mapping (when opencl accel is also available)
 - NVIDIA may use hardware-accelerated tone mapping (when vulkan accel and libplacebo filter are also available)
 - QSV may use hardware-accelerated tone mapping (when hardware decoding is used)
-- In all other cases, HDR content will use a software pipeline and the clip algorithm
+- In all other cases, HDR content will use a software pipeline and the linear algorithm
 - Use hardware-accelerated padding with VAAPI
+- Add environment variable `ETV_DISABLE_VULKAN`
+  - Any non-empty value will disable use of Vulkan acceleration and force software tonemapping
+  - This may be needed with misbehaving NVIDIA drivers on Windows

 ### Changed
 - Start to make UI minimally responsive (functional on smaller screens)
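The new environment variable is a blunt switch: the check (visible in the NvidiaPipelineBuilder hunk below) treats any non-empty, non-whitespace value as "disable". A minimal sketch of the gate, with names local to this example:

    using System;

    // Any non-empty, non-whitespace value (e.g. ETV_DISABLE_VULKAN=1) disables
    // Vulkan tonemapping and forces the software path.
    bool vulkanDisabled =
        !string.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("ETV_DISABLE_VULKAN"));

So `ETV_DISABLE_VULKAN=1`, `=true`, or `=yes` all behave identically; unsetting the variable (or setting it to only whitespace) re-enables Vulkan.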

ErsatzTV.FFmpeg/Filter/TonemapFilter.cs (2 changes)

@@ -20,7 +20,7 @@ public class TonemapFilter : BaseFilter
         string pixelFormat = _currentState.PixelFormat.Match(pf => pf.FFmpegName, () => string.Empty);

         var tonemap =
-            $"zscale=transfer=linear,tonemap=clip,zscale=transfer=bt709,format={_desiredPixelFormat.FFmpegName}";
+            $"zscale=transfer=linear,tonemap=linear,zscale=transfer=bt709,format={_desiredPixelFormat.FFmpegName}";

         if (_currentState.FrameDataLocation == FrameDataLocation.Hardware)
         {
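With the algorithm swapped from clip to linear, the interpolated filter string reads as follows; a sketch assuming a hypothetical desired pixel format whose FFmpegName is "yuv420p":

    // The chain: linearize the HDR transfer, tone map with ffmpeg's "linear"
    // algorithm, convert to the BT.709 (SDR) transfer, then land on the desired
    // pixel format. ("yuv420p" is an assumed example value for
    // _desiredPixelFormat.FFmpegName, not something this diff shows.)
    var tonemap = "zscale=transfer=linear,tonemap=linear,zscale=transfer=bt709,format=yuv420p";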

ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (42 changes)

@@ -80,7 +80,8 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
         bool isHdrTonemap = decodeCapability == FFmpegCapability.Hardware
                             && _ffmpegCapabilities.HasHardwareAcceleration(HardwareAccelerationMode.Vulkan)
-                            && videoStream.ColorParams.IsHdr;
+                            && videoStream.ColorParams.IsHdr
+                            && string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable("ETV_DISABLE_VULKAN"));

         if (decodeCapability == FFmpegCapability.Hardware || encodeCapability == FFmpegCapability.Hardware)
         {

@@ -181,9 +182,20 @@
         //     desiredState = desiredState with { PixelFormat = Some(pixelFormat) };
         // }

-        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
+        // vulkan scale doesn't seem to handle HDR, so we need to tonemap before scaling
+        if (ffmpegState.IsHdrTonemap)
+        {
+            currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
+        }
+
         currentState = SetDeinterlace(videoInputFile, context, currentState);
         currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
+
+        if (!ffmpegState.IsHdrTonemap)
+        {
+            currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
+        }
+
         currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
         currentState = SetCrop(videoInputFile, desiredState, currentState);
         SetStillImageLoop(videoInputFile, videoStream, ffmpegState, desiredState, pipelineSteps);

@@ -673,19 +685,19 @@
             scaleStep = new ScaleCudaFilter(
                 currentState with
                 {
-                    PixelFormat = !context.Is10BitOutput && (context.HasWatermark ||
+                    PixelFormat = context is { IsHdr: false, Is10BitOutput: false } && (context.HasWatermark ||
                                       context.HasSubtitleOverlay ||
                                       context.ShouldDeinterlace ||
                                       desiredState.ScaledSize != desiredState.PaddedSize ||
                                       context.HasSubtitleText ||
                                       ffmpegState is
                                       {
                                           DecoderHardwareAccelerationMode:
                                               HardwareAccelerationMode.Nvenc,
                                           EncoderHardwareAccelerationMode:
                                               HardwareAccelerationMode.None
                                       })
-                        ? desiredState.PixelFormat.Map(pf => (IPixelFormat)new PixelFormatNv12(pf.Name))
+                        ? desiredState.PixelFormat.Map(IPixelFormat (pf) => new PixelFormatNv12(pf.Name))
                         : Option<IPixelFormat>.None
                 },
                 desiredState.ScaledSize,
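Two details in the last hunk are worth unpacking. First, `context is { IsHdr: false, Is10BitOutput: false }` is a C# property pattern, equivalent to `!context.IsHdr && !context.Is10BitOutput` (plus an implicit null check), so HDR frames are no longer forced down to NV12 by the CUDA scaler before they reach the tonemap step. Second, `Map(IPixelFormat (pf) => ...)` replaces the old cast with C# 10's explicit lambda return type. A self-contained sketch of the property pattern, using a hypothetical record in place of PipelineContext:

    // Hypothetical stand-in record, trimmed to the two flags the pattern inspects.
    record Ctx(bool IsHdr, bool Is10BitOutput);

    static class PatternDemo
    {
        // True only when ctx is non-null and both flags are false:
        // equivalent to ctx != null && !ctx.IsHdr && !ctx.Is10BitOutput.
        public static bool MayForceNv12(Ctx ctx) =>
            ctx is { IsHdr: false, Is10BitOutput: false };
    }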

ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs (3 changes)

@@ -199,7 +199,8 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
             _subtitleInputFile.Map(s => s is { IsImageBased: false, Method: SubtitleMethod.Burn }).IfNone(false),
             desiredState.Deinterlaced,
             desiredState.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10,
-            false);
+            false,
+            videoStream.ColorParams.IsHdr);

         SetThreadCount(ffmpegState, desiredState, pipelineSteps);
         SetSceneDetect(videoStream, ffmpegState, desiredState, pipelineSteps);

ErsatzTV.FFmpeg/Pipeline/PipelineContext.cs (3 changes)

@@ -7,4 +7,5 @@ public record PipelineContext(
     bool HasSubtitleText,
     bool ShouldDeinterlace,
     bool Is10BitOutput,
-    bool IsIntelVaapiOrQsv);
+    bool IsIntelVaapiOrQsv,
+    bool IsHdr);
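PipelineContext is a positional record, so adding IsHdr is a compile-time breaking change: every construction site must supply the new argument, which is why the PipelineBuilderBase hunk above appends videoStream.ColorParams.IsHdr. A runnable toy illustration (DemoContext is hypothetical, trimmed to the tail of the real parameter list):

    using System;

    var ctx = new DemoContext(Is10BitOutput: false, IsIntelVaapiOrQsv: false, IsHdr: true);
    Console.WriteLine(ctx); // DemoContext { Is10BitOutput = False, IsIntelVaapiOrQsv = False, IsHdr = True }

    // Hypothetical stand-in for PipelineContext; the real record has more parameters.
    record DemoContext(bool Is10BitOutput, bool IsIntelVaapiOrQsv, bool IsHdr);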

ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (2 changes)

@@ -169,11 +169,11 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
         }

         // _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
-        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
         currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
         // _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
         currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
         // _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat);
+        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
         currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
         // _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat);
         currentState = SetCrop(videoInputFile, desiredState, currentState);
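Moving SetTonemap from before deinterlace/scale to after SetScale is the performance win named in the commit title: when the output is smaller than the source (the usual transcoding case), the expensive tonemap filter now touches far fewer pixels per frame. A back-of-the-envelope illustration, assuming a hypothetical 4K-to-1080p downscale:

    using System;

    // Per-frame pixel counts for an assumed 3840x2160 source scaled to 1920x1080.
    long beforeScale = 3840L * 2160; // tonemap first: 8,294,400 pixels per frame
    long afterScale  = 1920L * 1080; // tonemap after scaling: 2,073,600 pixels per frame
    Console.WriteLine((double)beforeScale / afterScale); // 4 -> tonemap does a quarter of the work

The same reordering appears in SoftwarePipelineBuilder and VaapiPipelineBuilder below; only the NVIDIA Vulkan path keeps tonemap-before-scale, because (per the code comment above) vulkan scaling doesn't seem to handle HDR.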

ErsatzTV.FFmpeg/Pipeline/SoftwarePipelineBuilder.cs (2 changes)

@@ -104,8 +104,8 @@ public class SoftwarePipelineBuilder : PipelineBuilderBase
     {
         SetDeinterlace(videoInputFile, context, currentState);
-        currentState = SetTonemap(videoInputFile, videoStream, desiredState, currentState);
         currentState = SetScale(videoInputFile, videoStream, desiredState, currentState);
+        currentState = SetTonemap(videoInputFile, videoStream, desiredState, currentState);
         currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
         currentState = SetCrop(videoInputFile, desiredState, currentState);
         SetStillImageLoop(videoInputFile, videoStream, ffmpegState, desiredState, pipelineSteps);

ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (4 changes)

@@ -171,14 +171,14 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
         // _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
-        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
         currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
         // _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
         currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
         // _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat);
+        currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
         currentState = SetPad(videoInputFile, desiredState, currentState);
         // _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat);
