Browse Source

optimize tonemapping performance (#2037)

* add env var to disable vulkan

* tonemap after scaling

* vulkan tonemapping still needs to happen before scaling
pull/2038/head
Jason Dove 1 week ago committed by GitHub
parent
commit
4a59dafe51
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 5
      CHANGELOG.md
  2. 2
      ErsatzTV.FFmpeg/Filter/TonemapFilter.cs
  3. 42
      ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs
  4. 3
      ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs
  5. 3
      ErsatzTV.FFmpeg/Pipeline/PipelineContext.cs
  6. 2
      ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs
  7. 2
      ErsatzTV.FFmpeg/Pipeline/SoftwarePipelineBuilder.cs
  8. 4
      ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs

5
CHANGELOG.md

@@ -26,8 +26,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- VAAPI may use hardware-accelerated tone mapping (when opencl accel is also available)
- NVIDIA may use hardware-accelerated tone mapping (when vulkan accel and libplacebo filter are also available)
- QSV may use hardware-accelerated tone mapping (when hardware decoding is used)
- In all other cases, HDR content will use a software pipeline and the clip algorithm
- In all other cases, HDR content will use a software pipeline and the linear algorithm
- Use hardware-accelerated padding with VAAPI
- Add environment variable `ETV_DISABLE_VULKAN`
- Any non-empty value will disable use of Vulkan acceleration and force software tonemapping
- This may be needed with misbehaving NVIDIA drivers on Windows
### Changed
- Start to make UI minimally responsive (functional on smaller screens)

2
ErsatzTV.FFmpeg/Filter/TonemapFilter.cs

@@ -20,7 +20,7 @@ public class TonemapFilter : BaseFilter
string pixelFormat = _currentState.PixelFormat.Match(pf => pf.FFmpegName, () => string.Empty);
var tonemap =
$"zscale=transfer=linear,tonemap=clip,zscale=transfer=bt709,format={_desiredPixelFormat.FFmpegName}";
$"zscale=transfer=linear,tonemap=linear,zscale=transfer=bt709,format={_desiredPixelFormat.FFmpegName}";
if (_currentState.FrameDataLocation == FrameDataLocation.Hardware)
{

42
ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs

@@ -80,7 +80,8 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
bool isHdrTonemap = decodeCapability == FFmpegCapability.Hardware
&& _ffmpegCapabilities.HasHardwareAcceleration(HardwareAccelerationMode.Vulkan)
&& videoStream.ColorParams.IsHdr;
&& videoStream.ColorParams.IsHdr
&& string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable("ETV_DISABLE_VULKAN"));
if (decodeCapability == FFmpegCapability.Hardware || encodeCapability == FFmpegCapability.Hardware)
{
@@ -181,9 +182,20 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
// desiredState = desiredState with { PixelFormat = Some(pixelFormat) };
// }
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
// vulkan scale doesn't seem to handle HDR, so we need to tonemap before scaling
if (ffmpegState.IsHdrTonemap)
{
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
}
currentState = SetDeinterlace(videoInputFile, context, currentState);
currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
if (!ffmpegState.IsHdrTonemap)
{
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
}
currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
currentState = SetCrop(videoInputFile, desiredState, currentState);
SetStillImageLoop(videoInputFile, videoStream, ffmpegState, desiredState, pipelineSteps);
@@ -673,19 +685,19 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
scaleStep = new ScaleCudaFilter(
currentState with
{
PixelFormat = !context.Is10BitOutput && (context.HasWatermark ||
context.HasSubtitleOverlay ||
context.ShouldDeinterlace ||
desiredState.ScaledSize != desiredState.PaddedSize ||
context.HasSubtitleText ||
ffmpegState is
{
DecoderHardwareAccelerationMode:
HardwareAccelerationMode.Nvenc,
EncoderHardwareAccelerationMode:
HardwareAccelerationMode.None
})
? desiredState.PixelFormat.Map(pf => (IPixelFormat)new PixelFormatNv12(pf.Name))
PixelFormat = context is { IsHdr: false, Is10BitOutput: false } && (context.HasWatermark ||
context.HasSubtitleOverlay ||
context.ShouldDeinterlace ||
desiredState.ScaledSize != desiredState.PaddedSize ||
context.HasSubtitleText ||
ffmpegState is
{
DecoderHardwareAccelerationMode:
HardwareAccelerationMode.Nvenc,
EncoderHardwareAccelerationMode:
HardwareAccelerationMode.None
})
? desiredState.PixelFormat.Map(IPixelFormat (pf) => new PixelFormatNv12(pf.Name))
: Option<IPixelFormat>.None
},
desiredState.ScaledSize,

3
ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs

@@ -199,7 +199,8 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
_subtitleInputFile.Map(s => s is { IsImageBased: false, Method: SubtitleMethod.Burn }).IfNone(false),
desiredState.Deinterlaced,
desiredState.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10,
false);
false,
videoStream.ColorParams.IsHdr);
SetThreadCount(ffmpegState, desiredState, pipelineSteps);
SetSceneDetect(videoStream, ffmpegState, desiredState, pipelineSteps);

3
ErsatzTV.FFmpeg/Pipeline/PipelineContext.cs

@@ -7,4 +7,5 @@ public record PipelineContext(
bool HasSubtitleText,
bool ShouldDeinterlace,
bool Is10BitOutput,
bool IsIntelVaapiOrQsv);
bool IsIntelVaapiOrQsv,
bool IsHdr);

2
ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs

@@ -169,11 +169,11 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
}
// _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
// _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
// _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat);
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
// _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat);
currentState = SetCrop(videoInputFile, desiredState, currentState);

2
ErsatzTV.FFmpeg/Pipeline/SoftwarePipelineBuilder.cs

@@ -104,8 +104,8 @@ public class SoftwarePipelineBuilder : PipelineBuilderBase
{
SetDeinterlace(videoInputFile, context, currentState);
currentState = SetTonemap(videoInputFile, videoStream, desiredState, currentState);
currentState = SetScale(videoInputFile, videoStream, desiredState, currentState);
currentState = SetTonemap(videoInputFile, videoStream, desiredState, currentState);
currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
currentState = SetCrop(videoInputFile, desiredState, currentState);
SetStillImageLoop(videoInputFile, videoStream, ffmpegState, desiredState, pipelineSteps);

4
ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs

@@ -171,14 +171,14 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
// _logger.LogDebug("After decode: {PixelFormat}", currentState.PixelFormat);
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
currentState = SetDeinterlace(videoInputFile, context, ffmpegState, currentState);
// _logger.LogDebug("After deinterlace: {PixelFormat}", currentState.PixelFormat);
currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
// _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat);
currentState = SetTonemap(videoInputFile, videoStream, ffmpegState, desiredState, currentState);
currentState = SetPad(videoInputFile, desiredState, currentState);
// _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat);

Loading…
Cancel
Save