Browse Source

qsv improvements

qsv-improvements
Jason Dove 6 months ago
parent
commit
ac413f731a
No known key found for this signature in database
  1. 15
      ErsatzTV.FFmpeg/Environment/CudaVisibleDevicesVariable.cs
  2. 8
      ErsatzTV.FFmpeg/Filter/Qsv/HardwareUploadQsvFilter.cs
  3. 21
      ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs
  4. 4
      ErsatzTV.FFmpeg/GlobalOption/HardwareAcceleration/QsvHardwareAccelerationOption.cs
  5. 12
      ErsatzTV.FFmpeg/OutputOption/ColorMetadataOutputOption.cs
  6. 22
      ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs
  7. 58
      ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs
  8. 3
      ErsatzTV.FFmpeg/Pipeline/VideoPipelineResult.cs

15
ErsatzTV.FFmpeg/Environment/CudaVisibleDevicesVariable.cs

@@ -0,0 +1,15 @@
namespace ErsatzTV.FFmpeg.Environment;
/// <summary>
/// Pipeline step that sets the <c>CUDA_VISIBLE_DEVICES</c> environment variable for the
/// ffmpeg process. Passing an empty string hides all CUDA devices (used to keep CUDA out
/// of non-NVIDIA pipelines). Contributes no command-line options and leaves frame state
/// unchanged.
/// </summary>
public class CudaVisibleDevicesVariable(string visibleDevices) : IPipelineStep
{
    // Only contribution of this step: the environment variable itself.
    public EnvironmentVariable[] EnvironmentVariables =>
        new EnvironmentVariable[] { new("CUDA_VISIBLE_DEVICES", visibleDevices) };

    public string[] GlobalOptions => Array.Empty<string>();

    public string[] InputOptions(InputFile inputFile) => Array.Empty<string>();

    public string[] FilterOptions => Array.Empty<string>();

    public string[] OutputOptions => Array.Empty<string>();

    // This step never alters the frame state; pass it through untouched.
    public FrameState NextState(FrameState currentState) => currentState;
}

8
ErsatzTV.FFmpeg/Filter/Qsv/HardwareUploadQsvFilter.cs

@@ -11,11 +11,9 @@ public class HardwareUploadQsvFilter : BaseFilter
_ffmpegState = ffmpegState;
}
public override string Filter => _currentState.FrameDataLocation switch
{
FrameDataLocation.Hardware => string.Empty,
_ => $"hwupload=extra_hw_frames={_ffmpegState.QsvExtraHardwareFrames}"
};
public override string Filter => _currentState.FrameDataLocation is FrameDataLocation.Software
? $"hwupload=extra_hw_frames={_ffmpegState.QsvExtraHardwareFrames},format=qsv"
: string.Empty;
public override FrameState NextState(FrameState currentState) =>
currentState with { FrameDataLocation = FrameDataLocation.Hardware };

21
ErsatzTV.FFmpeg/Filter/Qsv/TonemapQsvFilter.cs

@@ -1,12 +1,19 @@
using ErsatzTV.FFmpeg.Format;
namespace ErsatzTV.FFmpeg.Filter.Qsv;
public class TonemapQsvFilter : BaseFilter
public class TonemapQsvFilter(IPixelFormat desiredPixelFormat) : BaseFilter
{
public override string Filter => "vpp_qsv=tonemap=1";
public override string Filter =>
desiredPixelFormat.BitDepth == 8
? $"vpp_qsv=tonemap=1:format=nv12"
: $"vpp_qsv=tonemap=1:format=p010le";
public override FrameState NextState(FrameState currentState)
{
return desiredPixelFormat.BitDepth == 8
? currentState with { FrameDataLocation = FrameDataLocation.Hardware, PixelFormat = new PixelFormatNv12(desiredPixelFormat.Name) }
: currentState with { FrameDataLocation = FrameDataLocation.Hardware, PixelFormat = new PixelFormatP010() };
}
public override FrameState NextState(FrameState currentState) =>
currentState with
{
FrameDataLocation = FrameDataLocation.Hardware
};
}

4
ErsatzTV.FFmpeg/GlobalOption/HardwareAcceleration/QsvHardwareAccelerationOption.cs

@@ -16,9 +16,7 @@ public class QsvHardwareAccelerationOption(Option<string> device, FFmpegCapabili
{
get
{
string[] initDevices = OperatingSystem.IsWindows()
? new[] { "-init_hw_device", "d3d11va=hw:,vendor=0x8086", "-filter_hw_device", "hw" }
: new[] { "-init_hw_device", "qsv=hw", "-filter_hw_device", "hw" };
string[] initDevices = ["-init_hw_device", "qsv=hw", "-filter_hw_device", "hw"];
var result = new List<string>
{

12
ErsatzTV.FFmpeg/OutputOption/ColorMetadataOutputOption.cs

@@ -0,0 +1,12 @@
namespace ErsatzTV.FFmpeg.OutputOption;
/// <summary>
/// Output option that stamps BT.709 color metadata (primaries, transfer characteristics,
/// matrix coefficients) and limited ("tv") range onto the output stream.
/// </summary>
public class ColorMetadataOutputOption : OutputOption
{
    public override string[] OutputOptions
    {
        get
        {
            // Option name/value pairs, exactly as passed to ffmpeg.
            return new[]
            {
                "-color_primaries", "bt709",
                "-color_trc", "bt709",
                "-colorspace", "bt709",
                "-color_range", "tv"
            };
        }
    }
}

22
ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs

@@ -202,13 +202,12 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
false,
videoStream.ColorParams.IsHdr);
SetThreadCount(ffmpegState, desiredState, pipelineSteps);
SetSceneDetect(videoStream, ffmpegState, desiredState, pipelineSteps);
SetFFReport(ffmpegState, pipelineSteps);
SetStreamSeek(ffmpegState, videoInputFile, context, pipelineSteps);
SetTimeLimit(ffmpegState, pipelineSteps);
FilterChain filterChain = BuildVideoPipeline(
(ffmpegState, FilterChain filterChain) = BuildVideoPipeline(
videoInputFile,
videoStream,
ffmpegState,
@@ -216,6 +215,8 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
context,
pipelineSteps);
SetThreadCount(ffmpegState, desiredState, pipelineSteps);
// don't double input files for concat segmenter (v2) parent or child
if (_concatInputFile.IsNone && ffmpegState.OutputFormat is not OutputFormatKind.Nut)
{
@@ -483,7 +484,7 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps);
private FilterChain BuildVideoPipeline(
private VideoPipelineResult BuildVideoPipeline(
VideoInputFile videoInputFile,
VideoStream videoStream,
FFmpegState ffmpegState,
@@ -539,7 +540,7 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
SetOutputTsOffset(ffmpegState, desiredState, pipelineSteps);
return filterChain;
return new VideoPipelineResult(ffmpegState, filterChain);
}
protected abstract Option<IDecoder> SetDecoder(
@@ -760,19 +761,10 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
private void SetThreadCount(FFmpegState ffmpegState, FrameState desiredState, List<IPipelineStep> pipelineSteps)
{
if (ffmpegState.DecoderHardwareAccelerationMode != HardwareAccelerationMode.None ||
ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None)
{
_logger.LogDebug(
"Forcing {Threads} ffmpeg thread when hardware acceleration is used",
1);
pipelineSteps.Insert(0, new ThreadCountOption(1));
}
else if (ffmpegState.Start.Exists(s => s > TimeSpan.Zero) && desiredState.Realtime)
if (ffmpegState.DecoderHardwareAccelerationMode != HardwareAccelerationMode.None)
{
_logger.LogDebug(
"Forcing {Threads} ffmpeg thread due to buggy combination of stream seek and realtime output",
"Forcing {Threads} ffmpeg decoding thread when hardware acceleration is used",
1);
pipelineSteps.Insert(0, new ThreadCountOption(1));

58
ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs

@@ -3,6 +3,7 @@ using ErsatzTV.FFmpeg.Decoder;
using ErsatzTV.FFmpeg.Decoder.Qsv;
using ErsatzTV.FFmpeg.Encoder;
using ErsatzTV.FFmpeg.Encoder.Qsv;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Filter;
using ErsatzTV.FFmpeg.Filter.Qsv;
using ErsatzTV.FFmpeg.Format;
@@ -47,9 +48,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
_logger = logger;
}
protected override bool IsIntelVaapiOrQsv(FFmpegState ffmpegState) =>
ffmpegState.DecoderHardwareAccelerationMode is HardwareAccelerationMode.Qsv ||
ffmpegState.EncoderHardwareAccelerationMode is HardwareAccelerationMode.Qsv;
protected override bool IsIntelVaapiOrQsv(FFmpegState ffmpegState) => false;
protected override FFmpegState SetAccelState(
VideoStream videoStream,
@@ -89,6 +88,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
decodeCapability = FFmpegCapability.Software;
}
pipelineSteps.Add(new CudaVisibleDevicesVariable(string.Empty));
pipelineSteps.Add(new QsvHardwareAccelerationOption(ffmpegState.VaapiDevice, decodeCapability));
// disable hw accel if decoder/encoder isn't supported
@@ -330,35 +330,7 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
if (!videoStream.ColorParams.IsBt709 || usesVppQsv)
{
// _logger.LogDebug("Adding colorspace filter");
// force p010/nv12 if we're still in hardware
if (currentState.FrameDataLocation == FrameDataLocation.Hardware)
{
foreach (int bitDepth in currentState.PixelFormat.Map(pf => pf.BitDepth))
{
if (bitDepth is 10 && formatForDownload is not PixelFormatYuv420P10Le)
{
formatForDownload = new PixelFormatYuv420P10Le();
currentState = currentState with { PixelFormat = Some(formatForDownload) };
}
else if (bitDepth is 8 && formatForDownload is not PixelFormatNv12)
{
formatForDownload = new PixelFormatNv12(formatForDownload.Name);
currentState = currentState with { PixelFormat = Some(formatForDownload) };
}
}
}
// vpp_qsv seems to strip color info, so if we use that at all, force overriding input color info
var colorspace = new ColorspaceFilter(
currentState,
videoStream,
format,
usesVppQsv);
currentState = colorspace.NextState(currentState);
result.Add(colorspace);
pipelineSteps.Add(new ColorMetadataOutputOption());
}
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None)
@@ -657,20 +629,14 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
{
foreach (IPixelFormat pixelFormat in desiredState.PixelFormat)
{
if (ffmpegState.DecoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv)
{
var filter = new TonemapQsvFilter();
currentState = filter.NextState(currentState);
videoStream.ResetColorParams(ColorParams.Default);
videoInputFile.FilterSteps.Add(filter);
}
else
{
var filter = new TonemapFilter(ffmpegState, currentState, pixelFormat);
currentState = filter.NextState(currentState);
videoStream.ResetColorParams(ColorParams.Default);
videoInputFile.FilterSteps.Add(filter);
}
var uploadFilter = new HardwareUploadQsvFilter(currentState, ffmpegState);
currentState = uploadFilter.NextState(currentState);
videoInputFile.FilterSteps.Add(uploadFilter);
var filter = new TonemapQsvFilter(pixelFormat);
currentState = filter.NextState(currentState);
videoStream.ResetColorParams(ColorParams.Default);
videoInputFile.FilterSteps.Add(filter);
}
}

3
ErsatzTV.FFmpeg/Pipeline/VideoPipelineResult.cs

@@ -0,0 +1,3 @@
namespace ErsatzTV.FFmpeg.Pipeline;
/// <summary>
/// Result of building the video portion of a pipeline: the (possibly updated)
/// <see cref="FFmpegState"/> alongside the constructed <see cref="FilterChain"/>.
/// </summary>
public record VideoPipelineResult(FFmpegState FFmpegState, FilterChain FilterChain);
Loading…
Cancel
Save