
detect nvidia capabilities (#853)

* fallback to software codecs for old nvidia cards

* update dependencies
pull/854/head
Jason Dove, 3 years ago (committed by GitHub)
parent
commit dd7f77751c
No known key found for this signature in database. GPG Key ID: 4AEE18F83AFDEB23
Changed files (additions + deletions in parentheses):
  1. CHANGELOG.md (2)
  2. ErsatzTV.Application/Images/Queries/GetCachedImagePathHandler.cs (2)
  3. ErsatzTV.Application/Streaming/Queries/GetConcatProcessByChannelNumberHandler.cs (2)
  4. ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs (4)
  5. ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs (19)
  6. ErsatzTV.Core/Interfaces/FFmpeg/IFFmpegProcessService.cs (4)
  7. ErsatzTV.FFmpeg.Tests/PipelineBuilderTests.cs (26)
  8. ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs (7)
  9. ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs (76)
  10. ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs (7)
  11. ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs (8)
  12. ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs (7)
  13. ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs (26)
  14. ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs (20)
  15. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderH264Cuvid.cs (11)
  16. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderHevcCuvid.cs (11)
  17. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderMpeg2Cuvid.cs (13)
  18. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderMpeg4Cuvid.cs (11)
  19. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderVc1Cuvid.cs (11)
  20. ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderVp9Cuvid.cs (11)
  21. ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs (68)
  22. ErsatzTV.FFmpeg/Encoder/EncoderLibx265.cs (9)
  23. ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj (1)
  24. ErsatzTV.FFmpeg/FFmpegState.cs (4)
  25. ErsatzTV.FFmpeg/Filter/ComplexFilter.cs (16)
  26. ErsatzTV.FFmpeg/Filter/HardwareUploadFilter.cs (4)
  27. ErsatzTV.FFmpeg/Filter/SubtitleHardwareUploadFilter.cs (2)
  28. ErsatzTV.FFmpeg/Filter/SubtitlePixelFormatFilter.cs (2)
  29. ErsatzTV.FFmpeg/Filter/WatermarkHardwareUploadFilter.cs (2)
  30. ErsatzTV.FFmpeg/Filter/WatermarkPixelFormatFilter.cs (2)
  31. ErsatzTV.FFmpeg/PipelineBuilder.cs (76)
  32. ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj (6)
  33. ErsatzTV/ErsatzTV.csproj (10)
  34. ErsatzTV/Startup.cs (2)

2
CHANGELOG.md

@@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [Unreleased]
### Fixed
- Fix content repeating for up to a minute near the top of every hour
- Check whether hardware-accelerated hevc codecs are supported by the NVIDIA card
- Software codecs will be used if they are unsupported by the NVIDIA card
## [0.6.1-beta] - 2022-06-03
### Fixed
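
A minimal sketch of the fallback these changelog entries describe, using the capability types added in this commit; the factory and ffmpegPath values are assumed to be supplied by the caller, as in FFmpegLibraryProcessService below:

    // Probe the GPU once (the result is cached) and gate hardware encoding on it.
    IHardwareCapabilities capabilities =
        await factory.GetHardwareCapabilities(ffmpegPath, HardwareAccelerationMode.Nvenc);

    // Pre-Pascal NVIDIA cards cannot encode HEVC; when this returns false the
    // pipeline keeps the requested output format but selects libx265 instead of hevc_nvenc.
    bool hardwareHevcEncode = capabilities.CanEncode(VideoFormat.Hevc);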

2
ErsatzTV.Application/Images/Queries/GetCachedImagePathHandler.cs

@@ -69,7 +69,7 @@ public class
string originalPath = _imageCache.GetPathForImage(request.FileName, request.ArtworkKind, None);
Command process = _ffmpegProcessService.ResizeImage(
Command process = await _ffmpegProcessService.ResizeImage(
ffmpegPath,
originalPath,
withExtension,

2
ErsatzTV.Application/Streaming/Queries/GetConcatProcessByChannelNumberHandler.cs

@@ -30,7 +30,7 @@ public class GetConcatProcessByChannelNumberHandler : FFmpegProcessHandler<GetCo
.GetValue<bool>(ConfigElementKey.FFmpegSaveReports)
.Map(result => result.IfNone(false));
Command process = _ffmpegProcessService.ConcatChannel(
Command process = await _ffmpegProcessService.ConcatChannel(
ffmpegPath,
saveReports,
channel,

4
ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs

@@ -10,6 +10,7 @@ using ErsatzTV.Core.Interfaces.FFmpeg;
using ErsatzTV.Core.Interfaces.Images;
using ErsatzTV.Core.Interfaces.Repositories;
using ErsatzTV.Core.Metadata;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.State;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
@@ -302,6 +303,9 @@ public class TranscodingTests
new FFmpegPlaybackSettingsCalculator(),
new FakeStreamSelector(),
new Mock<ITempFilePool>().Object,
new HardwareCapabilitiesFactory(
new MemoryCache(new MemoryCacheOptions()),
LoggerFactory.CreateLogger<HardwareCapabilitiesFactory>()),
LoggerFactory.CreateLogger<FFmpegLibraryProcessService>());
var v = new MediaVersion

19
ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs

@@ -3,6 +3,7 @@ using ErsatzTV.Core.Domain;
using ErsatzTV.Core.Domain.Filler;
using ErsatzTV.Core.Interfaces.FFmpeg;
using ErsatzTV.FFmpeg;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.OutputFormat;
@@ -16,6 +17,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
{
private readonly FFmpegProcessService _ffmpegProcessService;
private readonly IFFmpegStreamSelector _ffmpegStreamSelector;
private readonly IHardwareCapabilitiesFactory _hardwareCapabilitiesFactory;
private readonly ILogger<FFmpegLibraryProcessService> _logger;
private readonly FFmpegPlaybackSettingsCalculator _playbackSettingsCalculator;
private readonly ITempFilePool _tempFilePool;
@@ -25,12 +27,14 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
FFmpegPlaybackSettingsCalculator playbackSettingsCalculator,
IFFmpegStreamSelector ffmpegStreamSelector,
ITempFilePool tempFilePool,
IHardwareCapabilitiesFactory hardwareCapabilitiesFactory,
ILogger<FFmpegLibraryProcessService> logger)
{
_ffmpegProcessService = ffmpegProcessService;
_playbackSettingsCalculator = playbackSettingsCalculator;
_ffmpegStreamSelector = ffmpegStreamSelector;
_tempFilePool = tempFilePool;
_hardwareCapabilitiesFactory = hardwareCapabilitiesFactory;
_logger = logger;
}
@@ -214,6 +218,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
var ffmpegState = new FFmpegState(
saveReports,
hwAccel,
hwAccel,
VaapiDriverName(hwAccel, vaapiDriver),
VaapiDeviceName(hwAccel, vaapiDevice),
playbackSettings.StreamSeek,
@@ -231,6 +236,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
_logger.LogDebug("FFmpeg desired state {FrameState}", desiredState);
var pipelineBuilder = new PipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hwAccel),
videoInputFile,
audioInputFile,
watermarkInputFile,
@@ -333,6 +339,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
var ffmpegState = new FFmpegState(
false,
hwAccel,
hwAccel,
VaapiDriverName(hwAccel, vaapiDriver),
VaapiDeviceName(hwAccel, vaapiDevice),
playbackSettings.StreamSeek,
@@ -359,6 +366,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
_logger.LogDebug("FFmpeg desired error state {FrameState}", desiredState);
var pipelineBuilder = new PipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hwAccel),
videoInputFile,
audioInputFile,
None,
@@ -372,7 +380,12 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
return GetCommand(ffmpegPath, videoInputFile, audioInputFile, None, None, pipeline);
}
public Command ConcatChannel(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host)
public async Task<Command> ConcatChannel(
string ffmpegPath,
bool saveReports,
Channel channel,
string scheme,
string host)
{
var resolution = new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height);
@@ -381,6 +394,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
resolution);
var pipelineBuilder = new PipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, HardwareAccelerationMode.None),
None,
None,
None,
@@ -399,13 +413,14 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
public Command WrapSegmenter(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host) =>
_ffmpegProcessService.WrapSegmenter(ffmpegPath, saveReports, channel, scheme, host);
public Command ResizeImage(string ffmpegPath, string inputFile, string outputFile, int height)
public async Task<Command> ResizeImage(string ffmpegPath, string inputFile, string outputFile, int height)
{
var videoInputFile = new VideoInputFile(
inputFile,
new List<VideoStream> { new(0, string.Empty, None, FrameSize.Unknown, None, true) });
var pipelineBuilder = new PipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, HardwareAccelerationMode.None),
videoInputFile,
None,
None,

4
ErsatzTV.Core/Interfaces/FFmpeg/IFFmpegProcessService.cs

@@ -46,11 +46,11 @@ public interface IFFmpegProcessService
VaapiDriver vaapiDriver,
string vaapiDevice);
Command ConcatChannel(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host);
Task<Command> ConcatChannel(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host);
Command WrapSegmenter(string ffmpegPath, bool saveReports, Channel channel, string scheme, string host);
Command ResizeImage(string ffmpegPath, string inputFile, string outputFile, int height);
Task<Command> ResizeImage(string ffmpegPath, string inputFile, string outputFile, int height);
Command ConvertToPng(string ffmpegPath, string inputFile, string outputFile);

26
ErsatzTV.FFmpeg.Tests/PipelineBuilderTests.cs

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Encoder;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.OutputFormat;
@@ -54,6 +55,7 @@ public class PipelineGeneratorTests
var ffmpegState = new FFmpegState(
false,
HardwareAccelerationMode.None,
HardwareAccelerationMode.None,
Option<string>.None,
Option<string>.None,
TimeSpan.FromSeconds(1),
@@ -68,7 +70,15 @@ public class PipelineGeneratorTests
0,
Option<int>.None);
var builder = new PipelineBuilder(videoInputFile, audioInputFile, None, None, "", "", _logger);
var builder = new PipelineBuilder(
new DefaultHardwareCapabilities(),
videoInputFile,
audioInputFile,
None,
None,
"",
"",
_logger);
FFmpegPipeline result = builder.Build(ffmpegState, desiredState);
result.PipelineSteps.Should().HaveCountGreaterThan(0);
@@ -85,7 +95,7 @@ public class PipelineGeneratorTests
var resolution = new FrameSize(1920, 1080);
var concatInputFile = new ConcatInputFile("http://localhost:8080/ffmpeg/concat/1", resolution);
var builder = new PipelineBuilder(None, None, None, None, "", "", _logger);
var builder = new PipelineBuilder(new DefaultHardwareCapabilities(), None, None, None, None, "", "", _logger);
FFmpegPipeline result = builder.Concat(concatInputFile, FFmpegState.Concat(false, "Some Channel"));
result.PipelineSteps.Should().HaveCountGreaterThan(0);
@@ -132,6 +142,7 @@ public class PipelineGeneratorTests
var ffmpegState = new FFmpegState(
false,
HardwareAccelerationMode.None,
HardwareAccelerationMode.None,
Option<string>.None,
Option<string>.None,
Option<TimeSpan>.None,
@@ -146,7 +157,15 @@ public class PipelineGeneratorTests
0,
Option<int>.None);
var builder = new PipelineBuilder(videoInputFile, audioInputFile, None, None, "", "", _logger);
var builder = new PipelineBuilder(
new DefaultHardwareCapabilities(),
videoInputFile,
audioInputFile,
None,
None,
"",
"",
_logger);
FFmpegPipeline result = builder.Build(ffmpegState, desiredState);
result.PipelineSteps.Should().HaveCountGreaterThan(0);
@@ -172,6 +191,7 @@ public class PipelineGeneratorTests
});
var pipelineBuilder = new PipelineBuilder(
new DefaultHardwareCapabilities(),
videoInputFile,
Option<AudioInputFile>.None,
Option<WatermarkInputFile>.None,

7
ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs

@@ -0,0 +1,7 @@
namespace ErsatzTV.FFmpeg.Capabilities;
public class DefaultHardwareCapabilities : IHardwareCapabilities
{
public bool CanDecode(string videoFormat) => true;
public bool CanEncode(string videoFormat) => true;
}

76
ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs

@@ -0,0 +1,76 @@
using System.Text;
using System.Text.RegularExpressions;
using CliWrap;
using CliWrap.Buffered;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
namespace ErsatzTV.FFmpeg.Capabilities;
public class HardwareCapabilitiesFactory : IHardwareCapabilitiesFactory
{
private const string CacheKey = "ffmpeg.hardware.nvidia.architecture";
private readonly ILogger<HardwareCapabilitiesFactory> _logger;
private readonly IMemoryCache _memoryCache;
public HardwareCapabilitiesFactory(IMemoryCache memoryCache, ILogger<HardwareCapabilitiesFactory> logger)
{
_memoryCache = memoryCache;
_logger = logger;
}
public async Task<IHardwareCapabilities> GetHardwareCapabilities(
string ffmpegPath,
HardwareAccelerationMode hardwareAccelerationMode) =>
hardwareAccelerationMode switch
{
HardwareAccelerationMode.Nvenc => await GetNvidiaCapabilities(ffmpegPath),
_ => new DefaultHardwareCapabilities()
};
private async Task<IHardwareCapabilities> GetNvidiaCapabilities(string ffmpegPath)
{
if (_memoryCache.TryGetValue(CacheKey, out int cachedArchitecture))
{
return new NvidiaHardwareCapabilities(cachedArchitecture);
}
string[] arguments =
{
"-f", "lavfi",
"-i", "nullsrc",
"-c:v", "h264_nvenc",
"-gpu", "list",
"-f", "null", "-"
};
BufferedCommandResult result = await Cli.Wrap(ffmpegPath)
.WithArguments(arguments)
.WithValidation(CommandResultValidation.None)
.ExecuteBufferedAsync(Encoding.UTF8);
string output = string.IsNullOrWhiteSpace(result.StandardOutput)
? result.StandardError
: result.StandardOutput;
Option<string> maybeLine = Optional(output.Split("\n").FirstOrDefault(x => x.Contains("GPU")));
foreach (string line in maybeLine)
{
const string PATTERN = @"SM\s+(\d\.\d)";
Match match = Regex.Match(line, PATTERN);
if (match.Success && int.TryParse(match.Groups[1].Value.Replace(".", string.Empty), out int architecture))
{
_logger.LogInformation("Detected NVIDIA GPU architecture SM {Architecture}", architecture);
_memoryCache.Set(CacheKey, architecture);
return new NvidiaHardwareCapabilities(architecture);
}
}
_logger.LogWarning(
"Error detecting NVIDIA GPU capabilities; some hardware accelerated features will be unavailable: {ExitCode}",
result.ExitCode);
return new NoHardwareCapabilities();
}
}
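
To make the parsing step concrete: the factory runs ffmpeg with h264_nvenc and -gpu list against a null source, then scans the output for the first line containing "GPU". The sample line below is an assumed but typical shape of that message; only the "SM x.y" capture matters, and the regex is the one used above (requires using System.Text.RegularExpressions):

    const string sampleLine = "GPU #0 - < NVIDIA GeForce GTX 1060 > has Compute SM 6.1";
    Match match = Regex.Match(sampleLine, @"SM\s+(\d\.\d)");
    if (match.Success && int.TryParse(match.Groups[1].Value.Replace(".", string.Empty), out int architecture))
    {
        // architecture == 61 (Pascal), which clears the >= 60 checks in NvidiaHardwareCapabilities
    }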

7
ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs

@@ -0,0 +1,7 @@
namespace ErsatzTV.FFmpeg.Capabilities;
public interface IHardwareCapabilities
{
public bool CanDecode(string videoFormat);
public bool CanEncode(string videoFormat);
}

8
ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs

@@ -0,0 +1,8 @@
namespace ErsatzTV.FFmpeg.Capabilities;
public interface IHardwareCapabilitiesFactory
{
Task<IHardwareCapabilities> GetHardwareCapabilities(
string ffmpegPath,
HardwareAccelerationMode hardwareAccelerationMode);
}

7
ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs

@@ -0,0 +1,7 @@
namespace ErsatzTV.FFmpeg.Capabilities;
public class NoHardwareCapabilities : IHardwareCapabilities
{
public bool CanDecode(string videoFormat) => false;
public bool CanEncode(string videoFormat) => false;
}

26
ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs

@@ -0,0 +1,26 @@
using ErsatzTV.FFmpeg.Format;
namespace ErsatzTV.FFmpeg.Capabilities;
public class NvidiaHardwareCapabilities : IHardwareCapabilities
{
private readonly int _architecture;
public NvidiaHardwareCapabilities(int architecture) => _architecture = architecture;
public bool CanDecode(string videoFormat) =>
videoFormat switch
{
// pascal is required to decode hevc/vp9
VideoFormat.Hevc or VideoFormat.Vp9 => _architecture >= 60,
_ => true
};
public bool CanEncode(string videoFormat) =>
videoFormat switch
{
// pascal is required to encode hevc
VideoFormat.Hevc => _architecture >= 60,
_ => true
};
}
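
The threshold maps to GPU generations: SM 60 and above is Pascal or newer. A small usage sketch (the SM values are real compute-capability numbers; the card names are only examples):

    var maxwell = new NvidiaHardwareCapabilities(52); // e.g. GTX 960, SM 5.2
    bool hevcEncode = maxwell.CanEncode(VideoFormat.Hevc); // false -> software libx265 is used
    bool h264Decode = maxwell.CanDecode(VideoFormat.H264); // true  -> h264_cuvid still works

    var pascal = new NvidiaHardwareCapabilities(61); // e.g. GTX 1060, SM 6.1
    bool vp9Decode = pascal.CanDecode(VideoFormat.Vp9); // true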

20
ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs

@@ -1,4 +1,5 @@
using ErsatzTV.FFmpeg.Decoder.Cuvid;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Decoder.Cuvid;
using ErsatzTV.FFmpeg.Decoder.Qsv;
using ErsatzTV.FFmpeg.Format;
using Microsoft.Extensions.Logging;
@@ -8,16 +9,18 @@ namespace ErsatzTV.FFmpeg.Decoder;
public static class AvailableDecoders
{
public static Option<IDecoder> ForVideoFormat(
IHardwareCapabilities hardwareCapabilities,
FFmpegState ffmpegState,
FrameState currentState,
FrameState desiredState,
Option<WatermarkInputFile> watermarkInputFile,
Option<SubtitleInputFile> subtitleInputFile,
ILogger logger) =>
(ffmpegState.HardwareAccelerationMode, currentState.VideoFormat,
(ffmpegState.DecoderHardwareAccelerationMode, currentState.VideoFormat,
currentState.PixelFormat.Match(pf => pf.Name, () => string.Empty)) switch
{
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc, _) => new DecoderHevcCuvid(),
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc, _)
when hardwareCapabilities.CanDecode(VideoFormat.Hevc) => new DecoderHevcCuvid(ffmpegState),
// nvenc doesn't support hardware decoding of 10-bit content
(HardwareAccelerationMode.Nvenc, VideoFormat.H264, PixelFormat.YUV420P10LE or PixelFormat.YUV444P10LE)
@@ -27,12 +30,15 @@ public static class AvailableDecoders
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) when desiredState.Deinterlaced =>
new DecoderMpeg2Video(),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264, _) => new DecoderH264Cuvid(),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264, _)
when hardwareCapabilities.CanDecode(VideoFormat.H264) => new DecoderH264Cuvid(ffmpegState),
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Cuvid(
ffmpegState,
desiredState.Deinterlaced),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vc1, _) => new DecoderVc1Cuvid(),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vp9, _) => new DecoderVp9Cuvid(),
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg4, _) => new DecoderMpeg4Cuvid(),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vc1, _) => new DecoderVc1Cuvid(ffmpegState),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vp9, _)
when hardwareCapabilities.CanDecode(VideoFormat.Vp9) => new DecoderVp9Cuvid(ffmpegState),
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg4, _) => new DecoderMpeg4Cuvid(ffmpegState),
// hevc_qsv decoder sometimes causes green lines with 10-bit content
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc, PixelFormat.YUV420P10LE) => new DecoderHevc(),

11
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderH264Cuvid.cs

@@ -2,16 +2,23 @@
public class DecoderH264Cuvid : DecoderBase
{
private readonly FFmpegState _ffmpegState;
public DecoderH264Cuvid(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Name => "h264_cuvid";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Hardware;
protected override FrameDataLocation OutputFrameDataLocation =>
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

11
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderHevcCuvid.cs

@@ -2,16 +2,23 @@
public class DecoderHevcCuvid : DecoderBase
{
private readonly FFmpegState _ffmpegState;
public DecoderHevcCuvid(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Name => "hevc_cuvid";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Hardware;
protected override FrameDataLocation OutputFrameDataLocation =>
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

13
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderMpeg2Cuvid.cs

@@ -3,20 +3,27 @@
public class DecoderMpeg2Cuvid : DecoderBase
{
private readonly bool _contentIsInterlaced;
private readonly FFmpegState _ffmpegState;
public DecoderMpeg2Cuvid(bool contentIsInterlaced) => _contentIsInterlaced = contentIsInterlaced;
public DecoderMpeg2Cuvid(FFmpegState ffmpegState, bool contentIsInterlaced)
{
_ffmpegState = ffmpegState;
_contentIsInterlaced = contentIsInterlaced;
}
public override string Name => "mpeg2_cuvid";
protected override FrameDataLocation OutputFrameDataLocation =>
_contentIsInterlaced ? FrameDataLocation.Software : FrameDataLocation.Hardware;
_contentIsInterlaced || _ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

11
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderMpeg4Cuvid.cs

@@ -2,16 +2,23 @@
public class DecoderMpeg4Cuvid : DecoderBase
{
private readonly FFmpegState _ffmpegState;
public DecoderMpeg4Cuvid(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Name => "mpeg4_cuvid";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Hardware;
protected override FrameDataLocation OutputFrameDataLocation =>
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

11
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderVc1Cuvid.cs

@@ -2,16 +2,23 @@
public class DecoderVc1Cuvid : DecoderBase
{
private readonly FFmpegState _ffmpegState;
public DecoderVc1Cuvid(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Name => "vc1_cuvid";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Hardware;
protected override FrameDataLocation OutputFrameDataLocation =>
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

11
ErsatzTV.FFmpeg/Decoder/Cuvid/DecoderVp9Cuvid.cs

@@ -2,16 +2,23 @@
public class DecoderVp9Cuvid : DecoderBase
{
private readonly FFmpegState _ffmpegState;
public DecoderVp9Cuvid(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Name => "vp9_cuvid";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Hardware;
protected override FrameDataLocation OutputFrameDataLocation =>
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None
? FrameDataLocation.Software
: FrameDataLocation.Hardware;
public override IList<string> InputOptions(InputFile inputFile)
{
IList<string> result = base.InputOptions(inputFile);
result.Add("-hwaccel_output_format");
result.Add("cuda");
result.Add(_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.None ? "cuda" : "nv12");
return result;
}

68
ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs

@@ -1,4 +1,5 @@
using ErsatzTV.FFmpeg.Encoder.Nvenc;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Encoder.Nvenc;
using ErsatzTV.FFmpeg.Encoder.Qsv;
using ErsatzTV.FFmpeg.Encoder.Vaapi;
using ErsatzTV.FFmpeg.Encoder.VideoToolbox;
@@ -11,45 +12,54 @@ namespace ErsatzTV.FFmpeg.Encoder;
public static class AvailableEncoders
{
public static Option<IEncoder> ForVideoFormat(
IHardwareCapabilities hardwareCapabilities,
FFmpegState ffmpegState,
FrameState currentState,
FrameState desiredState,
Option<WatermarkInputFile> maybeWatermarkInputFile,
Option<SubtitleInputFile> maybeSubtitleInputFile,
ILogger logger) =>
(ffmpegState.HardwareAccelerationMode, desiredState.VideoFormat) switch
(ffmpegState.EncoderHardwareAccelerationMode, desiredState.VideoFormat) switch
{
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc) => new EncoderHevcNvenc(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264) => new EncoderH264Nvenc(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(VideoFormat.Hevc) =>
new EncoderHevcNvenc(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264) when hardwareCapabilities.CanEncode(VideoFormat.H264) =>
new EncoderH264Nvenc(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc) => new EncoderHevcQsv(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Qsv, VideoFormat.H264) => new EncoderH264Qsv(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(VideoFormat.Hevc) =>
new EncoderHevcQsv(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Qsv, VideoFormat.H264) when hardwareCapabilities.CanEncode(VideoFormat.H264) =>
new EncoderH264Qsv(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Vaapi, VideoFormat.Hevc) => new EncoderHevcVaapi(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Vaapi, VideoFormat.H264) => new EncoderH264Vaapi(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Vaapi, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(VideoFormat.Hevc) =>
new EncoderHevcVaapi(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.Vaapi, VideoFormat.H264) when hardwareCapabilities.CanEncode(VideoFormat.H264) =>
new EncoderH264Vaapi(
currentState,
maybeWatermarkInputFile,
maybeSubtitleInputFile),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.Hevc) => new EncoderHevcVideoToolbox(),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.H264) => new EncoderH264VideoToolbox(),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc) => new EncoderHevcVideoToolbox(),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264) => new EncoderH264VideoToolbox(),
(_, VideoFormat.Hevc) => new EncoderLibx265(),
(_, VideoFormat.Hevc) => new EncoderLibx265(currentState),
(_, VideoFormat.H264) => new EncoderLibx264(),
(_, VideoFormat.Mpeg2Video) => new EncoderMpeg2Video(),
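
Note that each when guard only filters its own arm: if, for example, CanEncode(VideoFormat.Hevc) returns false for an Nvenc pipeline, the switch simply falls through to the later (_, VideoFormat.Hevc) arm and returns the software EncoderLibx265, which is the fallback behavior this commit introduces.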

9
ErsatzTV.FFmpeg/Encoder/EncoderLibx265.cs

@@ -1,9 +1,16 @@
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.Filter;
using ErsatzTV.FFmpeg.Format;
namespace ErsatzTV.FFmpeg.Encoder;
public class EncoderLibx265 : EncoderBase
{
private readonly FrameState _currentState;
public EncoderLibx265(FrameState currentState) => _currentState = currentState;
public override string Filter => new HardwareDownloadFilter(_currentState).Filter;
// TODO: is tag:v needed for mpegts?
public override IList<string> OutputOptions => new List<string>
{ "-c:v", Name, "-tag:v", "hvc1", "-x265-params", "log-level=error" };

1
ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj

@@ -9,6 +9,7 @@
<ItemGroup>
<PackageReference Include="CliWrap" Version="3.4.4" />
<PackageReference Include="LanguageExt.Core" Version="4.2.2" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="6.0.1" />
</ItemGroup>

4
ErsatzTV.FFmpeg/FFmpegState.cs

@@ -4,7 +4,8 @@ namespace ErsatzTV.FFmpeg;
public record FFmpegState(
bool SaveReport,
HardwareAccelerationMode HardwareAccelerationMode,
HardwareAccelerationMode DecoderHardwareAccelerationMode,
HardwareAccelerationMode EncoderHardwareAccelerationMode,
Option<string> VaapiDriver,
Option<string> VaapiDevice,
Option<TimeSpan> Start,
@@ -23,6 +24,7 @@ public record FFmpegState(
new(
saveReport,
HardwareAccelerationMode.None,
HardwareAccelerationMode.None,
Option<string>.None,
Option<string>.None,
Option<TimeSpan>.None,
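
Splitting the single HardwareAccelerationMode into separate decoder and encoder modes is what lets PipelineBuilder disable only the unsupported half. A sketch, assuming an existing ffmpegState record that has Nvenc set on both sides:

    // keep hardware decoding (h264_cuvid and friends) but encode in software
    FFmpegState softwareEncode = ffmpegState with
    {
        EncoderHardwareAccelerationMode = HardwareAccelerationMode.None
    };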

16
ErsatzTV.FFmpeg/Filter/ComplexFilter.cs

@@ -148,7 +148,7 @@ public class ComplexFilter : IPipelineStep
}
IPipelineFilterStep overlayFilter = AvailableWatermarkOverlayFilters.ForAcceleration(
_ffmpegState.HardwareAccelerationMode,
_ffmpegState.EncoderHardwareAccelerationMode,
_currentState,
watermarkInputFile.DesiredState,
_resolution);
@@ -164,16 +164,16 @@ public class ComplexFilter : IPipelineStep
// also wait to upload if a subtitle overlay is coming
string uploadDownloadFilter = string.Empty;
if (_maybeSubtitleInputFile.IsNone &&
(_ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi ||
_ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.VideoToolbox &&
(_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi ||
_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.VideoToolbox &&
_currentState.VideoFormat == VideoFormat.Hevc))
{
uploadDownloadFilter = new HardwareUploadFilter(_ffmpegState).Filter;
}
if (_maybeSubtitleInputFile.Map(s => !s.IsImageBased).IfNone(false) &&
_ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.Vaapi &&
_ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.VideoToolbox)
_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.Vaapi &&
_ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.VideoToolbox)
{
uploadDownloadFilter = new HardwareDownloadFilter(_currentState).Filter;
}
@@ -217,7 +217,7 @@ public class ComplexFilter : IPipelineStep
{
IPipelineFilterStep overlayFilter =
AvailableSubtitleOverlayFilters.ForAcceleration(
_ffmpegState.HardwareAccelerationMode,
_ffmpegState.EncoderHardwareAccelerationMode,
_currentState);
filter = overlayFilter.Filter;
}
@@ -238,8 +238,8 @@ public class ComplexFilter : IPipelineStep
// vaapi uses software overlay and needs to upload
// videotoolbox seems to require a hwupload for hevc
string uploadFilter = string.Empty;
if (_ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi
|| _ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.VideoToolbox &&
if (_ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi
|| _ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.VideoToolbox &&
_currentState.VideoFormat == VideoFormat.Hevc)
{
uploadFilter = new HardwareUploadFilter(_ffmpegState).Filter;

4
ErsatzTV.FFmpeg/Filter/HardwareUploadFilter.cs

@@ -6,7 +6,7 @@ public class HardwareUploadFilter : BaseFilter
public HardwareUploadFilter(FFmpegState ffmpegState) => _ffmpegState = ffmpegState;
public override string Filter => _ffmpegState.HardwareAccelerationMode switch
public override string Filter => _ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.None => string.Empty,
HardwareAccelerationMode.Nvenc => "hwupload_cuda",
@@ -15,7 +15,7 @@ public class HardwareUploadFilter : BaseFilter
_ => "hwupload"
};
public override FrameState NextState(FrameState currentState) => _ffmpegState.HardwareAccelerationMode switch
public override FrameState NextState(FrameState currentState) => _ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.None => currentState,
_ => currentState with { FrameDataLocation = FrameDataLocation.Hardware }

2
ErsatzTV.FFmpeg/Filter/SubtitleHardwareUploadFilter.cs

@@ -12,7 +12,7 @@ public class SubtitleHardwareUploadFilter : BaseFilter
}
public override string Filter =>
_ffmpegState.HardwareAccelerationMode switch
_ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.None => string.Empty,
HardwareAccelerationMode.Nvenc => "hwupload_cuda",

2
ErsatzTV.FFmpeg/Filter/SubtitlePixelFormatFilter.cs

@@ -10,7 +10,7 @@ public class SubtitlePixelFormatFilter : BaseFilter
{
get
{
Option<string> maybeFormat = _ffmpegState.HardwareAccelerationMode switch
Option<string> maybeFormat = _ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.Nvenc => "yuva420p",
HardwareAccelerationMode.Qsv => "yuva420p",

2
ErsatzTV.FFmpeg/Filter/WatermarkHardwareUploadFilter.cs

@@ -11,7 +11,7 @@ public class WatermarkHardwareUploadFilter : BaseFilter
_ffmpegState = ffmpegState;
}
public override string Filter => _ffmpegState.HardwareAccelerationMode switch
public override string Filter => _ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.None => string.Empty,
HardwareAccelerationMode.Nvenc => "hwupload_cuda",

2
ErsatzTV.FFmpeg/Filter/WatermarkPixelFormatFilter.cs

@@ -19,7 +19,7 @@ public class WatermarkPixelFormatFilter : BaseFilter
{
bool hasFadePoints = _watermarkState.MaybeFadePoints.Map(fp => fp.Count).IfNone(0) > 0;
Option<string> maybeFormat = _ffmpegState.HardwareAccelerationMode switch
Option<string> maybeFormat = _ffmpegState.EncoderHardwareAccelerationMode switch
{
HardwareAccelerationMode.Nvenc => "yuva420p",
HardwareAccelerationMode.Qsv => "yuva420p",

76
ErsatzTV.FFmpeg/PipelineBuilder.cs

@@ -1,4 +1,5 @@
using ErsatzTV.FFmpeg.Decoder;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Decoder;
using ErsatzTV.FFmpeg.Encoder;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Filter;
@@ -18,6 +19,7 @@ public class PipelineBuilder
{
private readonly Option<AudioInputFile> _audioInputFile;
private readonly string _fontsFolder;
private readonly IHardwareCapabilities _hardwareCapabilities;
private readonly ILogger _logger;
private readonly List<IPipelineStep> _pipelineSteps;
private readonly string _reportsFolder;
@@ -26,6 +28,7 @@ public class PipelineBuilder
private readonly Option<WatermarkInputFile> _watermarkInputFile;
public PipelineBuilder(
IHardwareCapabilities hardwareCapabilities,
Option<VideoInputFile> videoInputFile,
Option<AudioInputFile> audioInputFile,
Option<WatermarkInputFile> watermarkInputFile,
@@ -46,6 +49,7 @@ public class PipelineBuilder
new ClosedGopOutputOption()
};
_hardwareCapabilities = hardwareCapabilities;
_videoInputFile = videoInputFile;
_audioInputFile = audioInputFile;
_watermarkInputFile = watermarkInputFile;
@@ -200,7 +204,7 @@ public class PipelineBuilder
else
{
Option<IPipelineStep> maybeAccel = AvailableHardwareAccelerationOptions.ForMode(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
ffmpegState.VaapiDevice,
_logger);
@@ -209,24 +213,45 @@ public class PipelineBuilder
ffmpegState = ffmpegState with
{
// disable hw accel if we don't match anything
HardwareAccelerationMode = HardwareAccelerationMode.None
DecoderHardwareAccelerationMode = HardwareAccelerationMode.None,
EncoderHardwareAccelerationMode = HardwareAccelerationMode.None
};
}
foreach (IPipelineStep accel in maybeAccel)
{
currentState = accel.NextState(currentState);
_pipelineSteps.Add(accel);
bool canDecode = _hardwareCapabilities.CanDecode(currentState.VideoFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat);
// disable hw accel if decoder/encoder isn't supported
if (!canDecode || !canEncode)
{
ffmpegState = ffmpegState with
{
DecoderHardwareAccelerationMode = canDecode
? ffmpegState.DecoderHardwareAccelerationMode
: HardwareAccelerationMode.None,
EncoderHardwareAccelerationMode = canEncode
? ffmpegState.EncoderHardwareAccelerationMode
: HardwareAccelerationMode.None
};
}
if (canDecode || canEncode)
{
currentState = accel.NextState(currentState);
_pipelineSteps.Add(accel);
}
}
// nvenc requires yuv420p background with yuva420p overlay
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc && hasOverlay)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc && hasOverlay)
{
desiredState = desiredState with { PixelFormat = new PixelFormatYuv420P() };
}
// qsv should stay nv12
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Qsv && hasOverlay)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv && hasOverlay)
{
IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P());
desiredState = desiredState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) };
@@ -240,6 +265,7 @@ public class PipelineBuilder
}
foreach (IDecoder decoder in AvailableDecoders.ForVideoFormat(
_hardwareCapabilities,
ffmpegState,
currentState,
desiredState,
@@ -262,7 +288,7 @@ public class PipelineBuilder
if (videoStream.StillImage)
{
var option = new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode);
var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode);
_videoInputFile.Iter(f => f.AddOption(option));
}
@@ -277,7 +303,7 @@ public class PipelineBuilder
if (desiredState.InfiniteLoop)
{
var option = new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode);
var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode);
_audioInputFile.Iter(f => f.AddOption(option));
_videoInputFile.Iter(f => f.AddOption(option));
}
@@ -325,7 +351,7 @@ public class PipelineBuilder
if (desiredState.Deinterlaced && !currentState.Deinterlaced)
{
IPipelineFilterStep step = AvailableDeinterlaceFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState,
_watermarkInputFile,
@@ -335,7 +361,7 @@ public class PipelineBuilder
}
// TODO: this is a software-only flow, will need to be different for hardware accel
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.None)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None)
{
if (currentState.ScaledSize != desiredState.ScaledSize ||
currentState.PaddedSize != desiredState.PaddedSize)
@@ -360,7 +386,7 @@ public class PipelineBuilder
else if (currentState.ScaledSize != desiredState.ScaledSize)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize);
@@ -382,7 +408,7 @@ public class PipelineBuilder
else if (currentState.PaddedSize != desiredState.PaddedSize)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize);
@@ -415,7 +441,7 @@ public class PipelineBuilder
currentState = formatFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(formatFilter));
switch (ffmpegState.HardwareAccelerationMode)
switch (ffmpegState.EncoderHardwareAccelerationMode)
{
case HardwareAccelerationMode.Nvenc:
var uploadFilter = new HardwareUploadFilter(ffmpegState);
@@ -426,13 +452,13 @@ public class PipelineBuilder
}
else
{
if (ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.Qsv)
if (ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.Qsv)
{
// the filter re-applies the current pixel format, so we have to set it first
currentState = currentState with { PixelFormat = desiredState.PixelFormat };
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize);
@@ -444,7 +470,7 @@ public class PipelineBuilder
}
// nvenc custom logic
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc)
{
foreach (VideoInputFile videoInputFile in _videoInputFile)
{
@@ -461,7 +487,7 @@ public class PipelineBuilder
currentState = currentState with { PixelFormat = desiredState.PixelFormat };
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize);
@@ -488,7 +514,7 @@ public class PipelineBuilder
if (currentState.PixelFormat.Map(pf => pf.FFmpegName) != desiredPixelFormat.FFmpegName)
{
// qsv doesn't seem to like this
if (ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.Qsv)
if (ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.Qsv)
{
IPipelineStep step = new PixelFormatOutputOption(desiredPixelFormat);
currentState = step.NextState(currentState);
@@ -547,7 +573,7 @@ public class PipelineBuilder
{
// vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
// though videotoolbox uses software decoders, so no need to download for that
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
{
var downloadFilter = new HardwareDownloadFilter(currentState);
currentState = downloadFilter.NextState(currentState);
@@ -565,12 +591,12 @@ public class PipelineBuilder
// text-based subtitles are always added in software, so always try to download the background
// nvidia needs some extra format help if the only filter will be the download filter
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc &&
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc &&
currentState.FrameDataLocation == FrameDataLocation.Hardware &&
_videoInputFile.Map(f => f.FilterSteps.Count).IfNone(1) == 0)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
ffmpegState.HardwareAccelerationMode,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize);
@@ -588,7 +614,7 @@ public class PipelineBuilder
{
// vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
// though videotoolbox uses software decoders, so no need to download for that
if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
{
var downloadFilter = new HardwareDownloadFilter(currentState);
currentState = downloadFilter.NextState(currentState);
@@ -607,7 +633,8 @@ public class PipelineBuilder
else if (watermarkInputFile.DesiredState.MaybeFadePoints.Map(fp => fp.Count > 0).IfNone(false))
{
// looping is required to fade a static image in and out
watermarkInputFile.AddOption(new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode));
watermarkInputFile.AddOption(
new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode));
}
}
@@ -634,6 +661,7 @@ public class PipelineBuilder
if (_pipelineSteps.OfType<IEncoder>().All(e => e.Kind != StreamKind.Video))
{
foreach (IEncoder e in AvailableEncoders.ForVideoFormat(
_hardwareCapabilities,
ffmpegState,
currentState,
desiredState,

6
ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj

@@ -14,12 +14,12 @@
<PackageReference Include="Lucene.Net" Version="4.8.0-beta00016" />
<PackageReference Include="Lucene.Net.Analysis.Common" Version="4.8.0-beta00016" />
<PackageReference Include="Lucene.Net.QueryParser" Version="4.8.0-beta00016" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="6.0.5" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.5">
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="6.0.6" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="6.0.5" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="6.0.6" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.2.32">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

10
ErsatzTV/ErsatzTV.csproj

@@ -55,16 +55,16 @@
<ItemGroup>
<PackageReference Include="Bugsnag.AspNet.Core" Version="3.0.1" />
<PackageReference Include="FluentValidation" Version="11.0.2" />
<PackageReference Include="FluentValidation.AspNetCore" Version="11.0.2" />
<PackageReference Include="FluentValidation" Version="11.0.3" />
<PackageReference Include="FluentValidation.AspNetCore" Version="11.0.3" />
<PackageReference Include="HtmlSanitizer" Version="7.1.512" />
<PackageReference Include="LanguageExt.Core" Version="4.2.2" />
<PackageReference Include="Markdig" Version="0.30.2" />
<PackageReference Include="MediatR.Courier.DependencyInjection" Version="5.0.0" />
<PackageReference Include="MediatR.Extensions.Microsoft.DependencyInjection" Version="10.0.1" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="6.0.5" />
<PackageReference Include="Microsoft.AspNetCore.SpaServices.Extensions" Version="6.0.5" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.5">
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="6.0.6" />
<PackageReference Include="Microsoft.AspNetCore.SpaServices.Extensions" Version="6.0.6" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>

2
ErsatzTV/Startup.cs

@@ -32,6 +32,7 @@ using ErsatzTV.Core.Metadata.Nfo;
using ErsatzTV.Core.Plex;
using ErsatzTV.Core.Scheduling;
using ErsatzTV.Core.Trakt;
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.Formatters;
using ErsatzTV.Infrastructure.Data;
using ErsatzTV.Infrastructure.Data.Repositories;
@@ -396,6 +397,7 @@ public class Startup
services.AddScoped<IRuntimeInfo, RuntimeInfo>();
services.AddScoped<IPlexPathReplacementService, PlexPathReplacementService>();
services.AddScoped<IFFmpegStreamSelector, FFmpegStreamSelector>();
services.AddScoped<IHardwareCapabilitiesFactory, HardwareCapabilitiesFactory>();
services.AddScoped<IFFmpegProcessService, FFmpegLibraryProcessService>();
services.AddScoped<FFmpegProcessService>();
