diff --git a/CHANGELOG.md b/CHANGELOG.md index 6bc8dd3a7..deddccb8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - This can help if your card only supports e.g. h264 encoding, normalizing to 8 bits will allow the hardware encoder to be used - Extract font attachments after extracting text subtitles - This should improve SubStation Alpha subtitle rendering +- Detect VAAPI capabilities and fall back to software decoding/encoding as needed - Add audio stream selector scripts for episodes and movies - This will let you customize which audio stream is selected for playback - Episodes are passed the following data: diff --git a/ErsatzTV.Application/ErsatzTV.Application.csproj b/ErsatzTV.Application/ErsatzTV.Application.csproj index 11dc55bc6..2a23f4b26 100644 --- a/ErsatzTV.Application/ErsatzTV.Application.csproj +++ b/ErsatzTV.Application/ErsatzTV.Application.csproj @@ -10,13 +10,13 @@ - + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + diff --git a/ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj b/ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj index 1f121d02a..fe7537e0e 100644 --- a/ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj +++ b/ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj @@ -10,20 +10,20 @@ - + - - + + all runtime; build; native; contentfiles; analyzers; buildtransitive - + - + diff --git a/ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs b/ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs index 56521a4ec..000e3ccad 100644 --- a/ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs +++ b/ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs @@ -35,6 +35,7 @@ namespace ErsatzTV.Core.Tests.FFmpeg; public class TranscodingTests { private static readonly ILoggerFactory LoggerFactory; + private static readonly MemoryCache _memoryCache; static TranscodingTests() { @@ -44,6 +45,8 @@ public class TranscodingTests .CreateLogger(); LoggerFactory = new 
LoggerFactory().AddSerilog(Log.Logger); + + _memoryCache = new MemoryCache(new MemoryCacheOptions()); } [Test] @@ -213,10 +216,10 @@ public class TranscodingTests FFmpegProfileVideoFormat profileVideoFormat, // [ValueSource(typeof(TestData), nameof(TestData.NoAcceleration))] HardwareAccelerationKind profileAcceleration) // [ValueSource(typeof(TestData), nameof(TestData.NvidiaAcceleration))] HardwareAccelerationKind profileAcceleration) - // [ValueSource(typeof(TestData), nameof(TestData.VaapiAcceleration))] HardwareAccelerationKind profileAcceleration) + [ValueSource(typeof(TestData), nameof(TestData.VaapiAcceleration))] HardwareAccelerationKind profileAcceleration) // [ValueSource(typeof(TestData), nameof(TestData.QsvAcceleration))] HardwareAccelerationKind profileAcceleration) // [ValueSource(typeof(TestData), nameof(TestData.VideoToolboxAcceleration))] HardwareAccelerationKind profileAcceleration) - [ValueSource(typeof(TestData), nameof(TestData.AmfAcceleration))] HardwareAccelerationKind profileAcceleration) + // [ValueSource(typeof(TestData), nameof(TestData.AmfAcceleration))] HardwareAccelerationKind profileAcceleration) { if (inputFormat.Encoder is "mpeg1video" or "msmpeg4v2" or "msmpeg4v3") { @@ -318,7 +321,7 @@ public class TranscodingTests imageCache.Object, new Mock().Object, new Mock().Object, - new MemoryCache(new MemoryCacheOptions()), + _memoryCache, LoggerFactory.CreateLogger()); var service = new FFmpegLibraryProcessService( @@ -330,7 +333,7 @@ public class TranscodingTests new RuntimeInfo(), //new FakeNvidiaCapabilitiesFactory(), new HardwareCapabilitiesFactory( - new MemoryCache(new MemoryCacheOptions()), + _memoryCache, LoggerFactory.CreateLogger()), LoggerFactory.CreateLogger()), LoggerFactory.CreateLogger()); @@ -785,7 +788,9 @@ public class TranscodingTests { public Task GetHardwareCapabilities( string ffmpegPath, - HardwareAccelerationMode hardwareAccelerationMode) => + HardwareAccelerationMode hardwareAccelerationMode, + Option vaapiDriver, 
+ Option vaapiDevice) => Task.FromResult(new NvidiaHardwareCapabilities(61, string.Empty)); } diff --git a/ErsatzTV.Core/ErsatzTV.Core.csproj b/ErsatzTV.Core/ErsatzTV.Core.csproj index f59d0c3bd..97e722264 100644 --- a/ErsatzTV.Core/ErsatzTV.Core.csproj +++ b/ErsatzTV.Core/ErsatzTV.Core.csproj @@ -10,19 +10,19 @@ - - - + + + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + diff --git a/ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs b/ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs index c17b69aa9..4aba848ea 100644 --- a/ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs +++ b/ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs @@ -218,6 +218,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService playbackSettings.RealtimeOutput, false, // TODO: fallback filler needs to loop videoFormat, + videoStream.Profile, Optional(desiredPixelFormat), ffmpegVideoStream.SquarePixelFrameSize( new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height)), @@ -256,6 +257,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService audioInputFile, watermarkInputFile, subtitleInputFile, + VaapiDriverName(hwAccel, vaapiDriver), + VaapiDeviceName(hwAccel, vaapiDevice), FileSystemLayout.FFmpegReportsFolder, FileSystemLayout.FontsCacheFolder, ffmpegPath); @@ -317,6 +320,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService playbackSettings.RealtimeOutput, false, GetVideoFormat(playbackSettings), + VideoProfile.Main, new PixelFormatYuv420P(), new FrameSize(desiredResolution.Width, desiredResolution.Height), new FrameSize(desiredResolution.Width, desiredResolution.Height), @@ -395,6 +399,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService audioInputFile, None, subtitleInputFile, + VaapiDriverName(hwAccel, vaapiDriver), + VaapiDeviceName(hwAccel, vaapiDevice), FileSystemLayout.FFmpegReportsFolder, FileSystemLayout.FontsCacheFolder, ffmpegPath); @@ -423,6 +429,8 @@ 
public class FFmpegLibraryProcessService : IFFmpegProcessService None, None, None, + None, + None, FileSystemLayout.FFmpegReportsFolder, FileSystemLayout.FontsCacheFolder, ffmpegPath); @@ -462,6 +470,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService None, None, None, + None, + None, FileSystemLayout.FFmpegReportsFolder, FileSystemLayout.FontsCacheFolder, ffmpegPath); diff --git a/ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj b/ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj index c3cd94451..ee5108ed9 100644 --- a/ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj +++ b/ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj @@ -9,15 +9,15 @@ - - + + - + - + all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs b/ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs index 0323f979d..9d6a6b3e2 100644 --- a/ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs +++ b/ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs @@ -55,6 +55,7 @@ public class PipelineBuilderBaseTests true, false, VideoFormat.Hevc, + VideoProfile.Main, new PixelFormatYuv420P(), new FrameSize(1920, 1080), new FrameSize(1920, 1080), @@ -139,6 +140,7 @@ public class PipelineBuilderBaseTests true, false, VideoFormat.Hevc, + VideoProfile.Main, new PixelFormatYuv420P(), new FrameSize(1920, 1080), new FrameSize(1920, 1080), @@ -248,6 +250,7 @@ public class PipelineBuilderBaseTests true, false, VideoFormat.Copy, + VideoProfile.Main, Option.None, new FrameSize(1920, 1080), new FrameSize(1920, 1080), @@ -324,6 +327,7 @@ public class PipelineBuilderBaseTests true, false, VideoFormat.Copy, + VideoProfile.Main, new PixelFormatYuv420P(), new FrameSize(1920, 1080), new FrameSize(1920, 1080), diff --git a/ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs index cd8923be7..3246e3d0d 100644 --- a/ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs +++ 
b/ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs @@ -4,9 +4,9 @@ namespace ErsatzTV.FFmpeg.Capabilities; public class AmfHardwareCapabilities : IHardwareCapabilities { - public bool CanDecode(string videoFormat, Option maybePixelFormat) => false; + public bool CanDecode(string videoFormat, string videoProfile, Option maybePixelFormat) => false; - public bool CanEncode(string videoFormat, Option maybePixelFormat) + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat) { int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); diff --git a/ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs index 532aa7bec..7edcb72dc 100644 --- a/ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs +++ b/ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs @@ -4,8 +4,9 @@ namespace ErsatzTV.FFmpeg.Capabilities; public class DefaultHardwareCapabilities : IHardwareCapabilities { - public bool CanDecode(string videoFormat, Option maybePixelFormat) => true; - public bool CanEncode(string videoFormat, Option maybePixelFormat) + public bool CanDecode(string videoFormat, string videoProfile, Option maybePixelFormat) => true; + + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat) { int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); diff --git a/ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs b/ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs index 71c1636f2..ff6251683 100644 --- a/ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs +++ b/ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs @@ -2,6 +2,7 @@ using System.Text; using System.Text.RegularExpressions; using CliWrap; using CliWrap.Buffered; +using ErsatzTV.FFmpeg.Capabilities.Vaapi; using Microsoft.Extensions.Caching.Memory; using Microsoft.Extensions.Logging; @@ -11,6 +12,7 @@ public class 
HardwareCapabilitiesFactory : IHardwareCapabilitiesFactory { private const string ArchitectureCacheKey = "ffmpeg.hardware.nvidia.architecture"; private const string ModelCacheKey = "ffmpeg.hardware.nvidia.model"; + private const string VaapiCacheKeyFormat = "ffmpeg.hardware.vaapi.{0}.{1}"; private readonly ILogger _logger; private readonly IMemoryCache _memoryCache; @@ -23,14 +25,103 @@ public class HardwareCapabilitiesFactory : IHardwareCapabilitiesFactory public async Task GetHardwareCapabilities( string ffmpegPath, - HardwareAccelerationMode hardwareAccelerationMode) => + HardwareAccelerationMode hardwareAccelerationMode, + Option vaapiDriver, + Option vaapiDevice) => hardwareAccelerationMode switch { HardwareAccelerationMode.Nvenc => await GetNvidiaCapabilities(ffmpegPath), + HardwareAccelerationMode.Vaapi => await GetVaapiCapabilities(vaapiDriver, vaapiDevice), HardwareAccelerationMode.Amf => new AmfHardwareCapabilities(), _ => new DefaultHardwareCapabilities() }; + private async Task GetVaapiCapabilities( + Option vaapiDriver, + Option vaapiDevice) + { + try + { + if (vaapiDevice.IsNone) + { + // this shouldn't really happen + + _logger.LogError( + "Cannot detect VAAPI capabilities without device {Device}", + vaapiDevice); + + return new NoHardwareCapabilities(); + } + + string driver = vaapiDriver.IfNone(string.Empty); + string device = vaapiDevice.IfNone(string.Empty); + var cacheKey = string.Format(VaapiCacheKeyFormat, driver, device); + + if (_memoryCache.TryGetValue(cacheKey, out List profileEntrypoints)) + { + return new VaapiHardwareCapabilities(profileEntrypoints, _logger); + } + + BufferedCommandResult whichResult = await Cli.Wrap("which") + .WithArguments("vainfo") + .WithValidation(CommandResultValidation.None) + .ExecuteBufferedAsync(Encoding.UTF8); + + if (whichResult.ExitCode != 0) + { + _logger.LogWarning("Unable to determine VAAPI capabilities; please install vainfo"); + return new DefaultHardwareCapabilities(); + } + + var envVars = new 
Dictionary(); + foreach (string libvaDriverName in vaapiDriver) + { + envVars.Add("LIBVA_DRIVER_NAME", libvaDriverName); + } + + BufferedCommandResult result = await Cli.Wrap("vainfo") + .WithArguments($"--display drm --device {device}") + .WithEnvironmentVariables(envVars) + .WithValidation(CommandResultValidation.None) + .ExecuteBufferedAsync(Encoding.UTF8); + + profileEntrypoints = new List(); + + foreach (string line in result.StandardOutput.Split("\n")) + { + const string PROFILE_ENTRYPOINT_PATTERN = @"(VAProfile\w*).*(VAEntrypoint\w*)"; + Match match = Regex.Match(line, PROFILE_ENTRYPOINT_PATTERN); + if (match.Success) + { + profileEntrypoints.Add( + new VaapiProfileEntrypoint( + match.Groups[1].Value.Trim(), + match.Groups[2].Value.Trim())); + } + } + + if (profileEntrypoints.Any()) + { + _logger.LogWarning( + "Detected {Count} VAAPI profile entrypoints for using {Driver} {Device}", + profileEntrypoints.Count, + driver, + device); + _memoryCache.Set(cacheKey, profileEntrypoints); + return new VaapiHardwareCapabilities(profileEntrypoints, _logger); + } + } + catch + { + // ignored + } + + _logger.LogWarning( + "Error detecting VAAPI capabilities; some hardware accelerated features will be unavailable"); + + return new NoHardwareCapabilities(); + } + private async Task GetNvidiaCapabilities(string ffmpegPath) { if (_memoryCache.TryGetValue(ArchitectureCacheKey, out int cachedArchitecture) diff --git a/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs index 92545c2af..4da0be0ba 100644 --- a/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs +++ b/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs @@ -4,6 +4,6 @@ namespace ErsatzTV.FFmpeg.Capabilities; public interface IHardwareCapabilities { - public bool CanDecode(string videoFormat, Option maybePixelFormat); - public bool CanEncode(string videoFormat, Option maybePixelFormat); + public bool CanDecode(string videoFormat, string videoProfile, 
Option maybePixelFormat); + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat); } diff --git a/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs b/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs index 3d3954927..8df2a9a91 100644 --- a/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs +++ b/ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs @@ -4,5 +4,7 @@ public interface IHardwareCapabilitiesFactory { Task GetHardwareCapabilities( string ffmpegPath, - HardwareAccelerationMode hardwareAccelerationMode); + HardwareAccelerationMode hardwareAccelerationMode, + Option vaapiDriver, + Option vaapiDevice); } diff --git a/ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs index a64105913..73ed7aaed 100644 --- a/ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs +++ b/ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs @@ -4,6 +4,6 @@ namespace ErsatzTV.FFmpeg.Capabilities; public class NoHardwareCapabilities : IHardwareCapabilities { - public bool CanDecode(string videoFormat, Option maybePixelFormat) => false; - public bool CanEncode(string videoFormat, Option maybePixelFormat) => false; + public bool CanDecode(string videoFormat, string videoProfile, Option maybePixelFormat) => false; + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat) => false; } diff --git a/ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs index e3845d835..0b5586093 100644 --- a/ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs +++ b/ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs @@ -14,7 +14,7 @@ public class NvidiaHardwareCapabilities : IHardwareCapabilities _model = model; } - public bool CanDecode(string videoFormat, Option maybePixelFormat) + public bool CanDecode(string videoFormat, string videoProfile, Option 
maybePixelFormat) { int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); @@ -36,7 +36,7 @@ public class NvidiaHardwareCapabilities : IHardwareCapabilities }; } - public bool CanEncode(string videoFormat, Option maybePixelFormat) + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat) { int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); diff --git a/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiEntrypoint.cs b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiEntrypoint.cs new file mode 100644 index 000000000..c990e5b88 --- /dev/null +++ b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiEntrypoint.cs @@ -0,0 +1,7 @@ +namespace ErsatzTV.FFmpeg.Capabilities.Vaapi; + +public class VaapiEntrypoint +{ + public const string Decode = "VAEntrypointVLD"; + public const string Encode = "VAEntrypointEncSlice"; +} diff --git a/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfile.cs b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfile.cs new file mode 100644 index 000000000..17fb85a25 --- /dev/null +++ b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfile.cs @@ -0,0 +1,17 @@ +namespace ErsatzTV.FFmpeg.Capabilities.Vaapi; + +public class VaapiProfile +{ + public const string Mpeg2Simple = "VAProfileMPEG2Simple"; + public const string Mpeg2Main = "VAProfileMPEG2Main"; + public const string H264ConstrainedBaseline = "VAProfileH264ConstrainedBaseline"; + public const string H264Main = "VAProfileH264Main"; + public const string H264High = "VAProfileH264High"; + public const string H264MultiviewHigh = "VAProfileH264MultiviewHigh"; + public const string H264StereoHigh = "VAProfileH264StereoHigh"; + public const string Vc1Simple = "VAProfileVC1Simple"; + public const string Vc1Main = "VAProfileVC1Main"; + public const string Vc1Advanced = "VAProfileVC1Advanced"; + public const string HevcMain = "VAProfileHEVCMain"; + public const string HevcMain10 = "VAProfileHEVCMain10"; +} diff --git a/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfileEntrypoint.cs 
b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfileEntrypoint.cs new file mode 100644 index 000000000..b439f9510 --- /dev/null +++ b/ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfileEntrypoint.cs @@ -0,0 +1,3 @@ +namespace ErsatzTV.FFmpeg.Capabilities.Vaapi; + +public record VaapiProfileEntrypoint(string VaapiProfile, string VaapiEntrypoint); diff --git a/ErsatzTV.FFmpeg/Capabilities/VaapiHardwareCapabilities.cs b/ErsatzTV.FFmpeg/Capabilities/VaapiHardwareCapabilities.cs new file mode 100644 index 000000000..90e5c9d09 --- /dev/null +++ b/ErsatzTV.FFmpeg/Capabilities/VaapiHardwareCapabilities.cs @@ -0,0 +1,123 @@ +using ErsatzTV.FFmpeg.Capabilities.Vaapi; +using ErsatzTV.FFmpeg.Format; +using Microsoft.Extensions.Logging; + +namespace ErsatzTV.FFmpeg.Capabilities; + +public class VaapiHardwareCapabilities : IHardwareCapabilities +{ + private readonly List _profileEntrypoints; + private readonly ILogger _logger; + + public VaapiHardwareCapabilities(List profileEntrypoints, ILogger logger) + { + _profileEntrypoints = profileEntrypoints; + _logger = logger; + } + + public bool CanDecode(string videoFormat, string videoProfile, Option maybePixelFormat) + { + int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); + + bool result = (videoFormat, videoProfile.ToLowerInvariant()) switch + { + // no hardware decoding of 10-bit h264 + (VideoFormat.H264, _) when bitDepth == 10 => false, + + // no hardware decoding of h264 baseline profile + (VideoFormat.H264, "baseline" or "66") => false, + + (VideoFormat.H264, "main" or "77") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.H264Main, VaapiEntrypoint.Decode)), + + (VideoFormat.H264, "high" or "100") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.H264High, VaapiEntrypoint.Decode)), + + (VideoFormat.H264, "high 10" or "110") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.H264High, VaapiEntrypoint.Decode)), + + (VideoFormat.H264, 
"baseline constrained" or "578") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.H264ConstrainedBaseline, VaapiEntrypoint.Decode)), + + (VideoFormat.Mpeg2Video, "main" or "4") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.Mpeg2Main, VaapiEntrypoint.Decode)), + + (VideoFormat.Mpeg2Video, "simple" or "5") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.Mpeg2Simple, VaapiEntrypoint.Decode)), + + (VideoFormat.Vc1, "simple" or "0") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.Vc1Simple, VaapiEntrypoint.Decode)), + + (VideoFormat.Vc1, "main" or "1") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.Vc1Main, VaapiEntrypoint.Decode)), + + (VideoFormat.Vc1, "advanced" or "3") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.Vc1Advanced, VaapiEntrypoint.Decode)), + + (VideoFormat.Hevc, "main" or "1") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.HevcMain, VaapiEntrypoint.Decode)), + + (VideoFormat.Hevc, "main 10" or "2") => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.HevcMain10, VaapiEntrypoint.Decode)), + + // fall back to software decoder + _ => false + }; + + if (!result) + { + _logger.LogDebug( + "VAAPI does not support decoding {Format}/{Profile}, will use software decoder", + videoFormat, + videoProfile); + } + + return result; + } + + public bool CanEncode(string videoFormat, string videoProfile, Option maybePixelFormat) + { + int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8); + + bool result = videoFormat switch + { + // vaapi cannot encode 10-bit h264 + VideoFormat.H264 when bitDepth == 10 => false, + + VideoFormat.H264 => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.H264Main, VaapiEntrypoint.Encode)), + + VideoFormat.Hevc when bitDepth == 10 => + _profileEntrypoints.Contains( + new 
VaapiProfileEntrypoint(VaapiProfile.HevcMain10, VaapiEntrypoint.Encode)), + + VideoFormat.Hevc => + _profileEntrypoints.Contains( + new VaapiProfileEntrypoint(VaapiProfile.HevcMain, VaapiEntrypoint.Encode)), + + _ => false + }; + + if (!result) + { + _logger.LogDebug( + "VAAPI does not support encoding {Format} with bit depth {BitDepth}, will use software encoder", + videoFormat, + bitDepth); + } + + return result; + } +} diff --git a/ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs b/ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs deleted file mode 100644 index 4615fd40b..000000000 --- a/ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs +++ /dev/null @@ -1,103 +0,0 @@ -using ErsatzTV.FFmpeg.Capabilities; -using ErsatzTV.FFmpeg.Decoder.Cuvid; -using ErsatzTV.FFmpeg.Decoder.Qsv; -using ErsatzTV.FFmpeg.Format; -using Microsoft.Extensions.Logging; - -namespace ErsatzTV.FFmpeg.Decoder; - -public static class AvailableDecoders -{ - public static Option ForVideoFormat( - IHardwareCapabilities hardwareCapabilities, - FFmpegState ffmpegState, - FrameState currentState, - FrameState desiredState, - Option watermarkInputFile, - Option subtitleInputFile, - ILogger logger) => - (ffmpegState.DecoderHardwareAccelerationMode, currentState.VideoFormat, - currentState.PixelFormat.Match(pf => pf.Name, () => string.Empty)) switch - { - (HardwareAccelerationMode.Nvenc, VideoFormat.Hevc, _) - when hardwareCapabilities.CanDecode(VideoFormat.Hevc, currentState.PixelFormat) => - new DecoderHevcCuvid(ffmpegState.EncoderHardwareAccelerationMode), - - // nvenc doesn't support hardware decoding of 10-bit content - (HardwareAccelerationMode.Nvenc, VideoFormat.H264, PixelFormat.YUV420P10LE or PixelFormat.YUV444P10LE) - => new DecoderH264(), - - // mpeg2_cuvid seems to have issues when yadif_cuda is used, so just use software decoding - (HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) when desiredState.Deinterlaced => - new DecoderMpeg2Video(), - - (HardwareAccelerationMode.Nvenc, 
VideoFormat.H264, _) - when hardwareCapabilities.CanDecode(VideoFormat.H264, currentState.PixelFormat) => - new DecoderH264Cuvid(ffmpegState.EncoderHardwareAccelerationMode), - (HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Cuvid( - ffmpegState.EncoderHardwareAccelerationMode, - desiredState.Deinterlaced), - (HardwareAccelerationMode.Nvenc, VideoFormat.Vc1, _) => new DecoderVc1Cuvid(ffmpegState.EncoderHardwareAccelerationMode), - (HardwareAccelerationMode.Nvenc, VideoFormat.Vp9, _) - when hardwareCapabilities.CanDecode(VideoFormat.Vp9, currentState.PixelFormat) => - new DecoderVp9Cuvid(ffmpegState.EncoderHardwareAccelerationMode), - (HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg4, _) => new DecoderMpeg4Cuvid(ffmpegState.EncoderHardwareAccelerationMode), - - // hevc_qsv decoder sometimes causes green lines with 10-bit content - (HardwareAccelerationMode.Qsv, VideoFormat.Hevc, PixelFormat.YUV420P10LE) => new DecoderHevc(), - - // h264_qsv does not support decoding 10-bit content - (HardwareAccelerationMode.Qsv, VideoFormat.H264, PixelFormat.YUV420P10LE or PixelFormat.YUV444P10LE) => - new DecoderH264(), - - // qsv uses software deinterlace filter, so decode in software - (HardwareAccelerationMode.Qsv, VideoFormat.H264, _) when desiredState.Deinterlaced => new DecoderH264(), - (HardwareAccelerationMode.Qsv, VideoFormat.Mpeg2Video, _) when desiredState.Deinterlaced => - new DecoderMpeg2Video(), - - (HardwareAccelerationMode.Qsv, VideoFormat.Hevc, _) => new DecoderHevcQsv(), - (HardwareAccelerationMode.Qsv, VideoFormat.H264, _) => new DecoderH264Qsv(), - (HardwareAccelerationMode.Qsv, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Qsv(), - (HardwareAccelerationMode.Qsv, VideoFormat.Vc1, _) => new DecoderVc1Qsv(), - (HardwareAccelerationMode.Qsv, VideoFormat.Vp9, _) => new DecoderVp9Qsv(), - - // vaapi should use implicit decoders when scaling or no watermark/subtitles - // otherwise, fall back to software decoders - 
(HardwareAccelerationMode.Vaapi, _, _) when watermarkInputFile.IsNone && subtitleInputFile.IsNone || - currentState.ScaledSize != desiredState.ScaledSize => - new DecoderVaapi(), - - // videotoolbox should use implicit decoders - (HardwareAccelerationMode.VideoToolbox, _, _) => new DecoderVideoToolbox(), - - (_, VideoFormat.Hevc, _) => new DecoderHevc(), - (_, VideoFormat.H264, _) => new DecoderH264(), - (_, VideoFormat.Mpeg1Video, _) => new DecoderMpeg1Video(), - (_, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Video(), - (_, VideoFormat.Vc1, _) => new DecoderVc1(), - (_, VideoFormat.MsMpeg4V2, _) => new DecoderMsMpeg4V2(), - (_, VideoFormat.MsMpeg4V3, _) => new DecoderMsMpeg4V3(), - (_, VideoFormat.Mpeg4, _) => new DecoderMpeg4(), - (_, VideoFormat.Vp9, _) => new DecoderVp9(), - - (_, VideoFormat.Undetermined, _) => new DecoderImplicit(), - (_, VideoFormat.Copy, _) => new DecoderImplicit(), - (_, VideoFormat.GeneratedImage, _) => new DecoderImplicit(), - - var (accel, videoFormat, pixelFormat) => LogUnknownDecoder(accel, videoFormat, pixelFormat, logger) - }; - - private static Option LogUnknownDecoder( - HardwareAccelerationMode hardwareAccelerationMode, - string videoFormat, - string pixelFormat, - ILogger logger) - { - logger.LogWarning( - "Unable to determine decoder for {AccelMode} - {VideoFormat} - {PixelFormat}; may have playback issues", - hardwareAccelerationMode, - videoFormat, - pixelFormat); - return Option.None; - } -} diff --git a/ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs b/ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs index 6146d667a..cd217b2ad 100644 --- a/ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs +++ b/ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs @@ -1,10 +1,4 @@ -using ErsatzTV.FFmpeg.Capabilities; -using ErsatzTV.FFmpeg.Encoder.Amf; -using ErsatzTV.FFmpeg.Encoder.Nvenc; -using ErsatzTV.FFmpeg.Encoder.Qsv; -using ErsatzTV.FFmpeg.Encoder.Vaapi; -using ErsatzTV.FFmpeg.Encoder.VideoToolbox; -using ErsatzTV.FFmpeg.Format; +using 
ErsatzTV.FFmpeg.Format; using ErsatzTV.FFmpeg.State; using Microsoft.Extensions.Logging; @@ -12,75 +6,6 @@ namespace ErsatzTV.FFmpeg.Encoder; public static class AvailableEncoders { - public static Option ForVideoFormat( - IHardwareCapabilities hardwareCapabilities, - FFmpegState ffmpegState, - FrameState currentState, - FrameState desiredState, - Option maybeWatermarkInputFile, - Option maybeSubtitleInputFile, - ILogger logger) => - (ffmpegState.EncoderHardwareAccelerationMode, desiredState.VideoFormat) switch - { - (HardwareAccelerationMode.Nvenc, VideoFormat.Hevc) when hardwareCapabilities.CanEncode( - VideoFormat.Hevc, - desiredState.PixelFormat) => - new EncoderHevcNvenc(), - (HardwareAccelerationMode.Nvenc, VideoFormat.H264) when hardwareCapabilities.CanEncode( - VideoFormat.H264, - desiredState.PixelFormat) => - new EncoderH264Nvenc(), - - (HardwareAccelerationMode.Qsv, VideoFormat.Hevc) when hardwareCapabilities.CanEncode( - VideoFormat.Hevc, - desiredState.PixelFormat) => new EncoderHevcQsv(), - (HardwareAccelerationMode.Qsv, VideoFormat.H264) when hardwareCapabilities.CanEncode( - VideoFormat.H264, - desiredState.PixelFormat) => new EncoderH264Qsv(), - - (HardwareAccelerationMode.Vaapi, VideoFormat.Hevc) when hardwareCapabilities.CanEncode( - VideoFormat.Hevc, - desiredState.PixelFormat) => new EncoderHevcVaapi(), - (HardwareAccelerationMode.Vaapi, VideoFormat.H264) when hardwareCapabilities.CanEncode( - VideoFormat.H264, - desiredState.PixelFormat) => new EncoderH264Vaapi(), - - (HardwareAccelerationMode.VideoToolbox, VideoFormat.Hevc) when hardwareCapabilities.CanEncode( - VideoFormat.Hevc, - desiredState.PixelFormat) => new EncoderHevcVideoToolbox(desiredState.BitDepth), - (HardwareAccelerationMode.VideoToolbox, VideoFormat.H264) when hardwareCapabilities.CanEncode( - VideoFormat.H264, - desiredState.PixelFormat) => new EncoderH264VideoToolbox(), - - (HardwareAccelerationMode.Amf, VideoFormat.Hevc) when hardwareCapabilities.CanEncode( - 
VideoFormat.Hevc, - desiredState.PixelFormat) => new EncoderHevcAmf(), - (HardwareAccelerationMode.Amf, VideoFormat.H264) when hardwareCapabilities.CanEncode( - VideoFormat.H264, - desiredState.PixelFormat) => new EncoderH264Amf(), - - (_, VideoFormat.Hevc) => new EncoderLibx265(currentState), - (_, VideoFormat.H264) => new EncoderLibx264(), - (_, VideoFormat.Mpeg2Video) => new EncoderMpeg2Video(), - - (_, VideoFormat.Undetermined) => new EncoderImplicitVideo(), - (_, VideoFormat.Copy) => new EncoderCopyVideo(), - - var (accel, videoFormat) => LogUnknownEncoder(accel, videoFormat, logger) - }; - - private static Option LogUnknownEncoder( - HardwareAccelerationMode hardwareAccelerationMode, - string videoFormat, - ILogger logger) - { - logger.LogWarning( - "Unable to determine video encoder for {AccelMode} - {VideoFormat}; may have playback issues", - hardwareAccelerationMode, - videoFormat); - return Option.None; - } - public static Option ForAudioFormat(AudioState desiredState, ILogger logger) => desiredState.AudioFormat.Match( audioFormat => diff --git a/ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj b/ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj index 9d7ade830..d30ac040c 100644 --- a/ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj +++ b/ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj @@ -8,7 +8,7 @@ - + diff --git a/ErsatzTV.FFmpeg/Format/VideoProfile.cs b/ErsatzTV.FFmpeg/Format/VideoProfile.cs new file mode 100644 index 000000000..4f64725c1 --- /dev/null +++ b/ErsatzTV.FFmpeg/Format/VideoProfile.cs @@ -0,0 +1,6 @@ +namespace ErsatzTV.FFmpeg.Format; + +public static class VideoProfile +{ + public const string Main = "main"; +} diff --git a/ErsatzTV.FFmpeg/FrameState.cs b/ErsatzTV.FFmpeg/FrameState.cs index 9ca0fe0bc..e8d4ed42e 100644 --- a/ErsatzTV.FFmpeg/FrameState.cs +++ b/ErsatzTV.FFmpeg/FrameState.cs @@ -6,6 +6,7 @@ public record FrameState( bool Realtime, bool InfiniteLoop, string VideoFormat, + string VideoProfile, Option PixelFormat, FrameSize ScaledSize, FrameSize PaddedSize, diff 
--git a/ErsatzTV.FFmpeg/Option/HardwareAcceleration/AvailableHardwareAccelerationOptions.cs b/ErsatzTV.FFmpeg/Option/HardwareAcceleration/AvailableHardwareAccelerationOptions.cs deleted file mode 100644 index e5e72621e..000000000 --- a/ErsatzTV.FFmpeg/Option/HardwareAcceleration/AvailableHardwareAccelerationOptions.cs +++ /dev/null @@ -1,38 +0,0 @@ -using Microsoft.Extensions.Logging; - -namespace ErsatzTV.FFmpeg.Option.HardwareAcceleration; - -public static class AvailableHardwareAccelerationOptions -{ - public static Option ForMode( - HardwareAccelerationMode mode, - Option gpuDevice, - ILogger logger) => - mode switch - { - HardwareAccelerationMode.Nvenc => new CudaHardwareAccelerationOption(), - HardwareAccelerationMode.Qsv => new QsvHardwareAccelerationOption(gpuDevice), - HardwareAccelerationMode.Vaapi => GetVaapiAcceleration(gpuDevice, logger), - HardwareAccelerationMode.VideoToolbox => new VideoToolboxHardwareAccelerationOption(), - HardwareAccelerationMode.Amf => new AmfHardwareAccelerationOption(), - HardwareAccelerationMode.None => Option.None, - _ => LogUnknownMode(mode, logger) - }; - - private static Option GetVaapiAcceleration(Option vaapiDevice, ILogger logger) - { - foreach (string device in vaapiDevice) - { - return new VaapiHardwareAccelerationOption(device); - } - - logger.LogWarning("VAAPI device name is missing; falling back to software mode"); - return Option.None; - } - - private static Option LogUnknownMode(HardwareAccelerationMode mode, ILogger logger) - { - logger.LogWarning("Unexpected hardware acceleration mode {AccelMode}; may have playback issues", mode); - return Option.None; - } -} diff --git a/ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs b/ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs index 4e545f48a..03ba8f418 100644 --- a/ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs +++ 
b/ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs @@ -3,11 +3,28 @@ public class VaapiHardwareAccelerationOption : GlobalOption { private readonly string _vaapiDevice; + private readonly bool _canDecode; - public VaapiHardwareAccelerationOption(string vaapiDevice) => _vaapiDevice = vaapiDevice; + public VaapiHardwareAccelerationOption(string vaapiDevice, bool canDecode) + { + _vaapiDevice = vaapiDevice; + _canDecode = canDecode; + } + + public override IList GlobalOptions + { + get + { + var result = new List { "-vaapi_device", _vaapiDevice }; + + if (_canDecode) + { + result.InsertRange(0, new[] { "-hwaccel", "vaapi" }); + } - public override IList GlobalOptions => new List - { "-hwaccel", "vaapi", "-vaapi_device", _vaapiDevice }; + return result; + } + } public override FrameState NextState(FrameState currentState) => currentState with { diff --git a/ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs index 30ec3fd99..2503ef563 100644 --- a/ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs +++ b/ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs @@ -44,8 +44,14 @@ public class AmfPipelineBuilder : SoftwarePipelineBuilder PipelineContext context, ICollection pipelineSteps) { - bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat); + bool canDecode = _hardwareCapabilities.CanDecode( + videoStream.Codec, + desiredState.VideoProfile, + videoStream.PixelFormat); + bool canEncode = _hardwareCapabilities.CanEncode( + desiredState.VideoFormat, + desiredState.VideoProfile, + desiredState.PixelFormat); pipelineSteps.Add(new AmfHardwareAccelerationOption()); diff --git a/ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs b/ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs index d010c42ea..ac641520b 100644 --- a/ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs 
+++ b/ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs @@ -8,6 +8,8 @@ public interface IPipelineBuilderFactory Option audioInputFile, Option watermarkInputFile, Option subtitleInputFile, + Option vaapiDriver, + Option vaapiDevice, string reportsFolder, string fontsFolder, string ffmpegPath); diff --git a/ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs index 4b7f10c72..3c85d18da 100644 --- a/ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs +++ b/ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs @@ -48,8 +48,14 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder PipelineContext context, ICollection pipelineSteps) { - bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat); + bool canDecode = _hardwareCapabilities.CanDecode( + videoStream.Codec, + desiredState.VideoProfile, + videoStream.PixelFormat); + bool canEncode = _hardwareCapabilities.CanEncode( + desiredState.VideoFormat, + desiredState.VideoProfile, + desiredState.PixelFormat); // mpeg2_cuvid seems to have issues when yadif_cuda is used, so just use software decoding if (context.ShouldDeinterlace && videoStream.Codec == VideoFormat.Mpeg2Video) diff --git a/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs b/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs index b3883309d..c8b80b19c 100644 --- a/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs +++ b/ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs @@ -26,68 +26,79 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory Option audioInputFile, Option watermarkInputFile, Option subtitleInputFile, + Option vaapiDriver, + Option vaapiDevice, string reportsFolder, string fontsFolder, - string ffmpegPath) => hardwareAccelerationMode switch + string ffmpegPath) { - HardwareAccelerationMode.Nvenc => new NvidiaPipelineBuilder( - await 
_hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode), + IHardwareCapabilities capabilities = await _hardwareCapabilitiesFactory.GetHardwareCapabilities( + ffmpegPath, hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger), - HardwareAccelerationMode.Vaapi => new VaapiPipelineBuilder( - await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode), - hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger), - HardwareAccelerationMode.Qsv => new QsvPipelineBuilder( - await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode), - hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger), - HardwareAccelerationMode.VideoToolbox => new VideoToolboxPipelineBuilder( - await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode), - hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger), - HardwareAccelerationMode.Amf => new AmfPipelineBuilder( - await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode), - hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger), - _ => new SoftwarePipelineBuilder( - hardwareAccelerationMode, - videoInputFile, - audioInputFile, - watermarkInputFile, - subtitleInputFile, - reportsFolder, - fontsFolder, - _logger) - }; + vaapiDriver, + vaapiDevice); + + return hardwareAccelerationMode switch + { + HardwareAccelerationMode.Nvenc => new NvidiaPipelineBuilder( + capabilities, + hardwareAccelerationMode, + 
videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger), + HardwareAccelerationMode.Vaapi => new VaapiPipelineBuilder( + capabilities, + hardwareAccelerationMode, + videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger), + HardwareAccelerationMode.Qsv => new QsvPipelineBuilder( + capabilities, + hardwareAccelerationMode, + videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger), + HardwareAccelerationMode.VideoToolbox => new VideoToolboxPipelineBuilder( + capabilities, + hardwareAccelerationMode, + videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger), + HardwareAccelerationMode.Amf => new AmfPipelineBuilder( + capabilities, + hardwareAccelerationMode, + videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger), + _ => new SoftwarePipelineBuilder( + hardwareAccelerationMode, + videoInputFile, + audioInputFile, + watermarkInputFile, + subtitleInputFile, + reportsFolder, + fontsFolder, + _logger) + }; + } } diff --git a/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs index 7f6b66bf8..92e74972f 100644 --- a/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs +++ b/ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs @@ -48,8 +48,14 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder PipelineContext context, ICollection pipelineSteps) { - bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat); + bool canDecode = _hardwareCapabilities.CanDecode( + videoStream.Codec, + desiredState.VideoProfile, + videoStream.PixelFormat); + bool canEncode = 
_hardwareCapabilities.CanEncode( + desiredState.VideoFormat, + desiredState.VideoProfile, + desiredState.PixelFormat); pipelineSteps.Add(new QsvHardwareAccelerationOption(ffmpegState.VaapiDevice)); diff --git a/ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs index bc9de633b..3ac365c0c 100644 --- a/ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs +++ b/ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs @@ -2,6 +2,7 @@ using ErsatzTV.FFmpeg.Capabilities; using ErsatzTV.FFmpeg.Decoder; using ErsatzTV.FFmpeg.Encoder; using ErsatzTV.FFmpeg.Encoder.Vaapi; +using ErsatzTV.FFmpeg.Environment; using ErsatzTV.FFmpeg.Filter; using ErsatzTV.FFmpeg.Filter.Vaapi; using ErsatzTV.FFmpeg.Format; @@ -47,16 +48,27 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder PipelineContext context, ICollection pipelineSteps) { - bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat); + bool canDecode = _hardwareCapabilities.CanDecode( + videoStream.Codec, + desiredState.VideoProfile, + videoStream.PixelFormat); + bool canEncode = _hardwareCapabilities.CanEncode( + desiredState.VideoFormat, + desiredState.VideoProfile, + desiredState.PixelFormat); foreach (string vaapiDevice in ffmpegState.VaapiDevice) { - pipelineSteps.Add(new VaapiHardwareAccelerationOption(vaapiDevice)); + pipelineSteps.Add(new VaapiHardwareAccelerationOption(vaapiDevice, canDecode)); + + foreach (string driverName in ffmpegState.VaapiDriver) + { + pipelineSteps.Add(new LibvaDriverNameVariable(driverName)); + } } // use software decoding with an extensive pipeline - if (context.HasSubtitleOverlay && context.HasWatermark) + if (context is { HasSubtitleOverlay: true, HasWatermark: true }) { canDecode = false; } @@ -136,11 +148,11 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder currentState = 
SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState); // _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat); - currentState = SetPad(videoInputFile, videoStream, desiredState, currentState); + currentState = SetPad(videoInputFile, desiredState, currentState); // _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat); // need to upload for hardware overlay - bool forceSoftwareOverlay = context.HasSubtitleOverlay && context.HasWatermark; + bool forceSoftwareOverlay = context is { HasSubtitleOverlay: true, HasWatermark: true }; if (currentState.FrameDataLocation == FrameDataLocation.Software && context.HasSubtitleOverlay && !forceSoftwareOverlay) @@ -164,7 +176,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder subtitleInputFile, context, forceSoftwareOverlay, - ffmpegState, currentState, desiredState, fontsFolder, @@ -174,7 +185,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder videoStream, watermarkInputFile, context, - ffmpegState, desiredState, currentState, watermarkOverlayFilterSteps); @@ -202,9 +212,7 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder videoStream, desiredState.PixelFormat, ffmpegState, - currentState, - context, - pipelineSteps); + currentState); return new FilterChain( videoInputFile.FilterSteps, @@ -219,9 +227,7 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder VideoStream videoStream, Option desiredPixelFormat, FFmpegState ffmpegState, - FrameState currentState, - PipelineContext context, - ICollection pipelineSteps) + FrameState currentState) { var result = new List(); @@ -298,7 +304,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder VideoStream videoStream, Option watermarkInputFile, PipelineContext context, - FFmpegState ffmpegState, FrameState desiredState, FrameState currentState, List watermarkOverlayFilterSteps) @@ -367,7 +372,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder 
Option subtitleInputFile, PipelineContext context, bool forceSoftwareOverlay, - FFmpegState ffmpegState, FrameState currentState, FrameState desiredState, string fontsFolder, @@ -449,7 +453,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder private static FrameState SetPad( VideoInputFile videoInputFile, - VideoStream videoStream, FrameState desiredState, FrameState currentState) { diff --git a/ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs b/ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs index 37712261b..fc1de86d1 100644 --- a/ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs +++ b/ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs @@ -45,8 +45,14 @@ public class VideoToolboxPipelineBuilder : SoftwarePipelineBuilder PipelineContext context, ICollection pipelineSteps) { - bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat); + bool canDecode = _hardwareCapabilities.CanDecode( + videoStream.Codec, + desiredState.VideoProfile, + videoStream.PixelFormat); + bool canEncode = _hardwareCapabilities.CanEncode( + desiredState.VideoFormat, + desiredState.VideoProfile, + desiredState.PixelFormat); pipelineSteps.Add(new VideoToolboxHardwareAccelerationOption()); diff --git a/ErsatzTV.FFmpeg/PipelineBuilder.cs b/ErsatzTV.FFmpeg/PipelineBuilder.cs deleted file mode 100644 index b2b542968..000000000 --- a/ErsatzTV.FFmpeg/PipelineBuilder.cs +++ /dev/null @@ -1,812 +0,0 @@ -using ErsatzTV.FFmpeg.Capabilities; -using ErsatzTV.FFmpeg.Decoder; -using ErsatzTV.FFmpeg.Encoder; -using ErsatzTV.FFmpeg.Environment; -using ErsatzTV.FFmpeg.Filter; -using ErsatzTV.FFmpeg.Filter.Cuda; -using ErsatzTV.FFmpeg.Format; -using ErsatzTV.FFmpeg.Option; -using ErsatzTV.FFmpeg.Option.HardwareAcceleration; -using ErsatzTV.FFmpeg.Option.Metadata; -using ErsatzTV.FFmpeg.OutputFormat; -using ErsatzTV.FFmpeg.Pipeline; 
-using ErsatzTV.FFmpeg.Protocol; -using ErsatzTV.FFmpeg.Runtime; -using ErsatzTV.FFmpeg.State; -using Microsoft.Extensions.Logging; - -namespace ErsatzTV.FFmpeg; - -public class PipelineBuilder : IPipelineBuilder -{ - private readonly Option _audioInputFile; - private readonly string _fontsFolder; - private readonly IRuntimeInfo _runtimeInfo; - private readonly IHardwareCapabilities _hardwareCapabilities; - private readonly ILogger _logger; - private readonly List _pipelineSteps; - private readonly string _reportsFolder; - private readonly Option _subtitleInputFile; - private readonly Option _videoInputFile; - private readonly Option _watermarkInputFile; - - public PipelineBuilder( - IRuntimeInfo runtimeInfo, - IHardwareCapabilities hardwareCapabilities, - Option videoInputFile, - Option audioInputFile, - Option watermarkInputFile, - Option subtitleInputFile, - string reportsFolder, - string fontsFolder, - ILogger logger) - { - _pipelineSteps = new List - { - new NoStandardInputOption(), - new HideBannerOption(), - new NoStatsOption(), - new LoglevelErrorOption(), - new StandardFormatFlags(), - new NoDemuxDecodeDelayOutputOption(), - new FastStartOutputOption(), - new ClosedGopOutputOption() - }; - - _runtimeInfo = runtimeInfo; - _hardwareCapabilities = hardwareCapabilities; - _videoInputFile = videoInputFile; - _audioInputFile = audioInputFile; - _watermarkInputFile = watermarkInputFile; - _subtitleInputFile = subtitleInputFile; - _reportsFolder = reportsFolder; - _fontsFolder = fontsFolder; - _logger = logger; - } - - public FFmpegPipeline Resize(string outputFile, FrameSize scaledSize) - { - _pipelineSteps.Clear(); - _pipelineSteps.Add(new NoStandardInputOption()); - _pipelineSteps.Add(new HideBannerOption()); - _pipelineSteps.Add(new NoStatsOption()); - _pipelineSteps.Add(new LoglevelErrorOption()); - - IPipelineFilterStep scaleStep = new ScaleImageFilter(scaledSize); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep)); - - _pipelineSteps.Add(new 
VideoFilter(new[] { scaleStep })); - _pipelineSteps.Add(scaleStep); - _pipelineSteps.Add(new FileNameOutputOption(outputFile)); - - return new FFmpegPipeline(_pipelineSteps); - } - - public FFmpegPipeline Concat(ConcatInputFile concatInputFile, FFmpegState ffmpegState) - { - concatInputFile.AddOption(new ConcatInputFormat()); - concatInputFile.AddOption(new RealtimeInputOption()); - concatInputFile.AddOption(new InfiniteLoopInputOption(HardwareAccelerationMode.None)); - - foreach (int threadCount in ffmpegState.ThreadCount) - { - _pipelineSteps.Insert(0, new ThreadCountOption(threadCount)); - } - - _pipelineSteps.Add(new NoSceneDetectOutputOption(0)); - _pipelineSteps.Add(new EncoderCopyAll()); - - if (ffmpegState.DoNotMapMetadata) - { - _pipelineSteps.Add(new DoNotMapMetadataOutputOption()); - } - - foreach (string desiredServiceProvider in ffmpegState.MetadataServiceProvider) - { - _pipelineSteps.Add(new MetadataServiceProviderOutputOption(desiredServiceProvider)); - } - - foreach (string desiredServiceName in ffmpegState.MetadataServiceName) - { - _pipelineSteps.Add(new MetadataServiceNameOutputOption(desiredServiceName)); - } - - _pipelineSteps.Add(new OutputFormatMpegTs()); - _pipelineSteps.Add(new PipeProtocol()); - - if (ffmpegState.SaveReport) - { - _pipelineSteps.Add(new FFReportVariable(_reportsFolder, concatInputFile)); - } - - return new FFmpegPipeline(_pipelineSteps); - } - - public virtual FFmpegPipeline Build(FFmpegState ffmpegState, FrameState desiredState) - { - Option originalDesiredPixelFormat = desiredState.PixelFormat; - bool is10BitOutput = desiredState.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10; - - if (ffmpegState.Start.Exists(s => s > TimeSpan.Zero) && desiredState.Realtime) - { - _logger.LogInformation( - "Forcing {Threads} ffmpeg thread due to buggy combination of stream seek and realtime output", - 1); - - _pipelineSteps.Insert(0, new ThreadCountOption(1)); - } - else - { - foreach (int threadCount in ffmpegState.ThreadCount) - { 
- _pipelineSteps.Insert(0, new ThreadCountOption(threadCount)); - } - } - - var allVideoStreams = _videoInputFile.SelectMany(f => f.VideoStreams).ToList(); - - // -sc_threshold 0 is unsupported with mpeg2video - _pipelineSteps.Add( - allVideoStreams.All(s => s.Codec != VideoFormat.Mpeg2Video) && - desiredState.VideoFormat != VideoFormat.Mpeg2Video - ? new NoSceneDetectOutputOption(0) - : new NoSceneDetectOutputOption(1_000_000_000)); - - if (ffmpegState.SaveReport) - { - _pipelineSteps.Add(new FFReportVariable(_reportsFolder, None)); - } - - foreach (TimeSpan desiredStart in ffmpegState.Start.Filter(s => s > TimeSpan.Zero)) - { - var option = new StreamSeekInputOption(desiredStart); - _audioInputFile.Iter(f => f.AddOption(option)); - _videoInputFile.Iter(f => f.AddOption(option)); - - // need to seek text subtitle files - if (_subtitleInputFile.Map(s => !s.IsImageBased).IfNone(false)) - { - _pipelineSteps.Add(new StreamSeekFilterOption(desiredStart)); - } - } - - foreach (TimeSpan desiredFinish in ffmpegState.Finish) - { - _pipelineSteps.Add(new TimeLimitOutputOption(desiredFinish)); - } - - foreach (VideoStream videoStream in allVideoStreams) - { - bool hasOverlay = _watermarkInputFile.IsSome || - _subtitleInputFile.Map(s => s.IsImageBased && !s.Copy).IfNone(false); - - Option initialFrameRate = Option.None; - foreach (string frameRateString in videoStream.FrameRate) - { - if (int.TryParse(frameRateString, out int parsedFrameRate)) - { - initialFrameRate = parsedFrameRate; - } - } - - var currentState = new FrameState( - false, // realtime - false, // infinite loop - videoStream.Codec, - videoStream.PixelFormat, - videoStream.FrameSize, - videoStream.FrameSize, - videoStream.IsAnamorphic, - initialFrameRate, - Option.None, - Option.None, - Option.None, - false); // deinterlace - - IEncoder encoder; - - if (IsDesiredVideoState(currentState, desiredState)) - { - encoder = new EncoderCopyVideo(); - _pipelineSteps.Add(encoder); - } - else - { - Option maybeAccel = 
AvailableHardwareAccelerationOptions.ForMode( - ffmpegState.EncoderHardwareAccelerationMode, - ffmpegState.VaapiDevice, - _logger); - - if (maybeAccel.IsNone) - { - ffmpegState = ffmpegState with - { - // disable hw accel if we don't match anything - DecoderHardwareAccelerationMode = HardwareAccelerationMode.None, - EncoderHardwareAccelerationMode = HardwareAccelerationMode.None - }; - } - - foreach (IPipelineStep accel in maybeAccel) - { - bool canDecode = _hardwareCapabilities.CanDecode(currentState.VideoFormat, videoStream.PixelFormat); - bool canEncode = _hardwareCapabilities.CanEncode( - desiredState.VideoFormat, - desiredState.PixelFormat); - - // disable hw accel if decoder/encoder isn't supported - if (!canDecode || !canEncode) - { - ffmpegState = ffmpegState with - { - DecoderHardwareAccelerationMode = canDecode - ? ffmpegState.DecoderHardwareAccelerationMode - : HardwareAccelerationMode.None, - EncoderHardwareAccelerationMode = canEncode - ? ffmpegState.EncoderHardwareAccelerationMode - : HardwareAccelerationMode.None - }; - } - - if (canDecode || canEncode) - { - currentState = accel.NextState(currentState); - _pipelineSteps.Add(accel); - } - } - - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc && hasOverlay && - is10BitOutput) - { - IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P10Le()); - desiredState = desiredState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) }; - } - - // - // // qsv should stay nv12 - // if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv && hasOverlay) - // { - // IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P()); - // desiredState = desiredState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) }; - // } - - foreach (string desiredVaapiDriver in ffmpegState.VaapiDriver) - { - IPipelineStep step = new LibvaDriverNameVariable(desiredVaapiDriver); - currentState = 
step.NextState(currentState); - _pipelineSteps.Add(step); - } - - foreach (IDecoder decoder in AvailableDecoders.ForVideoFormat( - _hardwareCapabilities, - ffmpegState, - currentState, - desiredState, - _watermarkInputFile, - _subtitleInputFile, - _logger)) - { - foreach (VideoInputFile videoInputFile in _videoInputFile) - { - videoInputFile.AddOption(decoder); - currentState = decoder.NextState(currentState); - } - } - } - - if (_subtitleInputFile.Map(s => s.Copy) == Some(true)) - { - _pipelineSteps.Add(new EncoderCopySubtitle()); - } - - if (videoStream.StillImage) - { - var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode); - _videoInputFile.Iter(f => f.AddOption(option)); - } - - if (!IsDesiredVideoState(currentState, desiredState)) - { - if (desiredState.Realtime) - { - var option = new RealtimeInputOption(); - _audioInputFile.Iter(f => f.AddOption(option)); - _videoInputFile.Iter(f => f.AddOption(option)); - } - - if (desiredState.InfiniteLoop) - { - var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode); - _audioInputFile.Iter(f => f.AddOption(option)); - _videoInputFile.Iter(f => f.AddOption(option)); - } - - foreach (int desiredFrameRate in desiredState.FrameRate) - { - if (currentState.FrameRate != desiredFrameRate) - { - IPipelineStep step = new FrameRateOutputOption(desiredFrameRate); - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - foreach (int desiredTimeScale in desiredState.VideoTrackTimeScale) - { - if (currentState.VideoTrackTimeScale != desiredTimeScale) - { - IPipelineStep step = new VideoTrackTimescaleOutputOption(desiredTimeScale); - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - foreach (int desiredBitrate in desiredState.VideoBitrate) - { - if (currentState.VideoBitrate != desiredBitrate) - { - IPipelineStep step = new VideoBitrateOutputOption(desiredBitrate); - currentState = 
step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - foreach (int desiredBufferSize in desiredState.VideoBufferSize) - { - if (currentState.VideoBufferSize != desiredBufferSize) - { - IPipelineStep step = new VideoBufferSizeOutputOption(desiredBufferSize); - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - if (desiredState.Deinterlaced && !currentState.Deinterlaced) - { - IPipelineFilterStep step = AvailableDeinterlaceFilters.ForAcceleration( - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState, - _watermarkInputFile, - _subtitleInputFile); - currentState = step.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(step)); - } - - // TODO: this is a software-only flow, will need to be different for hardware accel - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None) - { - if (currentState.ScaledSize != desiredState.ScaledSize || - currentState.PaddedSize != desiredState.PaddedSize) - { - IPipelineFilterStep scaleStep = new ScaleFilter( - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - videoStream.IsAnamorphicEdgeCase); - currentState = scaleStep.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep)); - - // TODO: padding might not be needed, can we optimize this out? 
- IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize); - currentState = padStep.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(padStep)); - } - } - else if (currentState.ScaledSize != desiredState.ScaledSize) - { - IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration( - _runtimeInfo, - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames, - videoStream.IsAnamorphicEdgeCase, - videoStream.SampleAspectRatio); - currentState = scaleFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter)); - - // TODO: padding might not be needed, can we optimize this out? - if (currentState.PaddedSize != desiredState.PaddedSize) - { - IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize); - currentState = padStep.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(padStep)); - } - } - else if (currentState.PaddedSize != desiredState.PaddedSize) - { - IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration( - _runtimeInfo, - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames, - videoStream.IsAnamorphicEdgeCase, - videoStream.SampleAspectRatio); - currentState = scaleFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter)); - - if (currentState.PaddedSize != desiredState.PaddedSize) - { - IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize); - currentState = padStep.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(padStep)); - } - } - - if (hasOverlay && currentState.PixelFormat.Map(pf => pf.FFmpegName) != - desiredState.PixelFormat.Map(pf => pf.FFmpegName)) - { - // this should only happen with nvenc? 
- // use scale filter to fix pixel format - - foreach (IPixelFormat pixelFormat in desiredState.PixelFormat) - { - if (currentState.FrameDataLocation == FrameDataLocation.Software) - { - IPipelineFilterStep formatFilter = new PixelFormatFilter(pixelFormat); - currentState = formatFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(formatFilter)); - - switch (ffmpegState.EncoderHardwareAccelerationMode) - { - case HardwareAccelerationMode.Nvenc: - var uploadFilter = new HardwareUploadFilter(ffmpegState); - currentState = uploadFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(uploadFilter)); - break; - } - } - else - { - if (ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.Qsv) - { - // the filter re-applies the current pixel format, so we have to set it first - currentState = currentState with { PixelFormat = desiredState.PixelFormat }; - - IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration( - _runtimeInfo, - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames, - videoStream.IsAnamorphicEdgeCase, - videoStream.SampleAspectRatio); - currentState = scaleFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter)); - } - } - } - } - - // nvenc custom logic - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc) - { - foreach (VideoInputFile videoInputFile in _videoInputFile) - { - // if we only deinterlace, we need to set pixel format again (using scale_cuda) - bool onlyYadif = videoInputFile.FilterSteps.Count == 1 && - videoInputFile.FilterSteps.Any(fs => fs is YadifCudaFilter); - - // if we have no filters and an overlay, we need to set pixel format - bool unfilteredWithOverlay = videoInputFile.FilterSteps.Count == 0 && hasOverlay; - - if (onlyYadif || unfilteredWithOverlay) - { - // the filter re-applies the 
current pixel format, so we have to set it first - currentState = currentState with { PixelFormat = desiredState.PixelFormat }; - - IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration( - _runtimeInfo, - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames, - videoStream.IsAnamorphicEdgeCase, - videoStream.SampleAspectRatio); - currentState = scaleFilter.NextState(currentState); - videoInputFile.FilterSteps.Add(scaleFilter); - } - } - } - - if (ffmpegState.PtsOffset > 0) - { - foreach (int videoTrackTimeScale in desiredState.VideoTrackTimeScale) - { - IPipelineStep step = new OutputTsOffsetOption( - ffmpegState.PtsOffset, - videoTrackTimeScale); - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - } - - // TODO: if all video filters are software, use software pixel format for hwaccel output - // might be able to skip scale_cuda=format=whatever,hwdownload,format=whatever - - if (_audioInputFile.IsNone) - { - // always need to specify audio codec so ffmpeg doesn't default to a codec we don't want - foreach (IEncoder step in AvailableEncoders.ForAudioFormat(AudioState.Copy, _logger)) - { - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - foreach (AudioInputFile audioInputFile in _audioInputFile) - { - // always need to specify audio codec so ffmpeg doesn't default to a codec we don't want - foreach (IEncoder step in AvailableEncoders.ForAudioFormat(audioInputFile.DesiredState, _logger)) - { - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - - foreach (AudioStream audioStream in audioInputFile.AudioStreams.HeadOrNone()) - { - foreach (int desiredAudioChannels in audioInputFile.DesiredState.AudioChannels) - { - _pipelineSteps.Add( - new AudioChannelsOutputOption( - audioInputFile.DesiredState.AudioFormat, - audioStream.Channels, - desiredAudioChannels)); - } 
- } - - foreach (int desiredBitrate in audioInputFile.DesiredState.AudioBitrate) - { - _pipelineSteps.Add(new AudioBitrateOutputOption(desiredBitrate)); - } - - foreach (int desiredBufferSize in audioInputFile.DesiredState.AudioBufferSize) - { - _pipelineSteps.Add(new AudioBufferSizeOutputOption(desiredBufferSize)); - } - - foreach (int desiredSampleRate in audioInputFile.DesiredState.AudioSampleRate) - { - _pipelineSteps.Add(new AudioSampleRateOutputOption(desiredSampleRate)); - } - - if (audioInputFile.DesiredState.NormalizeLoudness) - { - _audioInputFile.Iter(f => f.FilterSteps.Add(new NormalizeLoudnessFilter())); - } - - foreach (TimeSpan desiredDuration in audioInputFile.DesiredState.AudioDuration) - { - _audioInputFile.Iter(f => f.FilterSteps.Add(new AudioPadFilter(desiredDuration))); - } - } - - foreach (SubtitleInputFile subtitleInputFile in _subtitleInputFile) - { - if (subtitleInputFile.IsImageBased) - { - // vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software - // though videotoolbox uses software decoders, so no need to download for that - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi) - { - var downloadFilter = new HardwareDownloadFilter(currentState); - currentState = downloadFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter)); - } - - var pixelFormatFilter = new SubtitlePixelFormatFilter(ffmpegState, is10BitOutput); - subtitleInputFile.FilterSteps.Add(pixelFormatFilter); - - subtitleInputFile.FilterSteps.Add(new SubtitleHardwareUploadFilter(currentState, ffmpegState)); - - FrameState fakeState = currentState; - foreach (string format in pixelFormatFilter.MaybeFormat) - { - fakeState = fakeState with - { - PixelFormat = AvailablePixelFormats.ForPixelFormat(format, _logger) - }; - } - - // hacky check for actual scaling or padding - if (_videoInputFile.Exists( - v => v.FilterSteps.Any(s => 
s.Filter.Contains(currentState.PaddedSize.Height.ToString())))) - { - // enable scaling the subtitle stream - fakeState = fakeState with { ScaledSize = new FrameSize(1, 1) }; - } - - IPipelineFilterStep scaleFilter = AvailableSubtitleScaleFilters.ForAcceleration( - ffmpegState.EncoderHardwareAccelerationMode, - fakeState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames); - subtitleInputFile.FilterSteps.Add(scaleFilter); - } - else - { - _videoInputFile.Iter(f => f.AddOption(new CopyTimestampInputOption())); - - // text-based subtitles are always added in software, so always try to download the background - - // nvidia needs some extra format help if the only filter will be the download filter - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc && - currentState.FrameDataLocation == FrameDataLocation.Hardware && - _videoInputFile.Map(f => f.FilterSteps.Count).IfNone(1) == 0) - { - IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration( - _runtimeInfo, - ffmpegState.EncoderHardwareAccelerationMode, - currentState, - desiredState.ScaledSize, - desiredState.PaddedSize, - ffmpegState.QsvExtraHardwareFrames, - videoStream.IsAnamorphicEdgeCase, - videoStream.SampleAspectRatio); - currentState = scaleFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter)); - } - - var downloadFilter = new HardwareDownloadFilter(currentState); - currentState = downloadFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter)); - } - } - - foreach (WatermarkInputFile watermarkInputFile in _watermarkInputFile) - { - // vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software - // though videotoolbox uses software decoders, so no need to download for that - if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi) - { - var downloadFilter = new 
HardwareDownloadFilter(currentState); - currentState = downloadFilter.NextState(currentState); - _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter)); - } - - watermarkInputFile.FilterSteps.Add( - new WatermarkPixelFormatFilter(ffmpegState, watermarkInputFile.DesiredState, is10BitOutput)); - - foreach (VideoStream watermarkStream in watermarkInputFile.VideoStreams) - { - if (watermarkStream.StillImage == false) - { - watermarkInputFile.AddOption(new DoNotIgnoreLoopInputOption()); - } - else if (watermarkInputFile.DesiredState.MaybeFadePoints.Map(fp => fp.Count > 0).IfNone(false)) - { - // looping is required to fade a static image in and out - watermarkInputFile.AddOption( - new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode)); - } - } - - if (watermarkInputFile.DesiredState.Size == WatermarkSize.Scaled) - { - watermarkInputFile.FilterSteps.Add( - new WatermarkScaleFilter(watermarkInputFile.DesiredState, currentState.PaddedSize)); - } - - if (watermarkInputFile.DesiredState.Opacity != 100) - { - watermarkInputFile.FilterSteps.Add(new WatermarkOpacityFilter(watermarkInputFile.DesiredState)); - } - - foreach (List fadePoints in watermarkInputFile.DesiredState.MaybeFadePoints) - { - watermarkInputFile.FilterSteps.AddRange(fadePoints.Map(fp => new WatermarkFadeFilter(fp))); - } - - watermarkInputFile.FilterSteps.Add(new WatermarkHardwareUploadFilter(currentState, ffmpegState)); - } - - // after everything else is done, apply the encoder - if (_pipelineSteps.OfType().All(e => e.Kind != StreamKind.Video)) - { - foreach (IEncoder e in AvailableEncoders.ForVideoFormat( - _hardwareCapabilities, - ffmpegState, - currentState, - desiredState, - _watermarkInputFile, - _subtitleInputFile, - _logger)) - { - encoder = e; - _pipelineSteps.Add(encoder); - _videoInputFile.Iter(f => f.FilterSteps.Add(encoder)); - currentState = encoder.NextState(currentState); - } - } - - if (ffmpegState.DoNotMapMetadata) - { - _pipelineSteps.Add(new 
DoNotMapMetadataOutputOption()); - } - - foreach (string desiredServiceProvider in ffmpegState.MetadataServiceProvider) - { - _pipelineSteps.Add(new MetadataServiceProviderOutputOption(desiredServiceProvider)); - } - - foreach (string desiredServiceName in ffmpegState.MetadataServiceName) - { - _pipelineSteps.Add(new MetadataServiceNameOutputOption(desiredServiceName)); - } - - foreach (string desiredAudioLanguage in ffmpegState.MetadataAudioLanguage) - { - _pipelineSteps.Add(new MetadataAudioLanguageOutputOption(desiredAudioLanguage)); - } - - switch (ffmpegState.OutputFormat) - { - case OutputFormatKind.MpegTs: - _pipelineSteps.Add(new OutputFormatMpegTs()); - _pipelineSteps.Add(new PipeProtocol()); - // currentState = currentState with { OutputFormat = OutputFormatKind.MpegTs }; - break; - case OutputFormatKind.Hls: - foreach (string playlistPath in ffmpegState.HlsPlaylistPath) - { - foreach (string segmentTemplate in ffmpegState.HlsSegmentTemplate) - { - var step = new OutputFormatHls( - desiredState, - videoStream.FrameRate, - segmentTemplate, - playlistPath); - currentState = step.NextState(currentState); - _pipelineSteps.Add(step); - } - } - - break; - } - - var complexFilter = new ComplexFilter( - currentState, - ffmpegState, - _videoInputFile, - _audioInputFile, - _watermarkInputFile, - _subtitleInputFile, - originalDesiredPixelFormat, - currentState.PaddedSize, - _fontsFolder, - _logger); - - _pipelineSteps.Add(complexFilter); - } - - return new FFmpegPipeline(_pipelineSteps); - } - - private static bool IsDesiredVideoState(FrameState currentState, FrameState desiredState) - { - if (desiredState.VideoFormat == VideoFormat.Copy) - { - return true; - } - - return currentState.VideoFormat == desiredState.VideoFormat && - currentState.PixelFormat.Match(pf => pf.Name, () => string.Empty) == - desiredState.PixelFormat.Match(pf => pf.Name, string.Empty) && - (desiredState.VideoBitrate.IsNone || currentState.VideoBitrate == desiredState.VideoBitrate) && - 
(desiredState.VideoBufferSize.IsNone || currentState.VideoBufferSize == desiredState.VideoBufferSize) && - currentState.Realtime == desiredState.Realtime && - (desiredState.VideoTrackTimeScale.IsNone || - currentState.VideoTrackTimeScale == desiredState.VideoTrackTimeScale) && - currentState.ScaledSize == desiredState.ScaledSize && - currentState.PaddedSize == desiredState.PaddedSize && - (desiredState.FrameRate.IsNone || currentState.FrameRate == desiredState.FrameRate); - } -} diff --git a/ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj b/ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj index 12ba3c88e..00c22b5b1 100644 --- a/ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj +++ b/ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj @@ -10,15 +10,18 @@ - - + + - + runtime; build; native; contentfiles; analyzers; buildtransitive all - + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + diff --git a/ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj b/ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj index 950ebaf38..9bcec5196 100644 --- a/ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj +++ b/ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj @@ -11,7 +11,7 @@ - + @@ -21,14 +21,14 @@ runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + diff --git a/ErsatzTV.sln.DotSettings b/ErsatzTV.sln.DotSettings index 6d5f83d57..bfde66f92 100644 --- a/ErsatzTV.sln.DotSettings +++ b/ErsatzTV.sln.DotSettings @@ -21,6 +21,7 @@ True True True + True True True True diff --git a/ErsatzTV/ErsatzTV.csproj b/ErsatzTV/ErsatzTV.csproj index 1cc9a703e..6f8fdcbde 100644 --- a/ErsatzTV/ErsatzTV.csproj +++ b/ErsatzTV/ErsatzTV.csproj @@ -53,29 +53,29 @@ - + - + - - + + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; 
buildtransitive - + - +