detect vaapi capabilities (#1051)

* remove unused pipeline
* spike vaapi hardware capabilities
* more vaapi capabilities
* use proper vaapi driver
* update readme
* update dependencies

commit a2274bca7b by Jason Dove

38 changed files (changed lines in parentheses):

  1. CHANGELOG.md (1)
  2. ErsatzTV.Application/ErsatzTV.Application.csproj (6)
  3. ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj (10)
  4. ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs (15)
  5. ErsatzTV.Core/ErsatzTV.Core.csproj (10)
  6. ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs (10)
  7. ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj (8)
  8. ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs (4)
  9. ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs (4)
  10. ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs (5)
  11. ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs (93)
  12. ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs (4)
  13. ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs (4)
  14. ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs (4)
  15. ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs (4)
  16. ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiEntrypoint.cs (7)
  17. ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfile.cs (17)
  18. ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfileEntrypoint.cs (3)
  19. ErsatzTV.FFmpeg/Capabilities/VaapiHardwareCapabilities.cs (123)
  20. ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs (103)
  21. ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs (77)
  22. ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj (2)
  23. ErsatzTV.FFmpeg/Format/VideoProfile.cs (6)
  24. ErsatzTV.FFmpeg/FrameState.cs (1)
  25. ErsatzTV.FFmpeg/Option/HardwareAcceleration/AvailableHardwareAccelerationOptions.cs (38)
  26. ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs (23)
  27. ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs (10)
  28. ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs (2)
  29. ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (10)
  30. ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs (131)
  31. ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (10)
  32. ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (37)
  33. ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs (10)
  34. ErsatzTV.FFmpeg/PipelineBuilder.cs (812)
  35. ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj (11)
  36. ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj (6)
  37. ErsatzTV.sln.DotSettings (1)
  38. ErsatzTV/ErsatzTV.csproj (14)

CHANGELOG.md (1)

@@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- This can help if your card only supports e.g. h264 encoding, normalizing to 8 bits will allow the hardware encoder to be used
- Extract font attachments after extracting text subtitles
- This should improve SubStation Alpha subtitle rendering
- Detect VAAPI capabilities and fallback to software decoding/encoding as needed
- Add audio stream selector scripts for episodes and movies
- This will let you customize which audio stream is selected for playback
- Episodes are passed the following data:

ErsatzTV.Application/ErsatzTV.Application.csproj (6)

@@ -10,13 +10,13 @@
<PackageReference Include="Bugsnag" Version="3.1.0" />
<PackageReference Include="CliWrap" Version="3.5.0" />
<PackageReference Include="Humanizer.Core" Version="2.14.1" />
<PackageReference Include="MediatR" Version="11.0.0" />
<PackageReference Include="MediatR" Version="11.1.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.3.48">
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.4.27">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
<PackageReference Include="Winista.MimeDetect" Version="1.0.1" />
</ItemGroup>

ErsatzTV.Core.Tests/ErsatzTV.Core.Tests.csproj (10)

@@ -10,20 +10,20 @@
<PackageReference Include="Bugsnag" Version="3.1.0" />
<PackageReference Include="CliWrap" Version="3.5.0" />
<PackageReference Include="FluentAssertions" Version="6.8.0" />
<PackageReference Include="LanguageExt.Core" Version="4.2.9" />
<PackageReference Include="LanguageExt.Core" Version="4.4.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="6.0.1" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="6.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="6.0.2" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="6.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.3.2" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.3.48">
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.4.27">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Moq" Version="4.18.2" />
<PackageReference Include="Moq" Version="4.18.3" />
<PackageReference Include="NUnit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.2.1" />
<PackageReference Include="NUnit3TestAdapter" Version="4.3.1" />
<PackageReference Include="Serilog" Version="2.12.0" />
<PackageReference Include="Serilog.Extensions.Logging" Version="3.1.0" />
<PackageReference Include="Serilog.Sinks.Debug" Version="2.0.0" />

ErsatzTV.Core.Tests/FFmpeg/TranscodingTests.cs (15)

@@ -35,6 +35,7 @@ namespace ErsatzTV.Core.Tests.FFmpeg;
public class TranscodingTests
{
private static readonly ILoggerFactory LoggerFactory;
private static readonly MemoryCache _memoryCache;
static TranscodingTests()
{
@@ -44,6 +45,8 @@ public class TranscodingTests
.CreateLogger();
LoggerFactory = new LoggerFactory().AddSerilog(Log.Logger);
_memoryCache = new MemoryCache(new MemoryCacheOptions());
}
[Test]
@@ -213,10 +216,10 @@ public class TranscodingTests
FFmpegProfileVideoFormat profileVideoFormat,
// [ValueSource(typeof(TestData), nameof(TestData.NoAcceleration))] HardwareAccelerationKind profileAcceleration)
// [ValueSource(typeof(TestData), nameof(TestData.NvidiaAcceleration))] HardwareAccelerationKind profileAcceleration)
// [ValueSource(typeof(TestData), nameof(TestData.VaapiAcceleration))] HardwareAccelerationKind profileAcceleration)
[ValueSource(typeof(TestData), nameof(TestData.VaapiAcceleration))] HardwareAccelerationKind profileAcceleration)
// [ValueSource(typeof(TestData), nameof(TestData.QsvAcceleration))] HardwareAccelerationKind profileAcceleration)
// [ValueSource(typeof(TestData), nameof(TestData.VideoToolboxAcceleration))] HardwareAccelerationKind profileAcceleration)
[ValueSource(typeof(TestData), nameof(TestData.AmfAcceleration))] HardwareAccelerationKind profileAcceleration)
// [ValueSource(typeof(TestData), nameof(TestData.AmfAcceleration))] HardwareAccelerationKind profileAcceleration)
{
if (inputFormat.Encoder is "mpeg1video" or "msmpeg4v2" or "msmpeg4v3")
{
@@ -318,7 +321,7 @@ public class TranscodingTests
imageCache.Object,
new Mock<ITempFilePool>().Object,
new Mock<IClient>().Object,
new MemoryCache(new MemoryCacheOptions()),
_memoryCache,
LoggerFactory.CreateLogger<FFmpegProcessService>());
var service = new FFmpegLibraryProcessService(
@@ -330,7 +333,7 @@ public class TranscodingTests
new RuntimeInfo(),
//new FakeNvidiaCapabilitiesFactory(),
new HardwareCapabilitiesFactory(
new MemoryCache(new MemoryCacheOptions()),
_memoryCache,
LoggerFactory.CreateLogger<HardwareCapabilitiesFactory>()),
LoggerFactory.CreateLogger<PipelineBuilderFactory>()),
LoggerFactory.CreateLogger<FFmpegLibraryProcessService>());
@@ -785,7 +788,9 @@ public class TranscodingTests
{
public Task<IHardwareCapabilities> GetHardwareCapabilities(
string ffmpegPath,
HardwareAccelerationMode hardwareAccelerationMode) =>
HardwareAccelerationMode hardwareAccelerationMode,
Option<string> vaapiDriver,
Option<string> vaapiDevice) =>
Task.FromResult<IHardwareCapabilities>(new NvidiaHardwareCapabilities(61, string.Empty));
}

ErsatzTV.Core/ErsatzTV.Core.csproj (10)

@@ -10,19 +10,19 @@
<PackageReference Include="Bugsnag" Version="3.1.0" />
<PackageReference Include="Destructurama.Attributed" Version="3.0.0" />
<PackageReference Include="Flurl" Version="3.0.6" />
<PackageReference Include="LanguageExt.Core" Version="4.2.9" />
<PackageReference Include="LanguageExt.Transformers" Version="4.2.9" />
<PackageReference Include="MediatR" Version="11.0.0" />
<PackageReference Include="LanguageExt.Core" Version="4.4.0" />
<PackageReference Include="LanguageExt.Transformers" Version="4.4.0" />
<PackageReference Include="MediatR" Version="11.1.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="6.0.2" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.2.1" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.3.48">
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.4.27">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
<PackageReference Include="Serilog" Version="2.12.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="4.1.0" />
</ItemGroup>

ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs (10)

@@ -218,6 +218,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
playbackSettings.RealtimeOutput,
false, // TODO: fallback filler needs to loop
videoFormat,
videoStream.Profile,
Optional(desiredPixelFormat),
ffmpegVideoStream.SquarePixelFrameSize(
new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height)),
@@ -256,6 +257,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
audioInputFile,
watermarkInputFile,
subtitleInputFile,
VaapiDriverName(hwAccel, vaapiDriver),
VaapiDeviceName(hwAccel, vaapiDevice),
FileSystemLayout.FFmpegReportsFolder,
FileSystemLayout.FontsCacheFolder,
ffmpegPath);
@@ -317,6 +320,7 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
playbackSettings.RealtimeOutput,
false,
GetVideoFormat(playbackSettings),
VideoProfile.Main,
new PixelFormatYuv420P(),
new FrameSize(desiredResolution.Width, desiredResolution.Height),
new FrameSize(desiredResolution.Width, desiredResolution.Height),
@@ -395,6 +399,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
audioInputFile,
None,
subtitleInputFile,
VaapiDriverName(hwAccel, vaapiDriver),
VaapiDeviceName(hwAccel, vaapiDevice),
FileSystemLayout.FFmpegReportsFolder,
FileSystemLayout.FontsCacheFolder,
ffmpegPath);
@@ -423,6 +429,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
None,
None,
None,
None,
None,
FileSystemLayout.FFmpegReportsFolder,
FileSystemLayout.FontsCacheFolder,
ffmpegPath);
@@ -462,6 +470,8 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
None,
None,
None,
None,
None,
FileSystemLayout.FFmpegReportsFolder,
FileSystemLayout.FontsCacheFolder,
ffmpegPath);

ErsatzTV.FFmpeg.Tests/ErsatzTV.FFmpeg.Tests.csproj (8)

@@ -9,15 +9,15 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.8.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.3.2" />
<PackageReference Include="Moq" Version="4.18.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
<PackageReference Include="Moq" Version="4.18.3" />
<PackageReference Include="NUnit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.2.1" />
<PackageReference Include="NUnit3TestAdapter" Version="4.3.1" />
<PackageReference Include="System.IO.FileSystem.Primitives" Version="4.3.0" />
<PackageReference Include="System.Text.Encoding.Extensions" Version="4.3.0" />
<PackageReference Include="System.Runtime.Handles" Version="4.3.0" />
<PackageReference Include="System.Runtime.InteropServices" Version="4.3.0" />
<PackageReference Include="coverlet.collector" Version="3.1.2">
<PackageReference Include="coverlet.collector" Version="3.2.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>

ErsatzTV.FFmpeg.Tests/PipelineBuilderBaseTests.cs (4)

@@ -55,6 +55,7 @@ public class PipelineBuilderBaseTests
true,
false,
VideoFormat.Hevc,
VideoProfile.Main,
new PixelFormatYuv420P(),
new FrameSize(1920, 1080),
new FrameSize(1920, 1080),
@@ -139,6 +140,7 @@ public class PipelineBuilderBaseTests
true,
false,
VideoFormat.Hevc,
VideoProfile.Main,
new PixelFormatYuv420P(),
new FrameSize(1920, 1080),
new FrameSize(1920, 1080),
@@ -248,6 +250,7 @@ public class PipelineBuilderBaseTests
true,
false,
VideoFormat.Copy,
VideoProfile.Main,
Option<IPixelFormat>.None,
new FrameSize(1920, 1080),
new FrameSize(1920, 1080),
@@ -324,6 +327,7 @@ public class PipelineBuilderBaseTests
true,
false,
VideoFormat.Copy,
VideoProfile.Main,
new PixelFormatYuv420P(),
new FrameSize(1920, 1080),
new FrameSize(1920, 1080),

ErsatzTV.FFmpeg/Capabilities/AmfHardwareCapabilities.cs (4)

@@ -4,9 +4,9 @@ namespace ErsatzTV.FFmpeg.Capabilities;
public class AmfHardwareCapabilities : IHardwareCapabilities
{
public bool CanDecode(string videoFormat, Option<IPixelFormat> maybePixelFormat) => false;
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat) => false;
public bool CanEncode(string videoFormat, Option<IPixelFormat> maybePixelFormat)
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);

ErsatzTV.FFmpeg/Capabilities/DefaultHardwareCapabilities.cs (5)

@@ -4,8 +4,9 @@ namespace ErsatzTV.FFmpeg.Capabilities;
public class DefaultHardwareCapabilities : IHardwareCapabilities
{
public bool CanDecode(string videoFormat, Option<IPixelFormat> maybePixelFormat) => true;
public bool CanEncode(string videoFormat, Option<IPixelFormat> maybePixelFormat)
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat) => true;
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);

ErsatzTV.FFmpeg/Capabilities/HardwareCapabilitiesFactory.cs (93)

@@ -2,6 +2,7 @@ using System.Text;
using System.Text.RegularExpressions;
using CliWrap;
using CliWrap.Buffered;
using ErsatzTV.FFmpeg.Capabilities.Vaapi;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
@@ -11,6 +12,7 @@ public class HardwareCapabilitiesFactory : IHardwareCapabilitiesFactory
{
private const string ArchitectureCacheKey = "ffmpeg.hardware.nvidia.architecture";
private const string ModelCacheKey = "ffmpeg.hardware.nvidia.model";
private const string VaapiCacheKeyFormat = "ffmpeg.hardware.vaapi.{0}.{1}";
private readonly ILogger<HardwareCapabilitiesFactory> _logger;
private readonly IMemoryCache _memoryCache;
@@ -23,14 +25,103 @@ public class HardwareCapabilitiesFactory : IHardwareCapabilitiesFactory
public async Task<IHardwareCapabilities> GetHardwareCapabilities(
string ffmpegPath,
HardwareAccelerationMode hardwareAccelerationMode) =>
HardwareAccelerationMode hardwareAccelerationMode,
Option<string> vaapiDriver,
Option<string> vaapiDevice) =>
hardwareAccelerationMode switch
{
HardwareAccelerationMode.Nvenc => await GetNvidiaCapabilities(ffmpegPath),
HardwareAccelerationMode.Vaapi => await GetVaapiCapabilities(vaapiDriver, vaapiDevice),
HardwareAccelerationMode.Amf => new AmfHardwareCapabilities(),
_ => new DefaultHardwareCapabilities()
};
private async Task<IHardwareCapabilities> GetVaapiCapabilities(
Option<string> vaapiDriver,
Option<string> vaapiDevice)
{
try
{
if (vaapiDevice.IsNone)
{
// this shouldn't really happen
_logger.LogError(
"Cannot detect VAAPI capabilities without device {Device}",
vaapiDevice);
return new NoHardwareCapabilities();
}
string driver = vaapiDriver.IfNone(string.Empty);
string device = vaapiDevice.IfNone(string.Empty);
var cacheKey = string.Format(VaapiCacheKeyFormat, driver, device);
if (_memoryCache.TryGetValue(cacheKey, out List<VaapiProfileEntrypoint> profileEntrypoints))
{
return new VaapiHardwareCapabilities(profileEntrypoints, _logger);
}
BufferedCommandResult whichResult = await Cli.Wrap("which")
.WithArguments("vainfo")
.WithValidation(CommandResultValidation.None)
.ExecuteBufferedAsync(Encoding.UTF8);
if (whichResult.ExitCode != 0)
{
_logger.LogWarning("Unable to determine VAAPI capabilities; please install vainfo");
return new DefaultHardwareCapabilities();
}
var envVars = new Dictionary<string, string?>();
foreach (string libvaDriverName in vaapiDriver)
{
envVars.Add("LIBVA_DRIVER_NAME", libvaDriverName);
}
BufferedCommandResult result = await Cli.Wrap("vainfo")
.WithArguments($"--display drm --device {device}")
.WithEnvironmentVariables(envVars)
.WithValidation(CommandResultValidation.None)
.ExecuteBufferedAsync(Encoding.UTF8);
profileEntrypoints = new List<VaapiProfileEntrypoint>();
foreach (string line in result.StandardOutput.Split("\n"))
{
const string PROFILE_ENTRYPOINT_PATTERN = @"(VAProfile\w*).*(VAEntrypoint\w*)";
Match match = Regex.Match(line, PROFILE_ENTRYPOINT_PATTERN);
if (match.Success)
{
profileEntrypoints.Add(
new VaapiProfileEntrypoint(
match.Groups[1].Value.Trim(),
match.Groups[2].Value.Trim()));
}
}
if (profileEntrypoints.Any())
{
_logger.LogWarning(
"Detected {Count} VAAPI profile entrypoints for using {Driver} {Device}",
profileEntrypoints.Count,
driver,
device);
_memoryCache.Set(cacheKey, profileEntrypoints);
return new VaapiHardwareCapabilities(profileEntrypoints, _logger);
}
}
catch
{
// ignored
}
_logger.LogWarning(
"Error detecting VAAPI capabilities; some hardware accelerated features will be unavailable");
return new NoHardwareCapabilities();
}
private async Task<IHardwareCapabilities> GetNvidiaCapabilities(string ffmpegPath)
{
if (_memoryCache.TryGetValue(ArchitectureCacheKey, out int cachedArchitecture)

ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilities.cs (4)

@@ -4,6 +4,6 @@ namespace ErsatzTV.FFmpeg.Capabilities;
public interface IHardwareCapabilities
{
public bool CanDecode(string videoFormat, Option<IPixelFormat> maybePixelFormat);
public bool CanEncode(string videoFormat, Option<IPixelFormat> maybePixelFormat);
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat);
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat);
}

ErsatzTV.FFmpeg/Capabilities/IHardwareCapabilitiesFactory.cs (4)

@@ -4,5 +4,7 @@ public interface IHardwareCapabilitiesFactory
{
Task<IHardwareCapabilities> GetHardwareCapabilities(
string ffmpegPath,
HardwareAccelerationMode hardwareAccelerationMode);
HardwareAccelerationMode hardwareAccelerationMode,
Option<string> vaapiDriver,
Option<string> vaapiDevice);
}

ErsatzTV.FFmpeg/Capabilities/NoHardwareCapabilities.cs (4)

@@ -4,6 +4,6 @@ namespace ErsatzTV.FFmpeg.Capabilities;
public class NoHardwareCapabilities : IHardwareCapabilities
{
public bool CanDecode(string videoFormat, Option<IPixelFormat> maybePixelFormat) => false;
public bool CanEncode(string videoFormat, Option<IPixelFormat> maybePixelFormat) => false;
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat) => false;
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat) => false;
}

ErsatzTV.FFmpeg/Capabilities/NvidiaHardwareCapabilities.cs (4)

@@ -14,7 +14,7 @@ public class NvidiaHardwareCapabilities : IHardwareCapabilities
_model = model;
}
public bool CanDecode(string videoFormat, Option<IPixelFormat> maybePixelFormat)
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);
@@ -36,7 +36,7 @@ public class NvidiaHardwareCapabilities : IHardwareCapabilities
};
}
public bool CanEncode(string videoFormat, Option<IPixelFormat> maybePixelFormat)
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);

ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiEntrypoint.cs (7)

@@ -0,0 +1,7 @@
namespace ErsatzTV.FFmpeg.Capabilities.Vaapi;
public class VaapiEntrypoint
{
public const string Decode = "VAEntrypointVLD";
public const string Encode = "VAEntrypointEncSlice";
}

ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfile.cs (17)

@@ -0,0 +1,17 @@
namespace ErsatzTV.FFmpeg.Capabilities.Vaapi;
public class VaapiProfile
{
public const string Mpeg2Simple = "VAProfileMPEG2Simple";
public const string Mpeg2Main = "VAProfileMPEG2Main";
public const string H264ConstrainedBaseline = "VAProfileH264ConstrainedBaseline";
public const string H264Main = "VAProfileH264Main";
public const string H264High = "VAProfileH264High";
public const string H264MultiviewHigh = "VAProfileH264MultiviewHigh";
public const string H264StereoHigh = "VAProfileH264StereoHigh";
public const string Vc1Simple = "VAProfileVC1Simple";
public const string Vc1Main = "VAProfileVC1Main";
public const string Vc1Advanced = "VAProfileVC1Advanced";
public const string HevcMain = "VAProfileHEVCMain";
public const string HevcMain10 = "VAProfileHEVCMain10";
}

ErsatzTV.FFmpeg/Capabilities/Vaapi/VaapiProfileEntrypoint.cs (3)

@@ -0,0 +1,3 @@
namespace ErsatzTV.FFmpeg.Capabilities.Vaapi;
public record VaapiProfileEntrypoint(string VaapiProfile, string VaapiEntrypoint);
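
Because VaapiProfileEntrypoint is a positional record, the Contains checks in VaapiHardwareCapabilities below compare by value rather than by reference; a minimal C# sketch:

var entrypoints = new List<VaapiProfileEntrypoint>
{
    new(VaapiProfile.H264Main, VaapiEntrypoint.Decode)
};

// true: records implement value equality, so a fresh instance with equal
// components matches the stored entry
bool found = entrypoints.Contains(new VaapiProfileEntrypoint(VaapiProfile.H264Main, VaapiEntrypoint.Decode));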

ErsatzTV.FFmpeg/Capabilities/VaapiHardwareCapabilities.cs (123)

@@ -0,0 +1,123 @@
using ErsatzTV.FFmpeg.Capabilities.Vaapi;
using ErsatzTV.FFmpeg.Format;
using Microsoft.Extensions.Logging;
namespace ErsatzTV.FFmpeg.Capabilities;
public class VaapiHardwareCapabilities : IHardwareCapabilities
{
private readonly List<VaapiProfileEntrypoint> _profileEntrypoints;
private readonly ILogger _logger;
public VaapiHardwareCapabilities(List<VaapiProfileEntrypoint> profileEntrypoints, ILogger logger)
{
_profileEntrypoints = profileEntrypoints;
_logger = logger;
}
public bool CanDecode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);
bool result = (videoFormat, videoProfile.ToLowerInvariant()) switch
{
// no hardware decoding of 10-bit h264
(VideoFormat.H264, _) when bitDepth == 10 => false,
// no hardware decoding of h264 baseline profile
(VideoFormat.H264, "baseline" or "66") => false,
(VideoFormat.H264, "main" or "77") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.H264Main, VaapiEntrypoint.Decode)),
(VideoFormat.H264, "high" or "100") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.H264High, VaapiEntrypoint.Decode)),
(VideoFormat.H264, "high 10" or "110") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.H264High, VaapiEntrypoint.Decode)),
(VideoFormat.H264, "baseline constrained" or "578") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.H264ConstrainedBaseline, VaapiEntrypoint.Decode)),
(VideoFormat.Mpeg2Video, "main" or "4") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.Mpeg2Main, VaapiEntrypoint.Decode)),
(VideoFormat.Mpeg2Video, "simple" or "5") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.Mpeg2Simple, VaapiEntrypoint.Decode)),
(VideoFormat.Vc1, "simple" or "0") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.Vc1Simple, VaapiEntrypoint.Decode)),
(VideoFormat.Vc1, "main" or "1") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.Vc1Main, VaapiEntrypoint.Decode)),
(VideoFormat.Vc1, "advanced" or "3") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.Vc1Advanced, VaapiEntrypoint.Decode)),
(VideoFormat.Hevc, "main" or "1") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.HevcMain, VaapiEntrypoint.Decode)),
(VideoFormat.Hevc, "main 10" or "2") =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.HevcMain10, VaapiEntrypoint.Decode)),
// fall back to software decoder
_ => false
};
if (!result)
{
_logger.LogDebug(
"VAAPI does not support decoding {Format}/{Profile}, will use software decoder",
videoFormat,
videoProfile);
}
return result;
}
public bool CanEncode(string videoFormat, string videoProfile, Option<IPixelFormat> maybePixelFormat)
{
int bitDepth = maybePixelFormat.Map(pf => pf.BitDepth).IfNone(8);
bool result = videoFormat switch
{
// vaapi cannot encode 10-bit h264
VideoFormat.H264 when bitDepth == 10 => false,
VideoFormat.H264 =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.H264Main, VaapiEntrypoint.Encode)),
VideoFormat.Hevc when bitDepth == 10 =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.HevcMain10, VaapiEntrypoint.Encode)),
VideoFormat.Hevc =>
_profileEntrypoints.Contains(
new VaapiProfileEntrypoint(VaapiProfile.HevcMain, VaapiEntrypoint.Encode)),
_ => false
};
if (!result)
{
_logger.LogDebug(
"VAAPI does not support encoding {Format} with bit depth {BitDepth}, will use software encoder",
videoFormat,
bitDepth);
}
return result;
}
}
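
A minimal sketch of how a caller might probe these capabilities before committing to hardware decode/encode; note the videoProfile argument accepts either ffprobe's profile name (e.g. "high") or its numeric profile_idc (e.g. "100"), as the switch above shows (profileEntrypoints and logger are assumed to come from the vainfo detection above):

IHardwareCapabilities caps = new VaapiHardwareCapabilities(profileEntrypoints, logger);

// hardware decode of 8-bit H.264 High requires VAProfileH264High : VAEntrypointVLD
bool canDecode = caps.CanDecode(VideoFormat.H264, "high", Option<IPixelFormat>.Some(new PixelFormatYuv420P()));

// hardware encode of 10-bit HEVC requires VAProfileHEVCMain10 : VAEntrypointEncSlice
bool canEncode = caps.CanEncode(VideoFormat.Hevc, VideoProfile.Main, Option<IPixelFormat>.Some(new PixelFormatYuv420P10Le()));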

ErsatzTV.FFmpeg/Decoder/AvailableDecoders.cs (103)

@@ -1,103 +0,0 @@
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Decoder.Cuvid;
using ErsatzTV.FFmpeg.Decoder.Qsv;
using ErsatzTV.FFmpeg.Format;
using Microsoft.Extensions.Logging;
namespace ErsatzTV.FFmpeg.Decoder;
public static class AvailableDecoders
{
public static Option<IDecoder> ForVideoFormat(
IHardwareCapabilities hardwareCapabilities,
FFmpegState ffmpegState,
FrameState currentState,
FrameState desiredState,
Option<WatermarkInputFile> watermarkInputFile,
Option<SubtitleInputFile> subtitleInputFile,
ILogger logger) =>
(ffmpegState.DecoderHardwareAccelerationMode, currentState.VideoFormat,
currentState.PixelFormat.Match(pf => pf.Name, () => string.Empty)) switch
{
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc, _)
when hardwareCapabilities.CanDecode(VideoFormat.Hevc, currentState.PixelFormat) =>
new DecoderHevcCuvid(ffmpegState.EncoderHardwareAccelerationMode),
// nvenc doesn't support hardware decoding of 10-bit content
(HardwareAccelerationMode.Nvenc, VideoFormat.H264, PixelFormat.YUV420P10LE or PixelFormat.YUV444P10LE)
=> new DecoderH264(),
// mpeg2_cuvid seems to have issues when yadif_cuda is used, so just use software decoding
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) when desiredState.Deinterlaced =>
new DecoderMpeg2Video(),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264, _)
when hardwareCapabilities.CanDecode(VideoFormat.H264, currentState.PixelFormat) =>
new DecoderH264Cuvid(ffmpegState.EncoderHardwareAccelerationMode),
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Cuvid(
ffmpegState.EncoderHardwareAccelerationMode,
desiredState.Deinterlaced),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vc1, _) => new DecoderVc1Cuvid(ffmpegState.EncoderHardwareAccelerationMode),
(HardwareAccelerationMode.Nvenc, VideoFormat.Vp9, _)
when hardwareCapabilities.CanDecode(VideoFormat.Vp9, currentState.PixelFormat) =>
new DecoderVp9Cuvid(ffmpegState.EncoderHardwareAccelerationMode),
(HardwareAccelerationMode.Nvenc, VideoFormat.Mpeg4, _) => new DecoderMpeg4Cuvid(ffmpegState.EncoderHardwareAccelerationMode),
// hevc_qsv decoder sometimes causes green lines with 10-bit content
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc, PixelFormat.YUV420P10LE) => new DecoderHevc(),
// h264_qsv does not support decoding 10-bit content
(HardwareAccelerationMode.Qsv, VideoFormat.H264, PixelFormat.YUV420P10LE or PixelFormat.YUV444P10LE) =>
new DecoderH264(),
// qsv uses software deinterlace filter, so decode in software
(HardwareAccelerationMode.Qsv, VideoFormat.H264, _) when desiredState.Deinterlaced => new DecoderH264(),
(HardwareAccelerationMode.Qsv, VideoFormat.Mpeg2Video, _) when desiredState.Deinterlaced =>
new DecoderMpeg2Video(),
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc, _) => new DecoderHevcQsv(),
(HardwareAccelerationMode.Qsv, VideoFormat.H264, _) => new DecoderH264Qsv(),
(HardwareAccelerationMode.Qsv, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Qsv(),
(HardwareAccelerationMode.Qsv, VideoFormat.Vc1, _) => new DecoderVc1Qsv(),
(HardwareAccelerationMode.Qsv, VideoFormat.Vp9, _) => new DecoderVp9Qsv(),
// vaapi should use implicit decoders when scaling or no watermark/subtitles
// otherwise, fall back to software decoders
(HardwareAccelerationMode.Vaapi, _, _) when watermarkInputFile.IsNone && subtitleInputFile.IsNone ||
currentState.ScaledSize != desiredState.ScaledSize =>
new DecoderVaapi(),
// videotoolbox should use implicit decoders
(HardwareAccelerationMode.VideoToolbox, _, _) => new DecoderVideoToolbox(),
(_, VideoFormat.Hevc, _) => new DecoderHevc(),
(_, VideoFormat.H264, _) => new DecoderH264(),
(_, VideoFormat.Mpeg1Video, _) => new DecoderMpeg1Video(),
(_, VideoFormat.Mpeg2Video, _) => new DecoderMpeg2Video(),
(_, VideoFormat.Vc1, _) => new DecoderVc1(),
(_, VideoFormat.MsMpeg4V2, _) => new DecoderMsMpeg4V2(),
(_, VideoFormat.MsMpeg4V3, _) => new DecoderMsMpeg4V3(),
(_, VideoFormat.Mpeg4, _) => new DecoderMpeg4(),
(_, VideoFormat.Vp9, _) => new DecoderVp9(),
(_, VideoFormat.Undetermined, _) => new DecoderImplicit(),
(_, VideoFormat.Copy, _) => new DecoderImplicit(),
(_, VideoFormat.GeneratedImage, _) => new DecoderImplicit(),
var (accel, videoFormat, pixelFormat) => LogUnknownDecoder(accel, videoFormat, pixelFormat, logger)
};
private static Option<IDecoder> LogUnknownDecoder(
HardwareAccelerationMode hardwareAccelerationMode,
string videoFormat,
string pixelFormat,
ILogger logger)
{
logger.LogWarning(
"Unable to determine decoder for {AccelMode} - {VideoFormat} - {PixelFormat}; may have playback issues",
hardwareAccelerationMode,
videoFormat,
pixelFormat);
return Option<IDecoder>.None;
}
}

ErsatzTV.FFmpeg/Encoder/AvailableEncoders.cs (77)

@@ -1,10 +1,4 @@
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Encoder.Amf;
using ErsatzTV.FFmpeg.Encoder.Nvenc;
using ErsatzTV.FFmpeg.Encoder.Qsv;
using ErsatzTV.FFmpeg.Encoder.Vaapi;
using ErsatzTV.FFmpeg.Encoder.VideoToolbox;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.State;
using Microsoft.Extensions.Logging;
@@ -12,75 +6,6 @@ namespace ErsatzTV.FFmpeg.Encoder;
public static class AvailableEncoders
{
public static Option<IEncoder> ForVideoFormat(
IHardwareCapabilities hardwareCapabilities,
FFmpegState ffmpegState,
FrameState currentState,
FrameState desiredState,
Option<WatermarkInputFile> maybeWatermarkInputFile,
Option<SubtitleInputFile> maybeSubtitleInputFile,
ILogger logger) =>
(ffmpegState.EncoderHardwareAccelerationMode, desiredState.VideoFormat) switch
{
(HardwareAccelerationMode.Nvenc, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc,
desiredState.PixelFormat) =>
new EncoderHevcNvenc(),
(HardwareAccelerationMode.Nvenc, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264,
desiredState.PixelFormat) =>
new EncoderH264Nvenc(),
(HardwareAccelerationMode.Qsv, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc,
desiredState.PixelFormat) => new EncoderHevcQsv(),
(HardwareAccelerationMode.Qsv, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264,
desiredState.PixelFormat) => new EncoderH264Qsv(),
(HardwareAccelerationMode.Vaapi, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc,
desiredState.PixelFormat) => new EncoderHevcVaapi(),
(HardwareAccelerationMode.Vaapi, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264,
desiredState.PixelFormat) => new EncoderH264Vaapi(),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc,
desiredState.PixelFormat) => new EncoderHevcVideoToolbox(desiredState.BitDepth),
(HardwareAccelerationMode.VideoToolbox, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264,
desiredState.PixelFormat) => new EncoderH264VideoToolbox(),
(HardwareAccelerationMode.Amf, VideoFormat.Hevc) when hardwareCapabilities.CanEncode(
VideoFormat.Hevc,
desiredState.PixelFormat) => new EncoderHevcAmf(),
(HardwareAccelerationMode.Amf, VideoFormat.H264) when hardwareCapabilities.CanEncode(
VideoFormat.H264,
desiredState.PixelFormat) => new EncoderH264Amf(),
(_, VideoFormat.Hevc) => new EncoderLibx265(currentState),
(_, VideoFormat.H264) => new EncoderLibx264(),
(_, VideoFormat.Mpeg2Video) => new EncoderMpeg2Video(),
(_, VideoFormat.Undetermined) => new EncoderImplicitVideo(),
(_, VideoFormat.Copy) => new EncoderCopyVideo(),
var (accel, videoFormat) => LogUnknownEncoder(accel, videoFormat, logger)
};
private static Option<IEncoder> LogUnknownEncoder(
HardwareAccelerationMode hardwareAccelerationMode,
string videoFormat,
ILogger logger)
{
logger.LogWarning(
"Unable to determine video encoder for {AccelMode} - {VideoFormat}; may have playback issues",
hardwareAccelerationMode,
videoFormat);
return Option<IEncoder>.None;
}
public static Option<IEncoder> ForAudioFormat(AudioState desiredState, ILogger logger) =>
desiredState.AudioFormat.Match(
audioFormat =>

ErsatzTV.FFmpeg/ErsatzTV.FFmpeg.csproj (2)

@@ -8,7 +8,7 @@
<ItemGroup>
<PackageReference Include="CliWrap" Version="3.5.0" />
<PackageReference Include="LanguageExt.Core" Version="4.2.9" />
<PackageReference Include="LanguageExt.Core" Version="4.4.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="6.0.2" />
</ItemGroup>

ErsatzTV.FFmpeg/Format/VideoProfile.cs (6)

@@ -0,0 +1,6 @@
namespace ErsatzTV.FFmpeg.Format;
public static class VideoProfile
{
public const string Main = "main";
}

ErsatzTV.FFmpeg/FrameState.cs (1)

@@ -6,6 +6,7 @@ public record FrameState(
bool Realtime,
bool InfiniteLoop,
string VideoFormat,
string VideoProfile,
Option<IPixelFormat> PixelFormat,
FrameSize ScaledSize,
FrameSize PaddedSize,

ErsatzTV.FFmpeg/Option/HardwareAcceleration/AvailableHardwareAccelerationOptions.cs (38)

@@ -1,38 +0,0 @@
using Microsoft.Extensions.Logging;
namespace ErsatzTV.FFmpeg.Option.HardwareAcceleration;
public static class AvailableHardwareAccelerationOptions
{
public static Option<IPipelineStep> ForMode(
HardwareAccelerationMode mode,
Option<string> gpuDevice,
ILogger logger) =>
mode switch
{
HardwareAccelerationMode.Nvenc => new CudaHardwareAccelerationOption(),
HardwareAccelerationMode.Qsv => new QsvHardwareAccelerationOption(gpuDevice),
HardwareAccelerationMode.Vaapi => GetVaapiAcceleration(gpuDevice, logger),
HardwareAccelerationMode.VideoToolbox => new VideoToolboxHardwareAccelerationOption(),
HardwareAccelerationMode.Amf => new AmfHardwareAccelerationOption(),
HardwareAccelerationMode.None => Option<IPipelineStep>.None,
_ => LogUnknownMode(mode, logger)
};
private static Option<IPipelineStep> GetVaapiAcceleration(Option<string> vaapiDevice, ILogger logger)
{
foreach (string device in vaapiDevice)
{
return new VaapiHardwareAccelerationOption(device);
}
logger.LogWarning("VAAPI device name is missing; falling back to software mode");
return Option<IPipelineStep>.None;
}
private static Option<IPipelineStep> LogUnknownMode(HardwareAccelerationMode mode, ILogger logger)
{
logger.LogWarning("Unexpected hardware acceleration mode {AccelMode}; may have playback issues", mode);
return Option<IPipelineStep>.None;
}
}

ErsatzTV.FFmpeg/Option/HardwareAcceleration/VaapiHardwareAccelerationOption.cs (23)

@@ -3,11 +3,28 @@
public class VaapiHardwareAccelerationOption : GlobalOption
{
private readonly string _vaapiDevice;
private readonly bool _canDecode;
public VaapiHardwareAccelerationOption(string vaapiDevice) => _vaapiDevice = vaapiDevice;
public VaapiHardwareAccelerationOption(string vaapiDevice, bool canDecode)
{
_vaapiDevice = vaapiDevice;
_canDecode = canDecode;
}
public override IList<string> GlobalOptions => new List<string>
{ "-hwaccel", "vaapi", "-vaapi_device", _vaapiDevice };
public override IList<string> GlobalOptions
{
get
{
var result = new List<string> { "-vaapi_device", _vaapiDevice };
if (_canDecode)
{
result.InsertRange(0, new[] { "-hwaccel", "vaapi" });
}
return result;
}
}
public override FrameState NextState(FrameState currentState) => currentState with
{
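
With this change, the global options emitted for VAAPI depend on whether hardware decoding is supported; for illustration (the render node path is a hypothetical example):

// _canDecode == true  =>  -hwaccel vaapi -vaapi_device /dev/dri/renderD128
// _canDecode == false =>  -vaapi_device /dev/dri/renderD128   (encode-only)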

ErsatzTV.FFmpeg/Pipeline/AmfPipelineBuilder.cs (10)

@@ -44,8 +44,14 @@ public class AmfPipelineBuilder : SoftwarePipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
{
bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat);
bool canDecode = _hardwareCapabilities.CanDecode(
videoStream.Codec,
desiredState.VideoProfile,
videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.VideoProfile,
desiredState.PixelFormat);
pipelineSteps.Add(new AmfHardwareAccelerationOption());

ErsatzTV.FFmpeg/Pipeline/IPipelineBuilderFactory.cs (2)

@@ -8,6 +8,8 @@ public interface IPipelineBuilderFactory
Option<AudioInputFile> audioInputFile,
Option<WatermarkInputFile> watermarkInputFile,
Option<SubtitleInputFile> subtitleInputFile,
Option<string> vaapiDriver,
Option<string> vaapiDevice,
string reportsFolder,
string fontsFolder,
string ffmpegPath);

ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (10)

@@ -48,8 +48,14 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
{
bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat);
bool canDecode = _hardwareCapabilities.CanDecode(
videoStream.Codec,
desiredState.VideoProfile,
videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.VideoProfile,
desiredState.PixelFormat);
// mpeg2_cuvid seems to have issues when yadif_cuda is used, so just use software decoding
if (context.ShouldDeinterlace && videoStream.Codec == VideoFormat.Mpeg2Video)

ErsatzTV.FFmpeg/Pipeline/PipelineBuilderFactory.cs (131)

@@ -26,68 +26,79 @@ public class PipelineBuilderFactory : IPipelineBuilderFactory
Option<AudioInputFile> audioInputFile,
Option<WatermarkInputFile> watermarkInputFile,
Option<SubtitleInputFile> subtitleInputFile,
Option<string> vaapiDriver,
Option<string> vaapiDevice,
string reportsFolder,
string fontsFolder,
string ffmpegPath) => hardwareAccelerationMode switch
string ffmpegPath)
{
HardwareAccelerationMode.Nvenc => new NvidiaPipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode),
IHardwareCapabilities capabilities = await _hardwareCapabilitiesFactory.GetHardwareCapabilities(
ffmpegPath,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Vaapi => new VaapiPipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode),
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Qsv => new QsvPipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode),
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.VideoToolbox => new VideoToolboxPipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode),
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Amf => new AmfPipelineBuilder(
await _hardwareCapabilitiesFactory.GetHardwareCapabilities(ffmpegPath, hardwareAccelerationMode),
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
_ => new SoftwarePipelineBuilder(
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger)
};
vaapiDriver,
vaapiDevice);
return hardwareAccelerationMode switch
{
HardwareAccelerationMode.Nvenc => new NvidiaPipelineBuilder(
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Vaapi => new VaapiPipelineBuilder(
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Qsv => new QsvPipelineBuilder(
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.VideoToolbox => new VideoToolboxPipelineBuilder(
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
HardwareAccelerationMode.Amf => new AmfPipelineBuilder(
capabilities,
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger),
_ => new SoftwarePipelineBuilder(
hardwareAccelerationMode,
videoInputFile,
audioInputFile,
watermarkInputFile,
subtitleInputFile,
reportsFolder,
fontsFolder,
_logger)
};
}
}

ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (10)

@@ -48,8 +48,14 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
{
bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat);
bool canDecode = _hardwareCapabilities.CanDecode(
videoStream.Codec,
desiredState.VideoProfile,
videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.VideoProfile,
desiredState.PixelFormat);
pipelineSteps.Add(new QsvHardwareAccelerationOption(ffmpegState.VaapiDevice));

ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (37)

@@ -2,6 +2,7 @@ using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Decoder;
using ErsatzTV.FFmpeg.Encoder;
using ErsatzTV.FFmpeg.Encoder.Vaapi;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Filter;
using ErsatzTV.FFmpeg.Filter.Vaapi;
using ErsatzTV.FFmpeg.Format;
@@ -47,16 +48,27 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
{
bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat);
bool canDecode = _hardwareCapabilities.CanDecode(
videoStream.Codec,
desiredState.VideoProfile,
videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.VideoProfile,
desiredState.PixelFormat);
foreach (string vaapiDevice in ffmpegState.VaapiDevice)
{
pipelineSteps.Add(new VaapiHardwareAccelerationOption(vaapiDevice));
pipelineSteps.Add(new VaapiHardwareAccelerationOption(vaapiDevice, canDecode));
foreach (string driverName in ffmpegState.VaapiDriver)
{
pipelineSteps.Add(new LibvaDriverNameVariable(driverName));
}
}
// use software decoding with an extensive pipeline
if (context.HasSubtitleOverlay && context.HasWatermark)
if (context is { HasSubtitleOverlay: true, HasWatermark: true })
{
canDecode = false;
}
@@ -136,11 +148,11 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
currentState = SetScale(videoInputFile, videoStream, context, ffmpegState, desiredState, currentState);
// _logger.LogDebug("After scale: {PixelFormat}", currentState.PixelFormat);
currentState = SetPad(videoInputFile, videoStream, desiredState, currentState);
currentState = SetPad(videoInputFile, desiredState, currentState);
// _logger.LogDebug("After pad: {PixelFormat}", currentState.PixelFormat);
// need to upload for hardware overlay
bool forceSoftwareOverlay = context.HasSubtitleOverlay && context.HasWatermark;
bool forceSoftwareOverlay = context is { HasSubtitleOverlay: true, HasWatermark: true };
if (currentState.FrameDataLocation == FrameDataLocation.Software && context.HasSubtitleOverlay &&
!forceSoftwareOverlay)
@@ -164,7 +176,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
subtitleInputFile,
context,
forceSoftwareOverlay,
ffmpegState,
currentState,
desiredState,
fontsFolder,
@@ -174,7 +185,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
videoStream,
watermarkInputFile,
context,
ffmpegState,
desiredState,
currentState,
watermarkOverlayFilterSteps);
@@ -202,9 +212,7 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
videoStream,
desiredState.PixelFormat,
ffmpegState,
currentState,
context,
pipelineSteps);
currentState);
return new FilterChain(
videoInputFile.FilterSteps,
@@ -219,9 +227,7 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
VideoStream videoStream,
Option<IPixelFormat> desiredPixelFormat,
FFmpegState ffmpegState,
FrameState currentState,
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
FrameState currentState)
{
var result = new List<IPipelineFilterStep>();
@@ -298,7 +304,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
VideoStream videoStream,
Option<WatermarkInputFile> watermarkInputFile,
PipelineContext context,
FFmpegState ffmpegState,
FrameState desiredState,
FrameState currentState,
List<IPipelineFilterStep> watermarkOverlayFilterSteps)
@@ -367,7 +372,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
Option<SubtitleInputFile> subtitleInputFile,
PipelineContext context,
bool forceSoftwareOverlay,
FFmpegState ffmpegState,
FrameState currentState,
FrameState desiredState,
string fontsFolder,
@@ -449,7 +453,6 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
private static FrameState SetPad(
VideoInputFile videoInputFile,
VideoStream videoStream,
FrameState desiredState,
FrameState currentState)
{

ErsatzTV.FFmpeg/Pipeline/VideoToolboxPipelineBuilder.cs (10)

@@ -45,8 +45,14 @@ public class VideoToolboxPipelineBuilder : SoftwarePipelineBuilder
PipelineContext context,
ICollection<IPipelineStep> pipelineSteps)
{
bool canDecode = _hardwareCapabilities.CanDecode(videoStream.Codec, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(desiredState.VideoFormat, desiredState.PixelFormat);
bool canDecode = _hardwareCapabilities.CanDecode(
videoStream.Codec,
desiredState.VideoProfile,
videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.VideoProfile,
desiredState.PixelFormat);
pipelineSteps.Add(new VideoToolboxHardwareAccelerationOption());

ErsatzTV.FFmpeg/PipelineBuilder.cs (812)

@@ -1,812 +0,0 @@
using ErsatzTV.FFmpeg.Capabilities;
using ErsatzTV.FFmpeg.Decoder;
using ErsatzTV.FFmpeg.Encoder;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Filter;
using ErsatzTV.FFmpeg.Filter.Cuda;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.Option;
using ErsatzTV.FFmpeg.Option.HardwareAcceleration;
using ErsatzTV.FFmpeg.Option.Metadata;
using ErsatzTV.FFmpeg.OutputFormat;
using ErsatzTV.FFmpeg.Pipeline;
using ErsatzTV.FFmpeg.Protocol;
using ErsatzTV.FFmpeg.Runtime;
using ErsatzTV.FFmpeg.State;
using Microsoft.Extensions.Logging;
namespace ErsatzTV.FFmpeg;
public class PipelineBuilder : IPipelineBuilder
{
private readonly Option<AudioInputFile> _audioInputFile;
private readonly string _fontsFolder;
private readonly IRuntimeInfo _runtimeInfo;
private readonly IHardwareCapabilities _hardwareCapabilities;
private readonly ILogger _logger;
private readonly List<IPipelineStep> _pipelineSteps;
private readonly string _reportsFolder;
private readonly Option<SubtitleInputFile> _subtitleInputFile;
private readonly Option<VideoInputFile> _videoInputFile;
private readonly Option<WatermarkInputFile> _watermarkInputFile;
public PipelineBuilder(
IRuntimeInfo runtimeInfo,
IHardwareCapabilities hardwareCapabilities,
Option<VideoInputFile> videoInputFile,
Option<AudioInputFile> audioInputFile,
Option<WatermarkInputFile> watermarkInputFile,
Option<SubtitleInputFile> subtitleInputFile,
string reportsFolder,
string fontsFolder,
ILogger logger)
{
_pipelineSteps = new List<IPipelineStep>
{
new NoStandardInputOption(),
new HideBannerOption(),
new NoStatsOption(),
new LoglevelErrorOption(),
new StandardFormatFlags(),
new NoDemuxDecodeDelayOutputOption(),
new FastStartOutputOption(),
new ClosedGopOutputOption()
};
_runtimeInfo = runtimeInfo;
_hardwareCapabilities = hardwareCapabilities;
_videoInputFile = videoInputFile;
_audioInputFile = audioInputFile;
_watermarkInputFile = watermarkInputFile;
_subtitleInputFile = subtitleInputFile;
_reportsFolder = reportsFolder;
_fontsFolder = fontsFolder;
_logger = logger;
}
public FFmpegPipeline Resize(string outputFile, FrameSize scaledSize)
{
_pipelineSteps.Clear();
_pipelineSteps.Add(new NoStandardInputOption());
_pipelineSteps.Add(new HideBannerOption());
_pipelineSteps.Add(new NoStatsOption());
_pipelineSteps.Add(new LoglevelErrorOption());
IPipelineFilterStep scaleStep = new ScaleImageFilter(scaledSize);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep));
_pipelineSteps.Add(new VideoFilter(new[] { scaleStep }));
_pipelineSteps.Add(scaleStep);
_pipelineSteps.Add(new FileNameOutputOption(outputFile));
return new FFmpegPipeline(_pipelineSteps);
}
public FFmpegPipeline Concat(ConcatInputFile concatInputFile, FFmpegState ffmpegState)
{
concatInputFile.AddOption(new ConcatInputFormat());
concatInputFile.AddOption(new RealtimeInputOption());
concatInputFile.AddOption(new InfiniteLoopInputOption(HardwareAccelerationMode.None));
foreach (int threadCount in ffmpegState.ThreadCount)
{
_pipelineSteps.Insert(0, new ThreadCountOption(threadCount));
}
_pipelineSteps.Add(new NoSceneDetectOutputOption(0));
_pipelineSteps.Add(new EncoderCopyAll());
if (ffmpegState.DoNotMapMetadata)
{
_pipelineSteps.Add(new DoNotMapMetadataOutputOption());
}
foreach (string desiredServiceProvider in ffmpegState.MetadataServiceProvider)
{
_pipelineSteps.Add(new MetadataServiceProviderOutputOption(desiredServiceProvider));
}
foreach (string desiredServiceName in ffmpegState.MetadataServiceName)
{
_pipelineSteps.Add(new MetadataServiceNameOutputOption(desiredServiceName));
}
_pipelineSteps.Add(new OutputFormatMpegTs());
_pipelineSteps.Add(new PipeProtocol());
if (ffmpegState.SaveReport)
{
_pipelineSteps.Add(new FFReportVariable(_reportsFolder, concatInputFile));
}
return new FFmpegPipeline(_pipelineSteps);
}
public virtual FFmpegPipeline Build(FFmpegState ffmpegState, FrameState desiredState)
{
Option<IPixelFormat> originalDesiredPixelFormat = desiredState.PixelFormat;
bool is10BitOutput = desiredState.PixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10;
if (ffmpegState.Start.Exists(s => s > TimeSpan.Zero) && desiredState.Realtime)
{
_logger.LogInformation(
"Forcing {Threads} ffmpeg thread due to buggy combination of stream seek and realtime output",
1);
_pipelineSteps.Insert(0, new ThreadCountOption(1));
}
else
{
foreach (int threadCount in ffmpegState.ThreadCount)
{
_pipelineSteps.Insert(0, new ThreadCountOption(threadCount));
}
}
var allVideoStreams = _videoInputFile.SelectMany(f => f.VideoStreams).ToList();
// -sc_threshold 0 is unsupported with mpeg2video
_pipelineSteps.Add(
allVideoStreams.All(s => s.Codec != VideoFormat.Mpeg2Video) &&
desiredState.VideoFormat != VideoFormat.Mpeg2Video
? new NoSceneDetectOutputOption(0)
: new NoSceneDetectOutputOption(1_000_000_000));
if (ffmpegState.SaveReport)
{
_pipelineSteps.Add(new FFReportVariable(_reportsFolder, None));
}
foreach (TimeSpan desiredStart in ffmpegState.Start.Filter(s => s > TimeSpan.Zero))
{
var option = new StreamSeekInputOption(desiredStart);
_audioInputFile.Iter(f => f.AddOption(option));
_videoInputFile.Iter(f => f.AddOption(option));
// need to seek text subtitle files
if (_subtitleInputFile.Map(s => !s.IsImageBased).IfNone(false))
{
_pipelineSteps.Add(new StreamSeekFilterOption(desiredStart));
}
}
foreach (TimeSpan desiredFinish in ffmpegState.Finish)
{
_pipelineSteps.Add(new TimeLimitOutputOption(desiredFinish));
}
foreach (VideoStream videoStream in allVideoStreams)
{
bool hasOverlay = _watermarkInputFile.IsSome ||
_subtitleInputFile.Map(s => s.IsImageBased && !s.Copy).IfNone(false);
Option<int> initialFrameRate = Option<int>.None;
foreach (string frameRateString in videoStream.FrameRate)
{
if (int.TryParse(frameRateString, out int parsedFrameRate))
{
initialFrameRate = parsedFrameRate;
}
}
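// snapshot the source stream as the starting state; realtime, looping and deinterlacing
// all begin false and are only enabled further down as needed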
var currentState = new FrameState(
false, // realtime
false, // infinite loop
videoStream.Codec,
videoStream.PixelFormat,
videoStream.FrameSize,
videoStream.FrameSize,
videoStream.IsAnamorphic,
initialFrameRate,
Option<int>.None,
Option<int>.None,
Option<int>.None,
false); // deinterlace
IEncoder encoder;
if (IsDesiredVideoState(currentState, desiredState))
{
encoder = new EncoderCopyVideo();
_pipelineSteps.Add(encoder);
}
else
{
Option<IPipelineStep> maybeAccel = AvailableHardwareAccelerationOptions.ForMode(
ffmpegState.EncoderHardwareAccelerationMode,
ffmpegState.VaapiDevice,
_logger);
if (maybeAccel.IsNone)
{
ffmpegState = ffmpegState with
{
// disable hw accel if we don't match anything
DecoderHardwareAccelerationMode = HardwareAccelerationMode.None,
EncoderHardwareAccelerationMode = HardwareAccelerationMode.None
};
}
foreach (IPipelineStep accel in maybeAccel)
{
bool canDecode = _hardwareCapabilities.CanDecode(currentState.VideoFormat, videoStream.PixelFormat);
bool canEncode = _hardwareCapabilities.CanEncode(
desiredState.VideoFormat,
desiredState.PixelFormat);
// disable hw accel if decoder/encoder isn't supported
if (!canDecode || !canEncode)
{
ffmpegState = ffmpegState with
{
DecoderHardwareAccelerationMode = canDecode
? ffmpegState.DecoderHardwareAccelerationMode
: HardwareAccelerationMode.None,
EncoderHardwareAccelerationMode = canEncode
? ffmpegState.EncoderHardwareAccelerationMode
: HardwareAccelerationMode.None
};
}
if (canDecode || canEncode)
{
currentState = accel.NextState(currentState);
_pipelineSteps.Add(accel);
}
}
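// by this point hardware acceleration has been disabled for any side (decode/encode)
// the reported capabilities cannot handle, e.g. a VAAPI driver without the needed
// profile entrypoint, so those sides fall back to software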
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc && hasOverlay &&
is10BitOutput)
{
IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P10Le());
desiredState = desiredState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) };
}
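// note: this appears to keep the original format name while tagging the frames as
// nv12-family so the overlay filter chain has a format it can consume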
//
// // qsv should stay nv12
// if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Qsv && hasOverlay)
// {
// IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P());
// desiredState = desiredState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) };
// }
foreach (string desiredVaapiDriver in ffmpegState.VaapiDriver)
{
IPipelineStep step = new LibvaDriverNameVariable(desiredVaapiDriver);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
foreach (IDecoder decoder in AvailableDecoders.ForVideoFormat(
_hardwareCapabilities,
ffmpegState,
currentState,
desiredState,
_watermarkInputFile,
_subtitleInputFile,
_logger))
{
foreach (VideoInputFile videoInputFile in _videoInputFile)
{
videoInputFile.AddOption(decoder);
currentState = decoder.NextState(currentState);
}
}
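// the chosen decoder advances currentState, e.g. decoded frames may now live in
// hardware memory, which the filter selection below accounts for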
}
if (_subtitleInputFile.Map(s => s.Copy) == Some(true))
{
_pipelineSteps.Add(new EncoderCopySubtitle());
}
if (videoStream.StillImage)
{
var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode);
_videoInputFile.Iter(f => f.AddOption(option));
}
if (!IsDesiredVideoState(currentState, desiredState))
{
if (desiredState.Realtime)
{
var option = new RealtimeInputOption();
_audioInputFile.Iter(f => f.AddOption(option));
_videoInputFile.Iter(f => f.AddOption(option));
}
if (desiredState.InfiniteLoop)
{
var option = new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode);
_audioInputFile.Iter(f => f.AddOption(option));
_videoInputFile.Iter(f => f.AddOption(option));
}
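// reconcile each desired output property (frame rate, timescale, bitrate, buffer size)
// that differs from the current state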
foreach (int desiredFrameRate in desiredState.FrameRate)
{
if (currentState.FrameRate != desiredFrameRate)
{
IPipelineStep step = new FrameRateOutputOption(desiredFrameRate);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
foreach (int desiredTimeScale in desiredState.VideoTrackTimeScale)
{
if (currentState.VideoTrackTimeScale != desiredTimeScale)
{
IPipelineStep step = new VideoTrackTimescaleOutputOption(desiredTimeScale);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
foreach (int desiredBitrate in desiredState.VideoBitrate)
{
if (currentState.VideoBitrate != desiredBitrate)
{
IPipelineStep step = new VideoBitrateOutputOption(desiredBitrate);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
foreach (int desiredBufferSize in desiredState.VideoBufferSize)
{
if (currentState.VideoBufferSize != desiredBufferSize)
{
IPipelineStep step = new VideoBufferSizeOutputOption(desiredBufferSize);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
if (desiredState.Deinterlaced && !currentState.Deinterlaced)
{
IPipelineFilterStep step = AvailableDeinterlaceFilters.ForAcceleration(
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState,
_watermarkInputFile,
_subtitleInputFile);
currentState = step.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(step));
}
// TODO: this is a software-only flow, will need to be different for hardware accel
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None)
{
if (currentState.ScaledSize != desiredState.ScaledSize ||
currentState.PaddedSize != desiredState.PaddedSize)
{
IPipelineFilterStep scaleStep = new ScaleFilter(
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
videoStream.IsAnamorphicEdgeCase);
currentState = scaleStep.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep));
// TODO: padding might not be needed, can we optimize this out?
IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
currentState = padStep.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(padStep));
}
}
else if (currentState.ScaledSize != desiredState.ScaledSize)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
_runtimeInfo,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames,
videoStream.IsAnamorphicEdgeCase,
videoStream.SampleAspectRatio);
currentState = scaleFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
// TODO: padding might not be needed, can we optimize this out?
if (currentState.PaddedSize != desiredState.PaddedSize)
{
IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
currentState = padStep.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(padStep));
}
}
else if (currentState.PaddedSize != desiredState.PaddedSize)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
_runtimeInfo,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames,
videoStream.IsAnamorphicEdgeCase,
videoStream.SampleAspectRatio);
currentState = scaleFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
if (currentState.PaddedSize != desiredState.PaddedSize)
{
IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
currentState = padStep.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(padStep));
}
}
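// both hardware branches re-check PaddedSize after scaling because some hardware scale
// filters can pad in the same operation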
if (hasOverlay && currentState.PixelFormat.Map(pf => pf.FFmpegName) !=
desiredState.PixelFormat.Map(pf => pf.FFmpegName))
{
// this should only happen with nvenc?
// use scale filter to fix pixel format
foreach (IPixelFormat pixelFormat in desiredState.PixelFormat)
{
if (currentState.FrameDataLocation == FrameDataLocation.Software)
{
IPipelineFilterStep formatFilter = new PixelFormatFilter(pixelFormat);
currentState = formatFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(formatFilter));
switch (ffmpegState.EncoderHardwareAccelerationMode)
{
case HardwareAccelerationMode.Nvenc:
var uploadFilter = new HardwareUploadFilter(ffmpegState);
currentState = uploadFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(uploadFilter));
break;
}
}
else
{
if (ffmpegState.EncoderHardwareAccelerationMode != HardwareAccelerationMode.Qsv)
{
// the filter re-applies the current pixel format, so we have to set it first
currentState = currentState with { PixelFormat = desiredState.PixelFormat };
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
_runtimeInfo,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames,
videoStream.IsAnamorphicEdgeCase,
videoStream.SampleAspectRatio);
currentState = scaleFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
}
}
}
}
// nvenc custom logic
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc)
{
foreach (VideoInputFile videoInputFile in _videoInputFile)
{
// if we only deinterlace, we need to set pixel format again (using scale_cuda)
bool onlyYadif = videoInputFile.FilterSteps.Count == 1 &&
videoInputFile.FilterSteps.Any(fs => fs is YadifCudaFilter);
// if we have no filters and an overlay, we need to set pixel format
bool unfilteredWithOverlay = videoInputFile.FilterSteps.Count == 0 && hasOverlay;
if (onlyYadif || unfilteredWithOverlay)
{
// the filter re-applies the current pixel format, so we have to set it first
currentState = currentState with { PixelFormat = desiredState.PixelFormat };
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
_runtimeInfo,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames,
videoStream.IsAnamorphicEdgeCase,
videoStream.SampleAspectRatio);
currentState = scaleFilter.NextState(currentState);
videoInputFile.FilterSteps.Add(scaleFilter);
}
}
}
if (ffmpegState.PtsOffset > 0)
{
foreach (int videoTrackTimeScale in desiredState.VideoTrackTimeScale)
{
IPipelineStep step = new OutputTsOffsetOption(
ffmpegState.PtsOffset,
videoTrackTimeScale);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
}
// TODO: if all video filters are software, use software pixel format for hwaccel output
// might be able to skip scale_cuda=format=whatever,hwdownload,format=whatever
if (_audioInputFile.IsNone)
{
// always need to specify audio codec so ffmpeg doesn't default to a codec we don't want
foreach (IEncoder step in AvailableEncoders.ForAudioFormat(AudioState.Copy, _logger))
{
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
foreach (AudioInputFile audioInputFile in _audioInputFile)
{
// always need to specify audio codec so ffmpeg doesn't default to a codec we don't want
foreach (IEncoder step in AvailableEncoders.ForAudioFormat(audioInputFile.DesiredState, _logger))
{
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
foreach (AudioStream audioStream in audioInputFile.AudioStreams.HeadOrNone())
{
foreach (int desiredAudioChannels in audioInputFile.DesiredState.AudioChannels)
{
_pipelineSteps.Add(
new AudioChannelsOutputOption(
audioInputFile.DesiredState.AudioFormat,
audioStream.Channels,
desiredAudioChannels));
}
}
foreach (int desiredBitrate in audioInputFile.DesiredState.AudioBitrate)
{
_pipelineSteps.Add(new AudioBitrateOutputOption(desiredBitrate));
}
foreach (int desiredBufferSize in audioInputFile.DesiredState.AudioBufferSize)
{
_pipelineSteps.Add(new AudioBufferSizeOutputOption(desiredBufferSize));
}
foreach (int desiredSampleRate in audioInputFile.DesiredState.AudioSampleRate)
{
_pipelineSteps.Add(new AudioSampleRateOutputOption(desiredSampleRate));
}
if (audioInputFile.DesiredState.NormalizeLoudness)
{
_audioInputFile.Iter(f => f.FilterSteps.Add(new NormalizeLoudnessFilter()));
}
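// loudness normalization is applied as an audio filter (likely loudnorm) rather than
// as an output option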
foreach (TimeSpan desiredDuration in audioInputFile.DesiredState.AudioDuration)
{
_audioInputFile.Iter(f => f.FilterSteps.Add(new AudioPadFilter(desiredDuration)));
}
}
foreach (SubtitleInputFile subtitleInputFile in _subtitleInputFile)
{
if (subtitleInputFile.IsImageBased)
{
// vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
// though videotoolbox uses software decoders, so no need to download for that
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
{
var downloadFilter = new HardwareDownloadFilter(currentState);
currentState = downloadFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
}
var pixelFormatFilter = new SubtitlePixelFormatFilter(ffmpegState, is10BitOutput);
subtitleInputFile.FilterSteps.Add(pixelFormatFilter);
subtitleInputFile.FilterSteps.Add(new SubtitleHardwareUploadFilter(currentState, ffmpegState));
FrameState fakeState = currentState;
foreach (string format in pixelFormatFilter.MaybeFormat)
{
fakeState = fakeState with
{
PixelFormat = AvailablePixelFormats.ForPixelFormat(format, _logger)
};
}
// hacky check for actual scaling or padding
if (_videoInputFile.Exists(
v => v.FilterSteps.Any(s => s.Filter.Contains(currentState.PaddedSize.Height.ToString()))))
{
// enable scaling the subtitle stream
fakeState = fakeState with { ScaledSize = new FrameSize(1, 1) };
}
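// the 1x1 ScaledSize is a sentinel: any size that differs from the desired size causes
// the subtitle scale filter selected below to become active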
IPipelineFilterStep scaleFilter = AvailableSubtitleScaleFilters.ForAcceleration(
ffmpegState.EncoderHardwareAccelerationMode,
fakeState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames);
subtitleInputFile.FilterSteps.Add(scaleFilter);
}
else
{
_videoInputFile.Iter(f => f.AddOption(new CopyTimestampInputOption()));
// text-based subtitles are always added in software, so always try to download the background
// nvidia needs some extra format help if the only filter will be the download filter
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Nvenc &&
currentState.FrameDataLocation == FrameDataLocation.Hardware &&
_videoInputFile.Map(f => f.FilterSteps.Count).IfNone(1) == 0)
{
IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
_runtimeInfo,
ffmpegState.EncoderHardwareAccelerationMode,
currentState,
desiredState.ScaledSize,
desiredState.PaddedSize,
ffmpegState.QsvExtraHardwareFrames,
videoStream.IsAnamorphicEdgeCase,
videoStream.SampleAspectRatio);
currentState = scaleFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
}
var downloadFilter = new HardwareDownloadFilter(currentState);
currentState = downloadFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
}
}
foreach (WatermarkInputFile watermarkInputFile in _watermarkInputFile)
{
// vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
// though videotoolbox uses software decoders, so no need to download for that
if (ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
{
var downloadFilter = new HardwareDownloadFilter(currentState);
currentState = downloadFilter.NextState(currentState);
_videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
}
watermarkInputFile.FilterSteps.Add(
new WatermarkPixelFormatFilter(ffmpegState, watermarkInputFile.DesiredState, is10BitOutput));
foreach (VideoStream watermarkStream in watermarkInputFile.VideoStreams)
{
if (watermarkStream.StillImage == false)
{
watermarkInputFile.AddOption(new DoNotIgnoreLoopInputOption());
}
else if (watermarkInputFile.DesiredState.MaybeFadePoints.Map(fp => fp.Count > 0).IfNone(false))
{
// looping is required to fade a static image in and out
watermarkInputFile.AddOption(
new InfiniteLoopInputOption(ffmpegState.EncoderHardwareAccelerationMode));
}
}
if (watermarkInputFile.DesiredState.Size == WatermarkSize.Scaled)
{
watermarkInputFile.FilterSteps.Add(
new WatermarkScaleFilter(watermarkInputFile.DesiredState, currentState.PaddedSize));
}
if (watermarkInputFile.DesiredState.Opacity != 100)
{
watermarkInputFile.FilterSteps.Add(new WatermarkOpacityFilter(watermarkInputFile.DesiredState));
}
foreach (List<WatermarkFadePoint> fadePoints in watermarkInputFile.DesiredState.MaybeFadePoints)
{
watermarkInputFile.FilterSteps.AddRange(fadePoints.Map(fp => new WatermarkFadeFilter(fp)));
}
watermarkInputFile.FilterSteps.Add(new WatermarkHardwareUploadFilter(currentState, ffmpegState));
}
// after everything else is done, apply the encoder
if (_pipelineSteps.OfType<IEncoder>().All(e => e.Kind != StreamKind.Video))
{
foreach (IEncoder e in AvailableEncoders.ForVideoFormat(
_hardwareCapabilities,
ffmpegState,
currentState,
desiredState,
_watermarkInputFile,
_subtitleInputFile,
_logger))
{
encoder = e;
_pipelineSteps.Add(encoder);
_videoInputFile.Iter(f => f.FilterSteps.Add(encoder));
currentState = encoder.NextState(currentState);
}
}
if (ffmpegState.DoNotMapMetadata)
{
_pipelineSteps.Add(new DoNotMapMetadataOutputOption());
}
foreach (string desiredServiceProvider in ffmpegState.MetadataServiceProvider)
{
_pipelineSteps.Add(new MetadataServiceProviderOutputOption(desiredServiceProvider));
}
foreach (string desiredServiceName in ffmpegState.MetadataServiceName)
{
_pipelineSteps.Add(new MetadataServiceNameOutputOption(desiredServiceName));
}
foreach (string desiredAudioLanguage in ffmpegState.MetadataAudioLanguage)
{
_pipelineSteps.Add(new MetadataAudioLanguageOutputOption(desiredAudioLanguage));
}
switch (ffmpegState.OutputFormat)
{
case OutputFormatKind.MpegTs:
_pipelineSteps.Add(new OutputFormatMpegTs());
_pipelineSteps.Add(new PipeProtocol());
// currentState = currentState with { OutputFormat = OutputFormatKind.MpegTs };
break;
case OutputFormatKind.Hls:
foreach (string playlistPath in ffmpegState.HlsPlaylistPath)
{
foreach (string segmentTemplate in ffmpegState.HlsSegmentTemplate)
{
var step = new OutputFormatHls(
desiredState,
videoStream.FrameRate,
segmentTemplate,
playlistPath);
currentState = step.NextState(currentState);
_pipelineSteps.Add(step);
}
}
break;
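// unlike the MpegTs branch, HLS writes segments to disk via the segment template,
// so no PipeProtocol step is added here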
}
var complexFilter = new ComplexFilter(
currentState,
ffmpegState,
_videoInputFile,
_audioInputFile,
_watermarkInputFile,
_subtitleInputFile,
originalDesiredPixelFormat,
currentState.PaddedSize,
_fontsFolder,
_logger);
_pipelineSteps.Add(complexFilter);
}
return new FFmpegPipeline(_pipelineSteps);
}
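// unlike Resize, Concat and Build do not clear _pipelineSteps, so both keep the default
// steps added in the constructor (hide banner, no stats, loglevel, format flags,
// no demux/decode delay, faststart, closed gop)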
private static bool IsDesiredVideoState(FrameState currentState, FrameState desiredState)
{
if (desiredState.VideoFormat == VideoFormat.Copy)
{
return true;
}
return currentState.VideoFormat == desiredState.VideoFormat &&
currentState.PixelFormat.Match(pf => pf.Name, () => string.Empty) ==
desiredState.PixelFormat.Match(pf => pf.Name, string.Empty) &&
(desiredState.VideoBitrate.IsNone || currentState.VideoBitrate == desiredState.VideoBitrate) &&
(desiredState.VideoBufferSize.IsNone || currentState.VideoBufferSize == desiredState.VideoBufferSize) &&
currentState.Realtime == desiredState.Realtime &&
(desiredState.VideoTrackTimeScale.IsNone ||
currentState.VideoTrackTimeScale == desiredState.VideoTrackTimeScale) &&
currentState.ScaledSize == desiredState.ScaledSize &&
currentState.PaddedSize == desiredState.PaddedSize &&
(desiredState.FrameRate.IsNone || currentState.FrameRate == desiredState.FrameRate);
}
}

11
ErsatzTV.Infrastructure.Tests/ErsatzTV.Infrastructure.Tests.csproj

@@ -10,15 +10,18 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.8.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.3.2" />
<PackageReference Include="Moq" Version="4.18.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
<PackageReference Include="Moq" Version="4.18.3" />
<PackageReference Include="NUnit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.2.1" />
<PackageReference Include="NUnit3TestAdapter" Version="4.3.1" />
<PackageReference Include="NUnit.Analyzers" Version="3.5.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="3.1.2" />
<PackageReference Include="coverlet.collector" Version="3.2.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

6
ErsatzTV.Infrastructure/ErsatzTV.Infrastructure.csproj

@@ -11,7 +11,7 @@
<PackageReference Include="Blurhash.ImageSharp" Version="3.0.0" />
<PackageReference Include="CliWrap" Version="3.5.0" />
<PackageReference Include="Dapper" Version="2.0.123" />
<PackageReference Include="Jint" Version="3.0.0-beta-2042" />
<PackageReference Include="Jint" Version="3.0.0-beta-2044" />
<PackageReference Include="Lucene.Net" Version="4.8.0-beta00016" />
<PackageReference Include="Lucene.Net.Analysis.Common" Version="4.8.0-beta00016" />
<PackageReference Include="Lucene.Net.QueryParser" Version="4.8.0-beta00016" />
@@ -21,14 +21,14 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="6.0.10" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.3.48">
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.4.27">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Refit" Version="6.3.2" />
<PackageReference Include="Refit.Newtonsoft.Json" Version="6.3.2" />
<PackageReference Include="Refit.Xml" Version="6.3.2" />
<PackageReference Include="Scriban.Signed" Version="5.5.0" />
<PackageReference Include="Scriban.Signed" Version="5.5.1" />
</ItemGroup>
<ItemGroup>

1
ErsatzTV.sln.DotSettings

@@ -21,6 +21,7 @@
<s:Boolean x:Key="/Default/UserDictionary/Words/=discardcorrupt/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=drawtext/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Emby/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Entrypoints/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=episodedetails/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=ersatztv/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=etvignore/@EntryIndexedValue">True</s:Boolean>

14
ErsatzTV/ErsatzTV.csproj

@@ -53,29 +53,29 @@
<ItemGroup>
<PackageReference Include="Bugsnag.AspNet.Core" Version="3.1.0" />
<PackageReference Include="FluentValidation" Version="11.2.2" />
<PackageReference Include="FluentValidation" Version="11.4.0" />
<PackageReference Include="FluentValidation.AspNetCore" Version="11.2.2" />
<PackageReference Include="HtmlSanitizer" Version="8.0.601" />
<PackageReference Include="LanguageExt.Core" Version="4.2.9" />
<PackageReference Include="LanguageExt.Core" Version="4.4.0" />
<PackageReference Include="Markdig" Version="0.30.4" />
<PackageReference Include="MediatR.Courier.DependencyInjection" Version="5.0.0" />
<PackageReference Include="MediatR.Extensions.Microsoft.DependencyInjection" Version="11.0.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="6.0.10" />
<PackageReference Include="Microsoft.AspNetCore.SpaServices.Extensions" Version="6.0.10" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="6.0.11" />
<PackageReference Include="Microsoft.AspNetCore.SpaServices.Extensions" Version="6.0.11" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.10">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.3.48">
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.4.27">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="MudBlazor" Version="6.1.3-dev.1" />
<PackageReference Include="MudBlazor" Version="6.1.4" />
<PackageReference Include="NaturalSort.Extension" Version="4.0.0" />
<PackageReference Include="PPioli.FluentValidation.Blazor" Version="11.1.0" />
<PackageReference Include="Refit.HttpClientFactory" Version="6.3.2" />
<PackageReference Include="Serilog" Version="2.12.0" />
<PackageReference Include="Serilog.AspNetCore" Version="6.0.1" />
<PackageReference Include="Serilog.AspNetCore" Version="6.1.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="3.4.0" />
<PackageReference Include="Serilog.Sinks.SQLite" Version="6.0.1" />
<PackageReference Include="System.IO.FileSystem.Primitives" Version="4.3.0" />
