
fix ten bit source and destination with segmenter v2 (#1633)

* fix software and vaapi 10 bit content

* fix nvidia 10 bit content

* some qsv improvements
Jason Dove, 1 year ago, committed by GitHub · commit 70c4036dc9
  1. ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs (12 changed lines)
  2. ErsatzTV.Core/FFmpeg/SongVideoGenerator.cs (7 changed lines)
  3. ErsatzTV.FFmpeg/Capabilities/FFmpegCapabilities.cs (1 changed line)
  4. ErsatzTV.FFmpeg/Decoder/DecoderRawVideo.cs (8 changed lines)
  5. ErsatzTV.FFmpeg/Decoder/DecoderVaapi.cs (9 changed lines)
  6. ErsatzTV.FFmpeg/FFmpegState.cs (28 changed lines)
  7. ErsatzTV.FFmpeg/Format/PixelFormatVaapi.cs (8 changed lines)
  8. ErsatzTV.FFmpeg/OutputFormat/OutputFormatNut.cs (14 changed lines)
  9. ErsatzTV.FFmpeg/OutputOption/PixelFormatOutputOption.cs (5 changed lines)
  10. ErsatzTV.FFmpeg/Pipeline/IPipelineBuilder.cs (1 changed line)
  11. ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (23 changed lines)
  12. ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs (69 changed lines)
  13. ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (14 changed lines)
  14. ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (45 changed lines)
  15. ErsatzTV.Scanner.Tests/Core/FFmpeg/TranscodingTests.cs (61 changed lines)
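Taken together, the core of the change is to derive the transcode pixel format from the profile's configured bit depth (rather than from the video bitrate) and to feed the segmenter-v2 concat process an explicit NUT stream. A minimal illustrative sketch of that selection, using the types touched below (not the literal patch):

// Illustrative sketch only; the exact code is in FFmpegLibraryProcessService.cs below.
IPixelFormat pixelFormat = channel.FFmpegProfile.BitDepth switch
{
    FFmpegProfileBitDepth.TenBit => new PixelFormatYuv420P10Le(), // 10-bit source and destination
    _ => new PixelFormatYuv420P()                                 // everything else stays 8-bit
};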

ErsatzTV.Core/FFmpeg/FFmpegLibraryProcessService.cs (12 changed lines)

@@ -615,16 +615,15 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
_ => AudioFilter.None
});
IPixelFormat pixelFormat = channel.FFmpegProfile.VideoBitrate switch
IPixelFormat pixelFormat = channel.FFmpegProfile.BitDepth switch
{
8 => new PixelFormatYuv420P(), // PixelFormatNv12(PixelFormat.YUV420P),
10 => new PixelFormatYuv420P10Le(), // TODO: does 10 bit work?
_ => new PixelFormatUnknown(channel.FFmpegProfile.VideoBitrate)
FFmpegProfileBitDepth.TenBit => new PixelFormatYuv420P10Le(),
_ => new PixelFormatYuv420P()
};
var ffmpegVideoStream = new VideoStream(
Index: 0,
Codec: string.Empty,
Codec: VideoFormat.Raw,
Some(pixelFormat),
ColorParams.Default,
resolution,
@@ -712,6 +711,9 @@ public class FFmpegLibraryProcessService : IFFmpegProcessService
ffmpegPath);
FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);
// copy video input options to concat input
concatInputFile.InputOptions.AddRange(videoInputFile.InputOptions);
return GetCommand(ffmpegPath, None, None, None, concatInputFile, pipeline);
}

ErsatzTV.Core/FFmpeg/SongVideoGenerator.cs (7 changed lines)

@@ -40,14 +40,11 @@ public class SongVideoGenerator : ISongVideoGenerator
MediaVersion videoVersion = new FallbackMediaVersion
{
Id = -1,
Chapters = new List<MediaChapter>(),
Chapters = [],
Width = 192,
Height = 108,
SampleAspectRatio = "1:1",
Streams = new List<MediaStream>
{
new() { MediaStreamKind = MediaStreamKind.Video, Index = 0, PixelFormat = "yuv420p" }
}
Streams = [new MediaStream { MediaStreamKind = MediaStreamKind.Video, Index = 0, PixelFormat = "yuv420p" }]
};
string[] backgrounds =

ErsatzTV.FFmpeg/Capabilities/FFmpegCapabilities.cs (1 changed line)

@@ -73,6 +73,7 @@ public class FFmpegCapabilities : IFFmpegCapabilities
VideoFormat.Vp9 => new DecoderVp9(),
VideoFormat.Av1 => new DecoderAv1(_ffmpegDecoders),
VideoFormat.Raw => new DecoderRawVideo(),
VideoFormat.Undetermined => new DecoderImplicit(),
VideoFormat.Copy => new DecoderImplicit(),
VideoFormat.GeneratedImage => new DecoderImplicit(),

ErsatzTV.FFmpeg/Decoder/DecoderRawVideo.cs (8 changed lines)

@@ -0,0 +1,8 @@
namespace ErsatzTV.FFmpeg.Decoder;
public class DecoderRawVideo : DecoderBase
{
public override string Name => "rawvideo";
protected override FrameDataLocation OutputFrameDataLocation => FrameDataLocation.Software;
}

ErsatzTV.FFmpeg/Decoder/DecoderVaapi.cs (9 changed lines)

@@ -16,12 +16,9 @@ public class DecoderVaapi : DecoderBase
FrameState nextState = base.NextState(currentState);
return currentState.PixelFormat.Match(
pixelFormat =>
{
return pixelFormat.BitDepth == 8
? nextState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) }
: nextState with { PixelFormat = new PixelFormatVaapi(pixelFormat.Name) };
},
pixelFormat => pixelFormat.BitDepth == 8
? nextState with { PixelFormat = new PixelFormatNv12(pixelFormat.Name) }
: nextState with { PixelFormat = new PixelFormatVaapi(pixelFormat.Name, 10) },
() => nextState);
}
}

ErsatzTV.FFmpeg/FFmpegState.cs (28 changed lines)

@@ -46,32 +46,4 @@ public record FFmpegState(
0,
Option<int>.None,
Option<int>.None);
public static FFmpegState ConcatSegmenter(
bool saveReport,
string channelName,
Option<string> vaapiDriver,
Option<string> vaapiDevice,
Option<string> hlsPlaylistPath,
Option<string> hlsSegmentTemplate) =>
new(
saveReport,
HardwareAccelerationMode.None,
HardwareAccelerationMode.Vaapi,
vaapiDriver,
vaapiDevice,
Option<TimeSpan>.None,
Option<TimeSpan>.None,
true, // do not map metadata
"ErsatzTV",
channelName,
Option<string>.None,
Option<string>.None,
Option<string>.None,
OutputFormatKind.Hls,
hlsPlaylistPath,
hlsSegmentTemplate,
0,
Option<int>.None,
Option<int>.None);
}

ErsatzTV.FFmpeg/Format/PixelFormatVaapi.cs (8 changed lines)

@@ -2,11 +2,15 @@
public class PixelFormatVaapi : IPixelFormat
{
public PixelFormatVaapi(string name) => Name = name;
public PixelFormatVaapi(string name, int bitDepth = 8)
{
Name = name;
BitDepth = bitDepth;
}
public string Name { get; }
public string FFmpegName => "vaapi";
public int BitDepth => 8;
public int BitDepth { get; }
}
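A small usage sketch of the widened constructor (the pixel format names here are examples only): 8-bit callers keep the old single-argument form, while the VAAPI decoder change above now passes the depth explicitly for 10-bit content.

// Illustrative only: BitDepth defaults to 8 unless a caller states otherwise.
IPixelFormat eightBit = new PixelFormatVaapi("yuv420p");        // BitDepth == 8
IPixelFormat tenBit = new PixelFormatVaapi("yuv420p10le", 10);  // BitDepth == 10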

ErsatzTV.FFmpeg/OutputFormat/OutputFormatNut.cs (14 changed lines)

@@ -0,0 +1,14 @@
using ErsatzTV.FFmpeg.Environment;
namespace ErsatzTV.FFmpeg.OutputFormat;
public class OutputFormatNut : IPipelineStep
{
public EnvironmentVariable[] EnvironmentVariables => Array.Empty<EnvironmentVariable>();
public string[] GlobalOptions => Array.Empty<string>();
public string[] InputOptions(InputFile inputFile) => Array.Empty<string>();
public string[] FilterOptions => Array.Empty<string>();
public string[] OutputOptions => ["-f", "nut"];
public FrameState NextState(FrameState currentState) => currentState;
}
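This step only contributes the container flag; per the PipelineBuilderBase hunk later in this diff, it replaces the earlier MKV stand-in for OutputFormatKind.Nut, so the transcode now pipes an actual NUT stream to the concat segmenter. A trivial usage sketch:

// Illustrative only: the step adds "-f nut" to the output options and leaves frame state unchanged.
var nut = new OutputFormatNut();
string[] args = nut.OutputOptions; // ["-f", "nut"]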

ErsatzTV.FFmpeg/OutputOption/PixelFormatOutputOption.cs (5 changed lines)

@@ -8,10 +8,7 @@ public class PixelFormatOutputOption : OutputOption
public PixelFormatOutputOption(IPixelFormat pixelFormat) => _pixelFormat = pixelFormat;
public override string[] OutputOptions => new[]
{
"-pix_fmt", _pixelFormat.FFmpegName
};
public override string[] OutputOptions => ["-pix_fmt", _pixelFormat.Name];
public override FrameState NextState(FrameState currentState) =>
currentState with { PixelFormat = Some(_pixelFormat) };
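With the switch from FFmpegName to Name, the option now emits the pixel format's own name. A usage sketch with the 10-bit software format used elsewhere in this commit (the rendered value assumes the class exposes "yuv420p10le" as its Name):

// Illustrative only: expected to render as "-pix_fmt yuv420p10le".
var pixFmt = new PixelFormatOutputOption(new PixelFormatYuv420P10Le());
string[] args = pixFmt.OutputOptions;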

ErsatzTV.FFmpeg/Pipeline/IPipelineBuilder.cs (1 changed line)

@@ -4,7 +4,6 @@ public interface IPipelineBuilder
{
FFmpegPipeline Resize(string outputFile, FrameSize scaledSize);
FFmpegPipeline Concat(ConcatInputFile concatInputFile, FFmpegState ffmpegState);
FFmpegPipeline ConcatSegmenter(ConcatInputFile concatInputFile, FFmpegState ffmpegState);
FFmpegPipeline WrapSegmenter(ConcatInputFile concatInputFile, FFmpegState ffmpegState);
FFmpegPipeline Build(FFmpegState ffmpegState, FrameState desiredState);
}

ErsatzTV.FFmpeg/Pipeline/NvidiaPipelineBuilder.cs (23 changed lines)

@@ -333,9 +333,27 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
}
}
// _logger.LogDebug(
// "{CurrentPixelFormat} => {DesiredPixelFormat}",
// currentState
// .PixelFormat,
// desiredPixelFormat);
if (currentState.FrameDataLocation == FrameDataLocation.Hardware &&
ffmpegState.EncoderHardwareAccelerationMode == HardwareAccelerationMode.None)
{
if (currentState.PixelFormat.Map(f => f.FFmpegName) != format.FFmpegName)
{
_logger.LogDebug(
"Format {A} doesn't equal {B}",
currentState.PixelFormat.Map(f => f.FFmpegName),
format.FFmpegName);
var formatFilter = new CudaFormatFilter(format);
currentState = formatFilter.NextState(currentState);
result.Add(formatFilter);
}
var hardwareDownload = new CudaHardwareDownloadFilter(currentState.PixelFormat, Some(format));
currentState = hardwareDownload.NextState(currentState);
result.Add(hardwareDownload);
@@ -375,6 +393,11 @@ public class NvidiaPipelineBuilder : SoftwarePipelineBuilder
pipelineSteps.Add(new PixelFormatOutputOption(format));
}
}
if (ffmpegState.OutputFormat is OutputFormatKind.Nut && format.BitDepth == 10)
{
pipelineSteps.Add(new PixelFormatOutputOption(format));
}
}
return result;

ErsatzTV.FFmpeg/Pipeline/PipelineBuilderBase.cs (69 changed lines)

@@ -121,71 +121,6 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
return new FFmpegPipeline(pipelineSteps, false);
}
public FFmpegPipeline ConcatSegmenter(ConcatInputFile concatInputFile, FFmpegState ffmpegState)
{
var pipelineSteps = new List<IPipelineStep>
{
new NoStandardInputOption(),
new HideBannerOption(),
new NoStatsOption(),
new LoglevelErrorOption(),
new StandardFormatFlags(),
new NoDemuxDecodeDelayOutputOption(),
new FastStartOutputOption(),
new ClosedGopOutputOption(),
new NoBFramesOutputOption()
};
concatInputFile.AddOption(new ConcatInputFormat());
concatInputFile.AddOption(new InfiniteLoopInputOption(HardwareAccelerationMode.None));
foreach (int threadCount in ffmpegState.ThreadCount)
{
pipelineSteps.Insert(0, new ThreadCountOption(threadCount));
}
pipelineSteps.Add(new NoSceneDetectOutputOption(0));
foreach (string vaapiDevice in ffmpegState.VaapiDevice)
{
pipelineSteps.Add(new VaapiHardwareAccelerationOption(vaapiDevice, FFmpegCapability.Software));
foreach (string driverName in ffmpegState.VaapiDriver)
{
pipelineSteps.Add(new LibvaDriverNameVariable(driverName));
}
}
pipelineSteps.Add(new EncoderH264Vaapi(RateControlMode.VBR));
pipelineSteps.Add(new EncoderAac());
//pipelineSteps.Add(new EncoderCopyAll());
if (ffmpegState.DoNotMapMetadata)
{
pipelineSteps.Add(new DoNotMapMetadataOutputOption());
}
pipelineSteps.AddRange(
ffmpegState.MetadataServiceProvider.Map(sp => new MetadataServiceProviderOutputOption(sp)));
pipelineSteps.AddRange(ffmpegState.MetadataServiceName.Map(sn => new MetadataServiceNameOutputOption(sn)));
foreach (string segmentTemplate in ffmpegState.HlsSegmentTemplate)
{
foreach (string playlistPath in ffmpegState.HlsPlaylistPath)
{
pipelineSteps.Add(new OutputFormatConcatHls(segmentTemplate, playlistPath));
}
}
if (ffmpegState.SaveReport)
{
pipelineSteps.Add(new FFReportVariable(_reportsFolder, concatInputFile));
}
return new FFmpegPipeline(pipelineSteps, false);
}
public FFmpegPipeline WrapSegmenter(ConcatInputFile concatInputFile, FFmpegState ffmpegState)
{
var pipelineSteps = new List<IPipelineStep>
@@ -360,9 +295,7 @@ public abstract class PipelineBuilderBase : IPipelineBuilder
pipelineSteps.Add(new PipeProtocol());
break;
case OutputFormatKind.Nut:
// yes, not really "nut" - but nut is currently used to indicate a transcoding
// source that feeds into a concat segmenter
pipelineSteps.Add(new OutputFormatMkv());
pipelineSteps.Add(new OutputFormatNut());
pipelineSteps.Add(new PipeProtocol());
break;
case OutputFormatKind.Mp4:

ErsatzTV.FFmpeg/Pipeline/QsvPipelineBuilder.cs (14 changed lines)

@@ -359,6 +359,8 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
{
_logger.LogDebug("FrameDataLocation == FrameDataLocation.Hardware");
formatForDownload = new PixelFormatNv12(formatForDownload.Name);
var hardwareDownload =
new HardwareDownloadFilter(currentState with { PixelFormat = Some(formatForDownload) });
currentState = hardwareDownload.NextState(currentState);
@@ -385,6 +387,18 @@ public class QsvPipelineBuilder : SoftwarePipelineBuilder
pipelineSteps.Add(new PixelFormatOutputOption(format));
}
// be explicit with pixel format when feeding to concat
if (ffmpegState.OutputFormat is OutputFormatKind.Nut)
{
Option<IPipelineStep> maybePixelFormat = pipelineSteps.Find(s => s is PixelFormatOutputOption);
foreach (IPipelineStep pf in maybePixelFormat)
{
pipelineSteps.Remove(pf);
}
pipelineSteps.Add(new PixelFormatOutputOption(format));
}
}
return result;

ErsatzTV.FFmpeg/Pipeline/VaapiPipelineBuilder.cs (45 changed lines)

@@ -248,7 +248,8 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
videoStream,
desiredState.PixelFormat,
ffmpegState,
currentState);
currentState,
pipelineSteps);
return new FilterChain(
videoInputFile.FilterSteps,
@@ -263,7 +264,8 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
VideoStream videoStream,
Option<IPixelFormat> desiredPixelFormat,
FFmpegState ffmpegState,
FrameState currentState)
FrameState currentState,
ICollection<IPipelineStep> pipelineSteps)
{
var result = new List<IPipelineFilterStep>();
@@ -296,13 +298,17 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
if (currentState.FrameDataLocation == FrameDataLocation.Hardware)
{
_logger.LogDebug("FrameDataLocation == FrameDataLocation.Hardware");
// don't try to download from 8-bit to 10-bit
HardwareDownloadFilter hardwareDownload = currentState.BitDepth == 8 &&
desiredPixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10
? new HardwareDownloadFilter(currentState)
: new HardwareDownloadFilter(currentState with { PixelFormat = Some(format) });
_logger.LogDebug(
"FrameDataLocation == FrameDataLocation.Hardware, {CurrentPixelFormat} bit => {DesiredPixelFormat}",
currentState.PixelFormat,
desiredPixelFormat);
// don't try to download from 8-bit to 10-bit, or 10-bit to 8-bit
HardwareDownloadFilter hardwareDownload =
currentState.BitDepth == 8 && desiredPixelFormat.Map(pf => pf.BitDepth).IfNone(8) == 10 ||
currentState.BitDepth == 10 && desiredPixelFormat.Map(pf => pf.BitDepth).IfNone(10) == 8
? new HardwareDownloadFilter(currentState)
: new HardwareDownloadFilter(currentState with { PixelFormat = Some(format) });
currentState = hardwareDownload.NextState(currentState);
result.Add(hardwareDownload);
@@ -316,19 +322,26 @@ public class VaapiPipelineBuilder : SoftwarePipelineBuilder
currentState.PixelFormat.Map(f => f.FFmpegName),
format.FFmpegName);
// NV12 is 8-bit
if (format is PixelFormatYuv420P)
{
format = new PixelFormatNv12(format.Name);
}
if (currentState.FrameDataLocation == FrameDataLocation.Hardware)
{
// NV12 is 8-bit
if (format is PixelFormatYuv420P)
{
format = new PixelFormatNv12(format.Name);
}
result.Add(new VaapiFormatFilter(format));
}
else
{
result.Add(new PixelFormatFilter(format));
if (ffmpegState.EncoderHardwareAccelerationMode is HardwareAccelerationMode.Vaapi)
{
result.Add(new PixelFormatFilter(format));
}
else
{
pipelineSteps.Add(new PixelFormatOutputOption(format));
}
}
}
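The VAAPI hardware-download change above amounts to a bit-depth guard: hwdownload is never asked to convert between 8-bit and 10-bit, and when the depths differ the frame is downloaded in its current format. An equivalent restatement (illustrative, simplified; not the literal patch):

// Illustrative restatement of the guard above.
int desiredDepth = desiredPixelFormat.Map(pf => pf.BitDepth).IfNone(currentState.BitDepth);
bool depthMismatch =
    (currentState.BitDepth == 8 && desiredDepth == 10) ||
    (currentState.BitDepth == 10 && desiredDepth == 8);
HardwareDownloadFilter hardwareDownload = depthMismatch
    ? new HardwareDownloadFilter(currentState)                                      // download as-is
    : new HardwareDownloadFilter(currentState with { PixelFormat = Some(format) }); // download and convert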

ErsatzTV.Scanner.Tests/Core/FFmpeg/TranscodingTests.cs (61 changed lines)

@@ -132,7 +132,7 @@ public class TranscodingTests
public static ScalingBehavior[] ScalingBehaviors =
[
ScalingBehavior.ScaleAndPad,
ScalingBehavior.Crop,
//ScalingBehavior.Crop,
//ScalingBehavior.Stretch
];
@@ -158,8 +158,8 @@ public class TranscodingTests
// // // //
// new("mpeg2video", "yuv420p"),
// //
new InputFormat("libx265", "yuv420p")
// new("libx265", "yuv420p10le")
new InputFormat("libx265", "yuv420p"),
new InputFormat("libx265", "yuv420p10le")
//
// new("mpeg4", "yuv420p"),
//
@@ -184,8 +184,8 @@ public class TranscodingTests
public static FFmpegProfileBitDepth[] BitDepths =
[
FFmpegProfileBitDepth.EightBit
//FFmpegProfileBitDepth.TenBit
FFmpegProfileBitDepth.EightBit,
FFmpegProfileBitDepth.TenBit
];
public static FFmpegProfileVideoFormat[] VideoFormats =
@@ -198,13 +198,20 @@ public class TranscodingTests
public static HardwareAccelerationKind[] TestAccelerations =
[
HardwareAccelerationKind.None,
HardwareAccelerationKind.Nvenc,
//HardwareAccelerationKind.Vaapi,
HardwareAccelerationKind.Qsv,
//HardwareAccelerationKind.Nvenc,
HardwareAccelerationKind.Vaapi,
//HardwareAccelerationKind.Qsv,
// HardwareAccelerationKind.VideoToolbox,
// HardwareAccelerationKind.Amf
];
public static StreamingMode[] StreamingModes =
[
StreamingMode.TransportStream,
//StreamingMode.HttpLiveStreamingSegmenter,
StreamingMode.HttpLiveStreamingSegmenterV2
];
public static string[] FilesToTest => [string.Empty];
}
@@ -220,7 +227,9 @@ public class TranscodingTests
[ValueSource(typeof(TestData), nameof(TestData.VideoFormats))]
FFmpegProfileVideoFormat profileVideoFormat,
[ValueSource(typeof(TestData), nameof(TestData.TestAccelerations))]
HardwareAccelerationKind profileAcceleration)
HardwareAccelerationKind profileAcceleration,
[ValueSource(typeof(TestData), nameof(TestData.StreamingModes))]
StreamingMode streamingMode)
{
var localFileSystem = new LocalFileSystem(
Substitute.For<IClient>(),
@@ -271,19 +280,15 @@ public class TranscodingTests
DeinterlaceVideo = true,
BitDepth = profileBitDepth
},
StreamingMode = StreamingMode.TransportStream,
StreamingMode = streamingMode,
SubtitleMode = ChannelSubtitleMode.None
};
string file = Path.Combine(TestContext.CurrentContext.TestDirectory, Path.Combine("Resources", "song.mp3"));
var songVersion = new MediaVersion
{
MediaFiles = new List<MediaFile>
{
new() { Path = file }
},
Streams = new List<MediaStream>()
MediaFiles = [new MediaFile { Path = file }],
Streams = []
};
var song = new Song
@@ -294,6 +299,7 @@ public class TranscodingTests
{
Title = "Song Title",
Artists = ["Song Artist"],
AlbumArtists = [],
Artwork = []
}
],
@@ -378,6 +384,7 @@ public class TranscodingTests
profileAcceleration,
VaapiDriver.RadeonSI,
localStatisticsProvider,
streamingMode,
() => videoVersion);
}
@@ -405,7 +412,9 @@ public class TranscodingTests
[ValueSource(typeof(TestData), nameof(TestData.VideoFormats))]
FFmpegProfileVideoFormat profileVideoFormat,
[ValueSource(typeof(TestData), nameof(TestData.TestAccelerations))]
HardwareAccelerationKind profileAcceleration)
HardwareAccelerationKind profileAcceleration,
[ValueSource(typeof(TestData), nameof(TestData.StreamingModes))]
StreamingMode streamingMode)
{
string file = fileToTest;
if (string.IsNullOrWhiteSpace(file))
@@ -613,7 +622,7 @@ public class TranscodingTests
BitDepth = profileBitDepth,
ScalingBehavior = scalingBehavior
},
StreamingMode = StreamingMode.TransportStream,
StreamingMode = streamingMode,
SubtitleMode = subtitleMode
},
v,
@@ -652,6 +661,7 @@ public class TranscodingTests
profileAcceleration,
VaapiDriver.RadeonSI,
localStatisticsProvider,
streamingMode,
() => v);
}
@@ -898,6 +908,7 @@ public class TranscodingTests
HardwareAccelerationKind profileAcceleration,
VaapiDriver vaapiDriver,
ILocalStatisticsProvider localStatisticsProvider,
StreamingMode streamingMode,
Func<MediaVersion> getFinalMediaVersion)
{
string[] unsupportedMessages =
@@ -994,8 +1005,14 @@ public class TranscodingTests
foreach (MediaStream videoStream in v.Streams.Filter(s => s.MediaStreamKind == MediaStreamKind.Video))
{
// verify pixel format
videoStream.PixelFormat.Should().Be(
profileBitDepth == FFmpegProfileBitDepth.TenBit ? PixelFormat.YUV420P10LE : PixelFormat.YUV420P);
string expectedPixelFormat = (profileBitDepth, streamingMode) switch
{
//(FFmpegProfileBitDepth.TenBit, StreamingMode.HttpLiveStreamingSegmenterV2) => PixelFormat.RGB555LE,
(FFmpegProfileBitDepth.TenBit, _) => PixelFormat.YUV420P10LE,
_ => PixelFormat.YUV420P
};
videoStream.PixelFormat.Should().Be(expectedPixelFormat);
// verify colors
var colorParams = new ColorParams(
@@ -1007,9 +1024,11 @@ public class TranscodingTests
// AMF doesn't seem to set this metadata properly
// MPEG2Video doesn't always seem to set this properly
// RADEONSI driver doesn't set this properly
// NUT doesn't set this properly
if (profileAcceleration != HardwareAccelerationKind.Amf &&
profileVideoFormat != FFmpegProfileVideoFormat.Mpeg2Video &&
(profileAcceleration != HardwareAccelerationKind.Vaapi || vaapiDriver != VaapiDriver.RadeonSI))
(profileAcceleration != HardwareAccelerationKind.Vaapi || vaapiDriver != VaapiDriver.RadeonSI) &&
streamingMode != StreamingMode.HttpLiveStreamingSegmenterV2)
{
colorParams.IsBt709.Should().BeTrue($"{colorParams}");
}
