using CliWrap;
using ErsatzTV.Core.Domain;
using ErsatzTV.Core.Domain.Filler;
using ErsatzTV.Core.Interfaces.FFmpeg;
using ErsatzTV.Core.Interfaces.Repositories;
using ErsatzTV.FFmpeg;
using ErsatzTV.FFmpeg.Environment;
using ErsatzTV.FFmpeg.Format;
using ErsatzTV.FFmpeg.OutputFormat;
using ErsatzTV.FFmpeg.Pipeline;
using ErsatzTV.FFmpeg.State;
using Microsoft.Extensions.Logging;
using MediaStream = ErsatzTV.Core.Domain.MediaStream;

namespace ErsatzTV.Core.FFmpeg;

/// <summary>
/// Builds ffmpeg <see cref="Command"/> instances for channel playback, error display,
/// channel concatenation, segmenter wrapping and image resizing by translating ErsatzTV
/// domain state into the ErsatzTV.FFmpeg pipeline model.
/// </summary>
public class FFmpegLibraryProcessService : IFFmpegProcessService
{
    private readonly IConfigElementRepository _configElementRepository;
    private readonly FFmpegProcessService _ffmpegProcessService;
    private readonly IFFmpegStreamSelector _ffmpegStreamSelector;
    private readonly ILogger _logger;
    private readonly IPipelineBuilderFactory _pipelineBuilderFactory;
    private readonly ITempFilePool _tempFilePool;

    public FFmpegLibraryProcessService(
        FFmpegProcessService ffmpegProcessService,
        IFFmpegStreamSelector ffmpegStreamSelector,
        ITempFilePool tempFilePool,
        IPipelineBuilderFactory pipelineBuilderFactory,
        IConfigElementRepository configElementRepository,
        ILogger logger)
    {
        _ffmpegProcessService = ffmpegProcessService;
        _ffmpegStreamSelector = ffmpegStreamSelector;
        _tempFilePool = tempFilePool;
        _pipelineBuilderFactory = pipelineBuilderFactory;
        _configElementRepository = configElementRepository;
        _logger = logger;
    }

    /// <summary>
    /// Builds the ffmpeg command used to play a single playout item, selecting video/audio/subtitle
    /// streams, calculating playback settings and watermark state, and assembling the pipeline.
    /// </summary>
    /// <returns>The fully-configured ffmpeg <see cref="Command"/>.</returns>
    public async Task<Command> ForPlayoutItem(
        string ffmpegPath,
        string ffprobePath,
        bool saveReports,
        Channel channel,
        MediaVersion videoVersion,
        MediaItemAudioVersion audioVersion,
        string videoPath,
        string audioPath,
        Func<FFmpegPlaybackSettings, Task<List<Subtitle>>> getSubtitles,
        string preferredAudioLanguage,
        string preferredAudioTitle,
        string preferredSubtitleLanguage,
        ChannelSubtitleMode subtitleMode,
        DateTimeOffset start,
        DateTimeOffset finish,
        DateTimeOffset now,
        Option<ChannelWatermark> playoutItemWatermark,
        Option<ChannelWatermark> globalWatermark,
        VaapiDriver vaapiDriver,
        string vaapiDevice,
        Option<int> qsvExtraHardwareFrames,
        bool hlsRealtime,
        FillerKind fillerKind,
        TimeSpan inPoint,
        TimeSpan outPoint,
        long ptsOffset,
        Option<int> targetFramerate,
        bool disableWatermarks,
        Action<FFmpegPipeline> pipelineAction)
    {
        MediaStream videoStream = await _ffmpegStreamSelector.SelectVideoStream(videoVersion);

        Option<MediaStream> maybeAudioStream = await _ffmpegStreamSelector.SelectAudioStream(
            audioVersion,
            channel.StreamingMode,
            channel,
            preferredAudioLanguage,
            preferredAudioTitle);

        FFmpegPlaybackSettings playbackSettings = FFmpegPlaybackSettingsCalculator.CalculateSettings(
            channel.StreamingMode,
            channel.FFmpegProfile,
            videoVersion,
            videoStream,
            maybeAudioStream,
            start,
            now,
            inPoint,
            outPoint,
            hlsRealtime,
            targetFramerate);

        List<Subtitle> allSubtitles = await getSubtitles(playbackSettings);

        Option<Subtitle> maybeSubtitle = await _ffmpegStreamSelector.SelectSubtitleStream(
            allSubtitles,
            channel,
            preferredSubtitleLanguage,
            subtitleMode);

        foreach (Subtitle subtitle in maybeSubtitle)
        {
            if (subtitle.SubtitleKind == SubtitleKind.Sidecar)
            {
                // proxy to avoid dealing with escaping
                subtitle.Path = $"http://localhost:{Settings.ListenPort}/media/subtitle/{subtitle.Id}";
            }
        }

        Option<WatermarkOptions> watermarkOptions = disableWatermarks
            ? None
            : await _ffmpegProcessService.GetWatermarkOptions(
                ffprobePath,
                channel,
                playoutItemWatermark,
                globalWatermark,
                videoVersion,
                None,
                None);

        // intermittent watermarks need fade in/out points precomputed for the whole item
        Option<List<FadePoint>> maybeFadePoints = watermarkOptions
            .Map(o => o.Watermark)
            .Flatten()
            .Where(wm => wm.Mode == ChannelWatermarkMode.Intermittent)
            .Map(
                wm => WatermarkCalculator.CalculateFadePoints(
                    start,
                    inPoint,
                    outPoint,
                    playbackSettings.StreamSeek,
                    wm.FrequencyMinutes,
                    wm.DurationSeconds));

        string audioFormat = playbackSettings.AudioFormat switch
        {
            FFmpegProfileAudioFormat.Aac => AudioFormat.Aac,
            FFmpegProfileAudioFormat.Ac3 => AudioFormat.Ac3,
            FFmpegProfileAudioFormat.Copy => AudioFormat.Copy,

            // bug fix: this message previously interpolated playbackSettings.VideoFormat
            _ => throw new ArgumentOutOfRangeException(
                $"unexpected audio format {playbackSettings.AudioFormat}")
        };

        var audioState = new AudioState(
            audioFormat,
            playbackSettings.AudioChannels,
            playbackSettings.AudioBitrate,
            playbackSettings.AudioBufferSize,
            playbackSettings.AudioSampleRate,
            videoPath == audioPath ? playbackSettings.AudioDuration : Option<TimeSpan>.None,
            playbackSettings.NormalizeLoudnessMode switch
            {
                NormalizeLoudnessMode.LoudNorm => AudioFilter.LoudNorm,
                _ => AudioFilter.None
            });

        // don't log generated images, or hls direct, which are expected to have unknown format
        bool isUnknownPixelFormatExpected =
            videoPath != audioPath || channel.StreamingMode == StreamingMode.HttpLiveStreamingDirect;
        ILogger pixelFormatLogger = isUnknownPixelFormatExpected ? null : _logger;

        IPixelFormat pixelFormat = await AvailablePixelFormats
            .ForPixelFormat(videoStream.PixelFormat, pixelFormatLogger)
            .IfNoneAsync(
                () =>
                {
                    return videoStream.BitsPerRawSample switch
                    {
                        8 => new PixelFormatYuv420P(),
                        10 => new PixelFormatYuv420P10Le(),
                        _ => new PixelFormatUnknown(videoStream.BitsPerRawSample)
                    };
                });

        var ffmpegVideoStream = new VideoStream(
            videoStream.Index,
            videoStream.Codec,
            Some(pixelFormat),
            new ColorParams(
                videoStream.ColorRange,
                videoStream.ColorSpace,
                videoStream.ColorTransfer,
                videoStream.ColorPrimaries),
            new FrameSize(videoVersion.Width, videoVersion.Height),
            videoVersion.SampleAspectRatio,
            videoVersion.DisplayAspectRatio,
            videoVersion.RFrameRate,
            videoPath != audioPath, // still image when paths are different
            videoVersion.VideoScanKind == VideoScanKind.Progressive
                ? ScanKind.Progressive
                : ScanKind.Interlaced);

        var videoInputFile = new VideoInputFile(videoPath, new List<VideoStream> { ffmpegVideoStream });

        Option<AudioInputFile> audioInputFile = maybeAudioStream.Map(
            audioStream =>
            {
                var ffmpegAudioStream = new AudioStream(audioStream.Index, audioStream.Codec, audioStream.Channels);
                return new AudioInputFile(audioPath, new List<AudioStream> { ffmpegAudioStream }, audioState);
            });

        OutputFormatKind outputFormat = OutputFormatKind.MpegTs;
        switch (channel.StreamingMode)
        {
            case StreamingMode.HttpLiveStreamingSegmenter:
                outputFormat = OutputFormatKind.Hls;
                break;
            case StreamingMode.HttpLiveStreamingDirect:
            {
                // use mpeg-ts by default
                outputFormat = OutputFormatKind.MpegTs;

                // override with setting if applicable
                Option<OutputFormatKind> maybeOutputFormat = await _configElementRepository
                    .GetValue<OutputFormatKind>(ConfigElementKey.FFmpegHlsDirectOutputFormat);
                foreach (OutputFormatKind of in maybeOutputFormat)
                {
                    outputFormat = of;
                }

                break;
            }
        }

        Option<string> subtitleLanguage = Option<string>.None;
        Option<string> subtitleTitle = Option<string>.None;

        Option<SubtitleInputFile> subtitleInputFile = maybeSubtitle.Map<Option<SubtitleInputFile>>(
            subtitle =>
            {
                if (!subtitle.IsImage && subtitle.SubtitleKind == SubtitleKind.Embedded &&
                    (!subtitle.IsExtracted || string.IsNullOrWhiteSpace(subtitle.Path)))
                {
                    _logger.LogWarning("Subtitles are not yet available for this item");
                    return None;
                }

                var ffmpegSubtitleStream = new ErsatzTV.FFmpeg.MediaStream(
                    subtitle.IsImage ? subtitle.StreamIndex : 0,
                    subtitle.Codec,
                    StreamKind.Video);

                string path = subtitle.IsImage switch
                {
                    true => videoPath,
                    false when subtitle.SubtitleKind == SubtitleKind.Sidecar => subtitle.Path,
                    _ => Path.Combine(FileSystemLayout.SubtitleCacheFolder, subtitle.Path)
                };

                SubtitleMethod method = SubtitleMethod.Burn;
                if (channel.StreamingMode == StreamingMode.HttpLiveStreamingDirect)
                {
                    method = (outputFormat, subtitle.SubtitleKind, subtitle.Codec) switch
                    {
                        // mkv supports all subtitle codecs, maybe?
                        (OutputFormatKind.Mkv, SubtitleKind.Embedded, _) => SubtitleMethod.Copy,

                        // MP4 supports vobsub
                        (OutputFormatKind.Mp4, SubtitleKind.Embedded, "dvdsub" or "dvd_subtitle" or "vobsub") =>
                            SubtitleMethod.Copy,

                        // MP4 does not support PGS
                        (OutputFormatKind.Mp4, SubtitleKind.Embedded, "pgs" or "pgssub" or "hdmv_pgs_subtitle") =>
                            SubtitleMethod.None,

                        // ignore text subtitles for now
                        _ => SubtitleMethod.None
                    };

                    if (method == SubtitleMethod.None)
                    {
                        return None;
                    }

                    // hls direct won't use extracted embedded subtitles
                    if (subtitle.SubtitleKind == SubtitleKind.Embedded)
                    {
                        path = videoPath;
                        ffmpegSubtitleStream = ffmpegSubtitleStream with { Index = subtitle.StreamIndex };
                    }
                }

                if (method == SubtitleMethod.Copy)
                {
                    subtitleLanguage = Optional(subtitle.Language);
                    subtitleTitle = Optional(subtitle.Title);
                }

                return new SubtitleInputFile(
                    path,
                    new List<ErsatzTV.FFmpeg.MediaStream> { ffmpegSubtitleStream },
                    method);
            }).Flatten();

        Option<WatermarkInputFile> watermarkInputFile = GetWatermarkInputFile(watermarkOptions, maybeFadePoints);

        string videoFormat = GetVideoFormat(playbackSettings);

        HardwareAccelerationMode hwAccel = GetHardwareAccelerationMode(playbackSettings, fillerKind);

        Option<string> hlsPlaylistPath = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live.m3u8")
            : Option<string>.None;

        Option<string> hlsSegmentTemplate = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live%06d.ts")
            : Option<string>.None;

        FrameSize scaledSize = ffmpegVideoStream.SquarePixelFrameSize(
            new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height));
        var paddedSize = new FrameSize(
            channel.FFmpegProfile.Resolution.Width,
            channel.FFmpegProfile.Resolution.Height);
        Option<FrameSize> cropSize = Option<FrameSize>.None;

        if (channel.FFmpegProfile.ScalingBehavior is ScalingBehavior.Stretch)
        {
            scaledSize = paddedSize;
        }

        if (channel.FFmpegProfile.ScalingBehavior is ScalingBehavior.Crop)
        {
            paddedSize = ffmpegVideoStream.SquarePixelFrameSizeForCrop(
                new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height));
            cropSize = new FrameSize(
                channel.FFmpegProfile.Resolution.Width,
                channel.FFmpegProfile.Resolution.Height);
        }

        var desiredState = new FrameState(
            playbackSettings.RealtimeOutput,
            fillerKind == FillerKind.Fallback,
            videoFormat,
            Optional(videoStream.Profile),
            Optional(playbackSettings.PixelFormat),
            scaledSize,
            paddedSize,
            cropSize,
            false,
            playbackSettings.FrameRate,
            playbackSettings.VideoBitrate,
            playbackSettings.VideoBufferSize,
            playbackSettings.VideoTrackTimeScale,
            playbackSettings.Deinterlace);

        var ffmpegState = new FFmpegState(
            saveReports,
            hwAccel,
            hwAccel,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            playbackSettings.StreamSeek,
            finish - now,
            channel.StreamingMode != StreamingMode.HttpLiveStreamingDirect,
            "ErsatzTV",
            channel.Name,
            maybeAudioStream.Map(s => Optional(s.Language)).Flatten(),
            subtitleLanguage,
            subtitleTitle,
            outputFormat,
            hlsPlaylistPath,
            hlsSegmentTemplate,
            ptsOffset,
            playbackSettings.ThreadCount,
            qsvExtraHardwareFrames);

        _logger.LogDebug("FFmpeg desired state {FrameState}", desiredState);

        IPipelineBuilder pipelineBuilder = await _pipelineBuilderFactory.GetBuilder(
            hwAccel,
            videoInputFile,
            audioInputFile,
            watermarkInputFile,
            subtitleInputFile,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            ffmpegPath);

        FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);

        pipelineAction?.Invoke(pipeline);

        return GetCommand(ffmpegPath, videoInputFile, audioInputFile, watermarkInputFile, None, pipeline);
    }

    /// <summary>
    /// Builds the ffmpeg command that renders an error message over a static background image,
    /// used when a playout item cannot be played.
    /// </summary>
    public async Task<Command> ForError(
        string ffmpegPath,
        Channel channel,
        Option<TimeSpan> duration,
        string errorMessage,
        bool hlsRealtime,
        long ptsOffset,
        VaapiDriver vaapiDriver,
        string vaapiDevice,
        Option<int> qsvExtraHardwareFrames)
    {
        FFmpegPlaybackSettings playbackSettings = FFmpegPlaybackSettingsCalculator.CalculateErrorSettings(
            channel.StreamingMode,
            channel.FFmpegProfile,
            hlsRealtime);

        IDisplaySize desiredResolution = channel.FFmpegProfile.Resolution;

        // scale the message text and margin relative to the channel resolution
        var fontSize = (int)Math.Round(channel.FFmpegProfile.Resolution.Height / 20.0);
        var margin = (int)Math.Round(channel.FFmpegProfile.Resolution.Height * 0.05);

        string subtitleFile = await new SubtitleBuilder(_tempFilePool)
            .WithResolution(desiredResolution)
            .WithFontName("Roboto")
            .WithFontSize(fontSize)
            .WithAlignment(2)
            .WithMarginV(margin)
            .WithPrimaryColor("&HFFFFFF")
            .WithFormattedContent(errorMessage.Replace(Environment.NewLine, "\\N"))
            .BuildFile();

        string audioFormat = playbackSettings.AudioFormat switch
        {
            FFmpegProfileAudioFormat.Ac3 => AudioFormat.Ac3,
            _ => AudioFormat.Aac
        };

        var audioState = new AudioState(
            audioFormat,
            playbackSettings.AudioChannels,
            playbackSettings.AudioBitrate,
            playbackSettings.AudioBufferSize,
            playbackSettings.AudioSampleRate,
            Option<TimeSpan>.None,
            AudioFilter.None);

        var desiredState = new FrameState(
            playbackSettings.RealtimeOutput,
            false,
            GetVideoFormat(playbackSettings),
            VideoProfile.Main,
            new PixelFormatYuv420P(),
            new FrameSize(desiredResolution.Width, desiredResolution.Height),
            new FrameSize(desiredResolution.Width, desiredResolution.Height),
            Option<FrameSize>.None,
            false,
            playbackSettings.FrameRate,
            playbackSettings.VideoBitrate,
            playbackSettings.VideoBufferSize,
            playbackSettings.VideoTrackTimeScale,
            playbackSettings.Deinterlace);

        OutputFormatKind outputFormat = channel.StreamingMode == StreamingMode.HttpLiveStreamingSegmenter
            ? OutputFormatKind.Hls
            : OutputFormatKind.MpegTs;

        Option<string> hlsPlaylistPath = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live.m3u8")
            : Option<string>.None;

        Option<string> hlsSegmentTemplate = outputFormat == OutputFormatKind.Hls
            ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live%06d.ts")
            : Option<string>.None;

        string videoPath = Path.Combine(FileSystemLayout.ResourcesCacheFolder, "background.png");

        var videoVersion = BackgroundImageMediaVersion.ForPath(videoPath, desiredResolution);

        var ffmpegVideoStream = new VideoStream(
            0,
            VideoFormat.GeneratedImage,
            new PixelFormatUnknown(), // leave this unknown so we convert to desired yuv420p
            ColorParams.Default,
            new FrameSize(videoVersion.Width, videoVersion.Height),
            videoVersion.SampleAspectRatio,
            videoVersion.DisplayAspectRatio,
            None,
            true,
            ScanKind.Progressive);

        var videoInputFile = new VideoInputFile(videoPath, new List<VideoStream> { ffmpegVideoStream });

        // TODO: ignore accel if this already failed once
        HardwareAccelerationMode hwAccel = GetHardwareAccelerationMode(playbackSettings, FillerKind.None);
        _logger.LogDebug("HW accel mode: {HwAccel}", hwAccel);

        var ffmpegState = new FFmpegState(
            false,
            HardwareAccelerationMode.None, // no hw accel decode since errors loop
            hwAccel,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            playbackSettings.StreamSeek,
            duration,
            channel.StreamingMode != StreamingMode.HttpLiveStreamingDirect,
            "ErsatzTV",
            channel.Name,
            None,
            None,
            None,
            outputFormat,
            hlsPlaylistPath,
            hlsSegmentTemplate,
            ptsOffset,
            Option<int>.None,
            qsvExtraHardwareFrames);

        var ffmpegSubtitleStream = new ErsatzTV.FFmpeg.MediaStream(0, "ass", StreamKind.Video);

        var audioInputFile = new NullAudioInputFile(audioState);

        var subtitleInputFile = new SubtitleInputFile(
            subtitleFile,
            new List<ErsatzTV.FFmpeg.MediaStream> { ffmpegSubtitleStream },
            SubtitleMethod.Burn);

        _logger.LogDebug("FFmpeg desired error state {FrameState}", desiredState);

        IPipelineBuilder pipelineBuilder = await _pipelineBuilderFactory.GetBuilder(
            hwAccel,
            videoInputFile,
            audioInputFile,
            None,
            subtitleInputFile,
            VaapiDriverName(hwAccel, vaapiDriver),
            VaapiDeviceName(hwAccel, vaapiDevice),
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            ffmpegPath);

        FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);

        return GetCommand(ffmpegPath, videoInputFile, audioInputFile, None, None, pipeline);
    }

    /// <summary>
    /// Builds the ffmpeg command that concatenates the channel's playout via the local concat endpoint.
    /// </summary>
    public async Task<Command> ConcatChannel(
        string ffmpegPath,
        bool saveReports,
        Channel channel,
        string scheme,
        string host)
    {
        var resolution = new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height);

        var concatInputFile = new ConcatInputFile(
            $"http://localhost:{Settings.ListenPort}/ffmpeg/concat/{channel.Number}",
            resolution);

        IPipelineBuilder pipelineBuilder = await _pipelineBuilderFactory.GetBuilder(
            HardwareAccelerationMode.None,
            None,
            None,
            None,
            None,
            None,
            None,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            ffmpegPath);

        FFmpegPipeline pipeline = pipelineBuilder.Concat(
            concatInputFile,
            FFmpegState.Concat(saveReports, channel.Name));

        return GetCommand(ffmpegPath, None, None, None, concatInputFile, pipeline);
    }

    /// <summary>
    /// Builds the ffmpeg command that wraps the channel's HLS segmenter output as a single stream.
    /// </summary>
    public async Task<Command> WrapSegmenter(
        string ffmpegPath,
        bool saveReports,
        Channel channel,
        string scheme,
        string host)
    {
        var resolution = new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height);

        var concatInputFile = new ConcatInputFile(
            $"http://localhost:{Settings.ListenPort}/iptv/channel/{channel.Number}.m3u8?mode=segmenter",
            resolution);

        IPipelineBuilder pipelineBuilder = await _pipelineBuilderFactory.GetBuilder(
            HardwareAccelerationMode.None,
            None,
            None,
            None,
            None,
            None,
            None,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            ffmpegPath);

        FFmpegPipeline pipeline = pipelineBuilder.WrapSegmenter(
            concatInputFile,
            FFmpegState.Concat(saveReports, channel.Name));

        return GetCommand(ffmpegPath, None, None, None, concatInputFile, pipeline);
    }

    /// <summary>
    /// Builds the ffmpeg command that resizes an image to the given height, preserving aspect ratio
    /// (width is passed as -1 so ffmpeg calculates it).
    /// </summary>
    public async Task<Command> ResizeImage(string ffmpegPath, string inputFile, string outputFile, int height)
    {
        var videoInputFile = new VideoInputFile(
            inputFile,
            new List<VideoStream>
            {
                new(
                    0,
                    string.Empty,
                    None,
                    ColorParams.Default,
                    FrameSize.Unknown,
                    string.Empty,
                    string.Empty,
                    None,
                    true,
                    ScanKind.Progressive)
            });

        IPipelineBuilder pipelineBuilder = await _pipelineBuilderFactory.GetBuilder(
            HardwareAccelerationMode.None,
            videoInputFile,
            None,
            None,
            None,
            None,
            None,
            FileSystemLayout.FFmpegReportsFolder,
            FileSystemLayout.FontsCacheFolder,
            ffmpegPath);

        FFmpegPipeline pipeline = pipelineBuilder.Resize(outputFile, new FrameSize(-1, height));

        return GetCommand(ffmpegPath, videoInputFile, None, None, None, pipeline, false);
    }

    /// <summary>
    /// Delegates song image generation to the legacy <see cref="FFmpegProcessService"/> implementation.
    /// </summary>
    public Task<Either<BaseError, string>> GenerateSongImage(
        string ffmpegPath,
        string ffprobePath,
        Option<string> subtitleFile,
        Channel channel,
        Option<ChannelWatermark> playoutItemWatermark,
        Option<ChannelWatermark> globalWatermark,
        MediaVersion videoVersion,
        string videoPath,
        bool boxBlur,
        Option<string> watermarkPath,
        WatermarkLocation watermarkLocation,
        int horizontalMarginPercent,
        int verticalMarginPercent,
        int watermarkWidthPercent,
        CancellationToken cancellationToken) =>
        _ffmpegProcessService.GenerateSongImage(
            ffmpegPath,
            ffprobePath,
            subtitleFile,
            channel,
            playoutItemWatermark,
            globalWatermark,
            videoVersion,
            videoPath,
            boxBlur,
            watermarkPath,
            watermarkLocation,
            horizontalMarginPercent,
            verticalMarginPercent,
            watermarkWidthPercent,
            cancellationToken);

    /// <summary>
    /// Converts watermark options and precomputed fade points into a pipeline watermark input file,
    /// or None when no watermark should be applied.
    /// </summary>
    private static Option<WatermarkInputFile> GetWatermarkInputFile(
        Option<WatermarkOptions> watermarkOptions,
        Option<List<FadePoint>> maybeFadePoints)
    {
        foreach (WatermarkOptions options in watermarkOptions)
        {
            foreach (ChannelWatermark watermark in options.Watermark)
            {
                // skip watermark if intermittent and no fade points
                if (watermark.Mode != ChannelWatermarkMode.None &&
                    (watermark.Mode != ChannelWatermarkMode.Intermittent ||
                     maybeFadePoints.Map(fp => fp.Count > 0).IfNone(false)))
                {
                    foreach (string path in options.ImagePath)
                    {
                        var watermarkInputFile = new WatermarkInputFile(
                            path,
                            new List<VideoStream>
                            {
                                new(
                                    options.ImageStreamIndex.IfNone(0),
                                    "unknown",
                                    new PixelFormatUnknown(),
                                    ColorParams.Default,
                                    new FrameSize(1, 1),
                                    string.Empty,
                                    string.Empty,
                                    Option<string>.None,
                                    !options.IsAnimated,
                                    ScanKind.Progressive)
                            },
                            new WatermarkState(
                                maybeFadePoints.Map(
                                    lst => lst.Map(
                                        fp =>
                                        {
                                            return fp switch
                                            {
                                                FadeInPoint fip => (WatermarkFadePoint)new WatermarkFadeIn(
                                                    fip.Time,
                                                    fip.EnableStart,
                                                    fip.EnableFinish),
                                                FadeOutPoint fop => new WatermarkFadeOut(
                                                    fop.Time,
                                                    fop.EnableStart,
                                                    fop.EnableFinish),
                                                _ => throw new NotSupportedException() // this will never happen
                                            };
                                        }).ToList()),
                                watermark.Location,
                                watermark.Size,
                                watermark.WidthPercent,
                                watermark.HorizontalMarginPercent,
                                watermark.VerticalMarginPercent,
                                watermark.Opacity,
                                watermark.PlaceWithinSourceContent));

                        return watermarkInputFile;
                    }
                }
            }
        }

        return None;
    }

    /// <summary>
    /// Turns a completed pipeline into a CliWrap <see cref="Command"/>, logging the pipeline shape
    /// and generating arguments/environment variables from the pipeline steps.
    /// </summary>
    private Command GetCommand(
        string ffmpegPath,
        Option<VideoInputFile> videoInputFile,
        Option<AudioInputFile> audioInputFile,
        Option<WatermarkInputFile> watermarkInputFile,
        Option<ConcatInputFile> concatInputFile,
        FFmpegPipeline pipeline,
        bool log = true)
    {
        IEnumerable<string> loggedSteps = pipeline.PipelineSteps.Map(ps => ps.GetType().Name);
        IEnumerable<string> loggedAudioFilters =
            audioInputFile.Map(f => f.FilterSteps.Map(af => af.GetType().Name)).Flatten();
        IEnumerable<string> loggedVideoFilters =
            videoInputFile.Map(f => f.FilterSteps.Map(vf => vf.GetType().Name)).Flatten();

        if (log)
        {
            _logger.LogDebug(
                "FFmpeg pipeline {PipelineSteps}, {AudioFilters}, {VideoFilters}",
                loggedSteps,
                loggedAudioFilters,
                loggedVideoFilters
            );
        }

        IList<EnvironmentVariable> environmentVariables =
            CommandGenerator.GenerateEnvironmentVariables(pipeline.PipelineSteps);
        IList<string> arguments = CommandGenerator.GenerateArguments(
            videoInputFile,
            audioInputFile,
            watermarkInputFile,
            concatInputFile,
            pipeline.PipelineSteps,
            pipeline.IsIntelVaapiOrQsv);

        if (environmentVariables.Any())
        {
            _logger.LogDebug("FFmpeg environment variables {EnvVars}", environmentVariables);
        }

        return Cli.Wrap(ffmpegPath)
            .WithArguments(arguments)
            .WithValidation(CommandResultValidation.None)
            .WithStandardErrorPipe(PipeTarget.ToStream(Stream.Null))
            .WithEnvironmentVariables(environmentVariables.ToDictionary(e => e.Key, e => e.Value));
    }

    // Maps the configured VAAPI driver enum to the LIBVA_DRIVER_NAME value; only relevant for VAAPI.
    private static Option<string> VaapiDriverName(HardwareAccelerationMode accelerationMode, VaapiDriver driver)
    {
        if (accelerationMode == HardwareAccelerationMode.Vaapi)
        {
            switch (driver)
            {
                case VaapiDriver.i965:
                    return "i965";
                case VaapiDriver.iHD:
                    return "iHD";
                case VaapiDriver.RadeonSI:
                    return "radeonsi";
                case VaapiDriver.Nouveau:
                    return "nouveau";
            }
        }

        return Option<string>.None;
    }

    // Returns the render node to use for VAAPI (and QSV on Linux), defaulting to renderD128.
    private static Option<string> VaapiDeviceName(HardwareAccelerationMode accelerationMode, string vaapiDevice) =>
        accelerationMode == HardwareAccelerationMode.Vaapi ||
        OperatingSystem.IsLinux() && accelerationMode == HardwareAccelerationMode.Qsv
            ? string.IsNullOrWhiteSpace(vaapiDevice) ? "/dev/dri/renderD128" : vaapiDevice
            : Option<string>.None;

    // Maps the profile's video format to the pipeline's video format constant.
    private static string GetVideoFormat(FFmpegPlaybackSettings playbackSettings) =>
        playbackSettings.VideoFormat switch
        {
            FFmpegProfileVideoFormat.Hevc => VideoFormat.Hevc,
            FFmpegProfileVideoFormat.H264 => VideoFormat.H264,
            FFmpegProfileVideoFormat.Mpeg2Video => VideoFormat.Mpeg2Video,
            FFmpegProfileVideoFormat.Copy => VideoFormat.Copy,
            _ => throw new ArgumentOutOfRangeException($"unexpected video format {playbackSettings.VideoFormat}")
        };

    // Maps the profile's acceleration kind to the pipeline's mode; fallback filler is always software.
    private static HardwareAccelerationMode GetHardwareAccelerationMode(
        FFmpegPlaybackSettings playbackSettings,
        FillerKind fillerKind) =>
        playbackSettings.HardwareAcceleration switch
        {
            _ when fillerKind == FillerKind.Fallback => HardwareAccelerationMode.None,
            HardwareAccelerationKind.Nvenc => HardwareAccelerationMode.Nvenc,
            HardwareAccelerationKind.Qsv => HardwareAccelerationMode.Qsv,
            HardwareAccelerationKind.Vaapi => HardwareAccelerationMode.Vaapi,
            HardwareAccelerationKind.VideoToolbox => HardwareAccelerationMode.VideoToolbox,
            HardwareAccelerationKind.Amf => HardwareAccelerationMode.Amf,
            _ => HardwareAccelerationMode.None
        };
}