#nullable disable

#pragma warning disable CS1591, SA1401

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Jellyfin.Data.Enums;
using Jellyfin.Database.Implementations.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Net;
using MediaBrowser.Model.Session;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public class EncodingJobInfo
    {
        private static readonly char[] _separators = ['|', ','];

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;

        private TranscodeReason? _transcodeReasons = null;

        public EncodingJobInfo(TranscodingJobType jobType)
        {
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            SupportedAudioCodecs = Array.Empty<string>();
            SupportedVideoCodecs = Array.Empty<string>();
            SupportedSubtitleCodecs = Array.Empty<string>();
        }

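        // TranscodeReasons lazily parses the client-supplied BaseRequest.TranscodeReasons string
        // into the TranscodeReason flags enum and caches the result so the parse runs only once.
        // Illustrative example (flag names assumed from the TranscodeReason enum): a request
        // carrying "ContainerNotSupported, AudioCodecNotSupported" resolves to the combined flags value.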
        public TranscodeReason TranscodeReasons
        {
            get
            {
                if (!_transcodeReasons.HasValue)
                {
                    if (BaseRequest.TranscodeReasons is null)
                    {
                        _transcodeReasons = 0;
                        return 0;
                    }

                    _ = Enum.TryParse<TranscodeReason>(BaseRequest.TranscodeReasons, out var reason);
                    _transcodeReasons = reason;
                }

                return _transcodeReasons.Value;
            }
        }

        public IProgress<double> Progress { get; set; }

        public MediaStream VideoStream { get; set; }

        public VideoType VideoType { get; set; }

        public Dictionary<string, string> RemoteHttpHeaders { get; set; }

        public string OutputVideoCodec { get; set; }

        public MediaProtocol InputProtocol { get; set; }

        public string MediaPath { get; set; }

        public bool IsInputVideo { get; set; }

        public string OutputAudioCodec { get; set; }

        public int? OutputVideoBitrate { get; set; }

        public MediaStream SubtitleStream { get; set; }

        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }

        public string[] SupportedSubtitleCodecs { get; set; }

        public int InternalSubtitleStreamOffset { get; set; }

        public MediaSourceInfo MediaSource { get; set; }

        public User User { get; set; }

        public long? RunTimeTicks { get; set; }

        public bool ReadInputAtNativeFramerate { get; set; }

        public string OutputFilePath { get; set; }

        public string MimeType { get; set; }

        public bool IgnoreInputDts => MediaSource.IgnoreDts;

        public bool IgnoreInputIndex => MediaSource.IgnoreIndex;

        public bool GenPtsInput => MediaSource.GenPtsInput;

        public bool DiscardCorruptFramesInput => false;

        public bool EnableFastSeekInput => false;

        public bool GenPtsOutput => false;

        public string OutputContainer { get; set; }

        public string OutputVideoSync { get; set; }

        public string AlbumCoverPath { get; set; }

        public string InputAudioSync { get; set; }

        public string InputVideoSync { get; set; }

        public TransportStreamTimestamp InputTimestamp { get; set; }

        public MediaStream AudioStream { get; set; }

        public string[] SupportedAudioCodecs { get; set; }

        public string[] SupportedVideoCodecs { get; set; }

        public string InputContainer { get; set; }

        public IsoType? IsoType { get; set; }

        public BaseEncodingJobOptions BaseRequest { get; set; }

        public bool IsVideoRequest { get; set; }

        public TranscodingJobType TranscodingType { get; set; }

        public long? StartTimeTicks => BaseRequest.StartTimeTicks;

        public bool CopyTimestamps => BaseRequest.CopyTimestamps;

        public bool IsSegmentedLiveStream
            => TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;

        public int? TotalOutputBitrate => (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);

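        // OutputWidth/OutputHeight resize the probed source dimensions against the requested
        // Width/Height/MaxWidth/MaxHeight constraints; any constraint the request leaves unset is
        // passed to DrawingUtils.Resize as 0. Without probed dimensions, the raw request values are
        // returned for video requests and null otherwise.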
        public int? OutputWidth
        {
            get
            {
                if (VideoStream is not null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageDimensions(VideoStream.Width.Value, VideoStream.Height.Value);

                    var newSize = DrawingUtils.Resize(
                        size,
                        BaseRequest.Width ?? 0,
                        BaseRequest.Height ?? 0,
                        BaseRequest.MaxWidth ?? 0,
                        BaseRequest.MaxHeight ?? 0);

                    return newSize.Width;
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream is not null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageDimensions(VideoStream.Width.Value, VideoStream.Height.Value);

                    var newSize = DrawingUtils.Resize(
                        size,
                        BaseRequest.Width ?? 0,
                        BaseRequest.Height ?? 0,
                        BaseRequest.MaxWidth ?? 0,
                        BaseRequest.MaxHeight ?? 0);

                    return newSize.Height;
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }

        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputAudioCodec))
                {
                    if (AudioStream is not null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return BaseRequest.AudioSampleRate.Value;
                }

                return null;
            }
        }

        public int? OutputAudioBitDepth
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputAudioCodec))
                {
                    if (AudioStream is not null)
                    {
                        return AudioStream.BitDepth;
                    }
                }

                return null;
            }
        }

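        // For static/stream-copy output, the target level is whatever the source stream reports;
        // otherwise the requested level string is parsed with the invariant culture.
        // Illustrative example (assumed client input): a requested h264 level of "41" parses to 41.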
        /// <summary>
        /// Gets the target video level.
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.Level;
                }

                var level = GetRequestedLevel(ActualOutputVideoCodec);
                if (double.TryParse(level, CultureInfo.InvariantCulture, out var result))
                {
                    return result;
                }

                return null;
            }
        }

        /// <summary>
        /// Gets the target video bit depth.
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.BitDepth;
                }

                return null;
            }
        }

        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.RefFrames;
                }

                return null;
            }
        }

        /// <summary>
        /// Gets the target framerate.
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.ReferenceFrameRate;
                }

                return BaseRequest.MaxFramerate ?? BaseRequest.Framerate;
            }
        }

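        // TargetTimestamp mirrors the input timestamp type for static (direct) streams; otherwise
        // only an m2ts output container is reported as carrying valid transport stream timestamps.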
        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return InputTimestamp;
                }

                return string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase) ?
                    TransportStreamTimestamp.Valid :
                    TransportStreamTimestamp.None;
            }
        }

        /// <summary>
        /// Gets the target packet length.
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.PacketLength;
                }

                return null;
            }
        }

        /// <summary>
        /// Gets the target video profile.
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.Profile;
                }

                var requestedProfile = GetRequestedProfiles(ActualOutputVideoCodec).FirstOrDefault();
                if (!string.IsNullOrEmpty(requestedProfile))
                {
                    return requestedProfile;
                }

                return null;
            }
        }

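        // The requested range type is matched case-insensitively against the VideoRangeType enum.
        // Illustrative example (enum member assumed): a per-codec "rangetype" option of "HDR10"
        // parses to VideoRangeType.HDR10; anything unrecognized falls back to Unknown.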
        /// <summary>
        /// Gets the target video range type.
        /// </summary>
        public VideoRangeType TargetVideoRangeType
        {
            get
            {
                if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.VideoRangeType ?? VideoRangeType.Unknown;
                }

                if (Enum.TryParse(GetRequestedRangeTypes(ActualOutputVideoCodec).FirstOrDefault() ?? "Unknown", true, out VideoRangeType requestedRangeType))
                {
                    return requestedRangeType;
                }

                return VideoRangeType.Unknown;
            }
        }

        public string TargetVideoCodecTag
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.CodecTag;
                }

                return null;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.IsAnamorphic;
                }

                return false;
            }
        }

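        // ActualOutputVideoCodec/ActualOutputAudioCodec report the codec that actually reaches the
        // client: the source stream's codec when stream-copying, the configured output codec otherwise.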
        public string ActualOutputVideoCodec
        {
            get
            {
                if (VideoStream is null)
                {
                    return null;
                }

                if (EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream.Codec;
                }

                return OutputVideoCodec;
            }
        }

        public string ActualOutputAudioCodec
        {
            get
            {
                if (AudioStream is null)
                {
                    return null;
                }

                if (EncodingHelper.IsCopyCodec(OutputAudioCodec))
                {
                    return AudioStream.Codec;
                }

                return OutputAudioCodec;
            }
        }

        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static
                    || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.IsInterlaced;
                }

                if (DeInterlace(ActualOutputVideoCodec, true))
                {
                    return false;
                }

                return VideoStream?.IsInterlaced;
            }
        }

        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
                {
                    return VideoStream?.IsAVC;
                }

                return false;
            }
        }

        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }

        public bool EnableAudioVbrEncoding => BaseRequest.EnableAudioVbrEncoding;

        public int HlsListSize => 0;

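        // Breaking segments on non-keyframes only applies to segmented (non-progressive) jobs that
        // are not live streams, and only when the client asked for it while the video is stream-copied.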
        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && EncodingHelper.IsCopyCodec(videoCodec);
            }

            return false;
        }

        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }

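        // An explicitly configured MimeType takes precedence; otherwise the type is resolved from
        // the output path. When enableStreamDefault is false, null is passed as the fallback value,
        // so an unrecognized extension yields no MIME type (assumption about that overload's behavior).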
        public string GetMimeType(string outputPath, bool enableStreamDefault = true)
        {
            if (!string.IsNullOrEmpty(MimeType))
            {
                return MimeType;
            }

            if (enableStreamDefault)
            {
                return MimeTypes.GetMimeType(outputPath);
            }

            return MimeTypes.GetMimeType(outputPath, null);
        }

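        // Deinterlacing is only considered when the source stream is actually interlaced, then in
        // order of precedence: the general DeInterlace request flag, the per-codec "deinterlace"
        // option, and finally the caller-supplied force flag.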
        public bool DeInterlace(string videoCodec, bool forceDeinterlaceIfSourceIsInterlaced)
        {
            var videoStream = VideoStream;
            var isInputInterlaced = videoStream is not null && videoStream.IsInterlaced;

            if (!isInputInterlaced)
            {
                return false;
            }

            // Support general param
            if (BaseRequest.DeInterlace)
            {
                return true;
            }

            if (!string.IsNullOrEmpty(videoCodec))
            {
                if (string.Equals(BaseRequest.GetOption(videoCodec, "deinterlace"), "true", StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }

            return forceDeinterlaceIfSourceIsInterlaced;
        }

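        // The GetRequested* helpers read either the flat request property or the per-codec option,
        // splitting multi-valued strings on '|' or ','. Illustrative example (hypothetical input):
        // a Profile of "high|main" yields ["high", "main"], and the first entry is treated as the
        // preferred value by TargetVideoProfile above.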
        public string[] GetRequestedProfiles(string codec)
        {
            if (!string.IsNullOrEmpty(BaseRequest.Profile))
            {
                return BaseRequest.Profile.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var profile = BaseRequest.GetOption(codec, "profile");

                if (!string.IsNullOrEmpty(profile))
                {
                    return profile.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
                }
            }

            return Array.Empty<string>();
        }

        public string[] GetRequestedRangeTypes(string codec)
        {
            if (!string.IsNullOrEmpty(BaseRequest.VideoRangeType))
            {
                return BaseRequest.VideoRangeType.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var rangetype = BaseRequest.GetOption(codec, "rangetype");

                if (!string.IsNullOrEmpty(rangetype))
                {
                    return rangetype.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
                }
            }

            return Array.Empty<string>();
        }

        public string[] GetRequestedCodecTags(string codec)
        {
            if (!string.IsNullOrEmpty(BaseRequest.CodecTag))
            {
                return BaseRequest.CodecTag.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var codectag = BaseRequest.GetOption(codec, "codectag");

                if (!string.IsNullOrEmpty(codectag))
                {
                    return codectag.Split(_separators, StringSplitOptions.RemoveEmptyEntries);
                }
            }

            return Array.Empty<string>();
        }

        public string GetRequestedLevel(string codec)
        {
            if (!string.IsNullOrEmpty(BaseRequest.Level))
            {
                return BaseRequest.Level;
            }

            if (!string.IsNullOrEmpty(codec))
            {
                return BaseRequest.GetOption(codec, "level");
            }

            return null;
        }

        public int? GetRequestedMaxRefFrames(string codec)
        {
            if (BaseRequest.MaxRefFrames.HasValue)
            {
                return BaseRequest.MaxRefFrames;
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var value = BaseRequest.GetOption(codec, "maxrefframes");
                if (int.TryParse(value, CultureInfo.InvariantCulture, out var result))
                {
                    return result;
                }
            }

            return null;
        }

        public int? GetRequestedVideoBitDepth(string codec)
        {
            if (BaseRequest.MaxVideoBitDepth.HasValue)
            {
                return BaseRequest.MaxVideoBitDepth;
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var value = BaseRequest.GetOption(codec, "videobitdepth");
                if (int.TryParse(value, CultureInfo.InvariantCulture, out var result))
                {
                    return result;
                }
            }

            return null;
        }

        public int? GetRequestedAudioBitDepth(string codec)
        {
            if (BaseRequest.MaxAudioBitDepth.HasValue)
            {
                return BaseRequest.MaxAudioBitDepth;
            }

            if (!string.IsNullOrEmpty(codec))
            {
                var value = BaseRequest.GetOption(codec, "audiobitdepth");
                if (int.TryParse(value, CultureInfo.InvariantCulture, out var result))
                {
                    return result;
                }
            }

            return null;
        }

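        // Audio channel selection precedence: the per-codec "audiochannels" option, then
        // MaxAudioChannels, then AudioChannels, then TranscodingMaxAudioChannels; otherwise
        // no channel limit is requested.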
        public int? GetRequestedAudioChannels(string codec)
        {
            if (!string.IsNullOrEmpty(codec))
            {
                var value = BaseRequest.GetOption(codec, "audiochannels");
                if (int.TryParse(value, CultureInfo.InvariantCulture, out var result))
                {
                    return result;
                }
            }

            if (BaseRequest.MaxAudioChannels.HasValue)
            {
                return BaseRequest.MaxAudioChannels;
            }

            if (BaseRequest.AudioChannels.HasValue)
            {
                return BaseRequest.AudioChannels;
            }

            if (BaseRequest.TranscodingMaxAudioChannels.HasValue)
            {
                return BaseRequest.TranscodingMaxAudioChannels;
            }

            return null;
        }

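        // The base implementation only forwards the completed percentage to Progress; the remaining
        // parameters are available for overrides that report richer progress information.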
        public virtual void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
        {
            Progress.Report(percentComplete.Value);
        }
    }
}