
EncodingJobInfo.cs

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Session;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public abstract class EncodingJobInfo
    {
        private readonly ILogger _logger;

        public MediaStream VideoStream { get; set; }
        public VideoType VideoType { get; set; }
        public Dictionary<string, string> RemoteHttpHeaders { get; set; }
        public string OutputVideoCodec { get; set; }
        public MediaProtocol InputProtocol { get; set; }
        public string MediaPath { get; set; }
        public bool IsInputVideo { get; set; }
        public IIsoMount IsoMount { get; set; }
        public string[] PlayableStreamFileNames { get; set; }
        public string OutputAudioCodec { get; set; }
        public int? OutputVideoBitrate { get; set; }
        public MediaStream SubtitleStream { get; set; }
        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
        public List<string> SupportedSubtitleCodecs { get; set; }
        public int InternalSubtitleStreamOffset { get; set; }
        public MediaSourceInfo MediaSource { get; set; }
        public User User { get; set; }
        public long? RunTimeTicks { get; set; }
        public bool ReadInputAtNativeFramerate { get; set; }

        private TranscodeReason[] _transcodeReasons = null;

        public TranscodeReason[] TranscodeReasons
        {
            get
            {
                if (_transcodeReasons == null)
                {
                    _transcodeReasons = (BaseRequest.TranscodeReasons ?? string.Empty)
                        .Split(',')
                        .Where(i => !string.IsNullOrWhiteSpace(i))
                        .Select(v => (TranscodeReason)Enum.Parse(typeof(TranscodeReason), v, true))
                        .ToArray();
                }

                return _transcodeReasons;
            }
        }
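
        // Example for TranscodeReasons (illustrative only): a request value such as
        // "ContainerNotSupported,AudioCodecNotSupported" is split on ',' and parsed
        // case-insensitively into TranscodeReason values, yielding
        // { TranscodeReason.ContainerNotSupported, TranscodeReason.AudioCodecNotSupported },
        // assuming members with those names exist on the TranscodeReason enum.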
        public bool IgnoreInputDts
        {
            get
            {
                return MediaSource.IgnoreDts;
            }
        }

        public bool IgnoreInputIndex
        {
            get
            {
                return MediaSource.IgnoreIndex;
            }
        }

        public bool GenPtsInput
        {
            get
            {
                return MediaSource.GenPtsInput;
            }
        }

        public bool DiscardCorruptFramesInput
        {
            get
            {
                return false;
            }
        }

        public bool EnableFastSeekInput
        {
            get
            {
                return false;
            }
        }

        public bool GenPtsOutput
        {
            get
            {
                return false;
            }
        }

        public string OutputContainer { get; set; }

        public string OutputVideoSync
        {
            get
            {
                // For live tv + in progress recordings
                if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase) || string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    if (!MediaSource.RunTimeTicks.HasValue)
                    {
                        return "cfr";
                    }
                }

                return "-1";
            }
        }
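
        // Note on OutputVideoSync (assumption, not confirmed by this file): the value is expected
        // to feed ffmpeg's -vsync option downstream, so "cfr" forces a constant frame rate for
        // live/in-progress TS input with no known runtime, while "-1" leaves frame-sync handling
        // in automatic mode.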
        public bool EnableMpDecimate
        {
            get { return MediaSource.EnableMpDecimate; }
        }

        public string AlbumCoverPath { get; set; }
        public string InputAudioSync { get; set; }
        public string InputVideoSync { get; set; }
        public TransportStreamTimestamp InputTimestamp { get; set; }
        public MediaStream AudioStream { get; set; }
        public List<string> SupportedAudioCodecs { get; set; }
        public List<string> SupportedVideoCodecs { get; set; }
        public string InputContainer { get; set; }
        public IsoType? IsoType { get; set; }
        public bool EnableMpegtsM2TsMode { get; set; }
        public BaseEncodingJobOptions BaseRequest { get; set; }

        public long? StartTimeTicks
        {
            get { return BaseRequest.StartTimeTicks; }
        }

        public bool CopyTimestamps
        {
            get { return BaseRequest.CopyTimestamps; }
        }

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;

        public bool DeInterlace(string videoCodec, bool forceDeinterlaceIfSourceIsInterlaced)
        {
            var videoStream = VideoStream;
            var isInputInterlaced = videoStream != null && videoStream.IsInterlaced;

            if (!isInputInterlaced)
            {
                return false;
            }

            // Support general param
            if (BaseRequest.DeInterlace)
            {
                return true;
            }

            if (!string.IsNullOrWhiteSpace(videoCodec))
            {
                if (string.Equals(BaseRequest.GetOption(videoCodec, "deinterlace"), "true", StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }

            if (forceDeinterlaceIfSourceIsInterlaced)
            {
                if (isInputInterlaced)
                {
                    return true;
                }
            }

            return false;
        }
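
        // Example for DeInterlace (illustrative): with an interlaced source, deinterlacing is
        // enabled either by the general BaseRequest.DeInterlace flag or by a codec-scoped option,
        // e.g. BaseRequest.GetOption("h264", "deinterlace") == "true" ("h264" here is just a
        // sample codec name, not a value guaranteed by this class).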
        public string[] GetRequestedProfiles(string codec)
        {
            if (!string.IsNullOrWhiteSpace(BaseRequest.Profile))
            {
                return BaseRequest.Profile.Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries);
            }

            if (!string.IsNullOrWhiteSpace(codec))
            {
                var profile = BaseRequest.GetOption(codec, "profile");

                if (!string.IsNullOrWhiteSpace(profile))
                {
                    return profile.Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries);
                }
            }

            return new string[] { };
        }
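
        // Example for GetRequestedProfiles (illustrative): a Profile value of "high|main" or
        // "high,main" yields new[] { "high", "main" }; with no profile requested or configured
        // for the codec, the result is an empty array.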
        public string GetRequestedLevel(string codec)
        {
            if (!string.IsNullOrWhiteSpace(BaseRequest.Level))
            {
                return BaseRequest.Level;
            }

            if (!string.IsNullOrWhiteSpace(codec))
            {
                return BaseRequest.GetOption(codec, "level");
            }

            return null;
        }

        public int? GetRequestedMaxRefFrames(string codec)
        {
            // Prefer an explicitly requested value, then fall back to the codec-scoped option
            if (BaseRequest.MaxRefFrames.HasValue)
            {
                return BaseRequest.MaxRefFrames;
            }

            if (!string.IsNullOrWhiteSpace(codec))
            {
                var value = BaseRequest.GetOption(codec, "maxrefframes");

                int result;
                if (!string.IsNullOrWhiteSpace(value) && int.TryParse(value, NumberStyles.Any, CultureInfo.InvariantCulture, out result))
                {
                    return result;
                }
            }

            return null;
        }
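
        // Example for GetRequestedMaxRefFrames (illustrative): a codec-scoped option value of "4"
        // parses to 4; a missing or non-numeric value falls through and yields null.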
        public bool IsVideoRequest { get; set; }
        public TranscodingJobType TranscodingType { get; set; }

        public EncodingJobInfo(ILogger logger, TranscodingJobType jobType)
        {
            _logger = logger;
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            PlayableStreamFileNames = new string[] { };
            SupportedAudioCodecs = new List<string>();
            SupportedVideoCodecs = new List<string>();
            SupportedSubtitleCodecs = new List<string>();
        }

        public bool IsSegmentedLiveStream
        {
            get
            {
                return TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;
            }
        }

        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase);
            }

            return false;
        }

        public int? TotalOutputBitrate
        {
            get
            {
                return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
            }
        }

        public int? OutputWidth
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Width);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Height);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }
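
        // Example for OutputWidth/OutputHeight (assumption: DrawingUtils.Resize scales the source
        // size proportionally within the requested constraints): a 1920x1080 source with
        // BaseRequest.MaxWidth = 1280 and no other constraints would produce 1280 / 720.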
        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                }

                return null;
            }
        }
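
        // Example for OutputAudioSampleRate (illustrative): a requested rate of 88200 is clamped
        // to 44100 by the Math.Min call above, while stream copy or static delivery returns the
        // source stream's own sample rate.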
        public int? OutputAudioBitDepth
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.BitDepth;
                    }
                }

                //else if (BaseRequest.AudioSampleRate.HasValue)
                //{
                //    // Don't exceed what the encoder supports
                //    // Seeing issues of attempting to encode to 88200
                //    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                //}

                return null;
            }
        }
        /// <summary>
        /// Predicts the video level that will be in the output stream
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.Level;
                }

                var level = GetRequestedLevel(ActualOutputVideoCodec);

                double result;
                if (!string.IsNullOrWhiteSpace(level) && double.TryParse(level, NumberStyles.Any, CultureInfo.InvariantCulture, out result))
                {
                    return result;
                }

                return null;
            }
        }

        /// <summary>
        /// Predicts the video bit depth that will be in the output stream
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.BitDepth;
            }
        }
        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.RefFrames;
                }

                return null;
            }
        }
        /// <summary>
        /// Predicts the frame rate that will be in the output stream
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                var stream = VideoStream;
                var requestedFramerate = BaseRequest.MaxFramerate ?? BaseRequest.Framerate;

                return requestedFramerate.HasValue && !BaseRequest.Static
                    ? requestedFramerate
                    : stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
            }
        }
        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase)
                    ? TransportStreamTimestamp.Valid
                    : TransportStreamTimestamp.None;

                return !BaseRequest.Static
                    ? defaultValue
                    : InputTimestamp;
            }
        }
        /// <summary>
        /// Predicts the packet length that will be in the output stream
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.PacketLength;
            }
        }

        /// <summary>
        /// Predicts the video profile that will be in the output stream
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.Profile;
                }

                var requestedProfile = GetRequestedProfiles(ActualOutputVideoCodec).FirstOrDefault();

                if (!string.IsNullOrWhiteSpace(requestedProfile))
                {
                    return requestedProfile;
                }

                return null;
            }
        }
        public string TargetVideoCodecTag
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.CodecTag;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAnamorphic;
                }

                return false;
            }
        }

        public string ActualOutputVideoCodec
        {
            get
            {
                var codec = OutputVideoCodec;

                if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    var stream = VideoStream;

                    if (stream != null)
                    {
                        return stream.Codec;
                    }

                    return null;
                }

                return codec;
            }
        }

        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
                }

                if (DeInterlace(ActualOutputVideoCodec, true))
                {
                    return false;
                }

                return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
            }
        }
        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAVC;
                }

                return false;
            }
        }

        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }

        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }
        protected void DisposeIsoMount()
        {
            if (IsoMount != null)
            {
                try
                {
                    IsoMount.Dispose();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("Error disposing iso mount", ex);
                }

                IsoMount = null;
            }
        }

        public abstract void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate);
    }
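
    // Usage sketch (illustrative only; the type name below is hypothetical and not part of this
    // file): a concrete job type passes its logger and TranscodingJobType to the base constructor
    // and overrides ReportTranscodingProgress to forward progress to its own reporting mechanism.
    //
    //     public class ExampleEncodingJob : EncodingJobInfo
    //     {
    //         public ExampleEncodingJob(ILogger logger)
    //             : base(logger, TranscodingJobType.Hls)
    //         {
    //         }
    //
    //         public override void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
    //         {
    //             // Forward the values to session/progress reporting here.
    //         }
    //     }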
    /// <summary>
    /// Enum TranscodingJobType
    /// </summary>
    public enum TranscodingJobType
    {
        /// <summary>
        /// The progressive
        /// </summary>
        Progressive,

        /// <summary>
        /// The HLS
        /// </summary>
        Hls,

        /// <summary>
        /// The dash
        /// </summary>
        Dash
    }
}