EncodingJobInfo.cs

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Session;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public abstract class EncodingJobInfo
    {
        private readonly ILogger _logger;

        public MediaStream VideoStream { get; set; }
        public VideoType VideoType { get; set; }
        public Dictionary<string, string> RemoteHttpHeaders { get; set; }
        public string OutputVideoCodec { get; set; }
        public MediaProtocol InputProtocol { get; set; }
        public string MediaPath { get; set; }
        public bool IsInputVideo { get; set; }
        public IIsoMount IsoMount { get; set; }
        public string[] PlayableStreamFileNames { get; set; }
        public string OutputAudioCodec { get; set; }
        public int? OutputVideoBitrate { get; set; }
        public MediaStream SubtitleStream { get; set; }
        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
        public List<string> SupportedSubtitleCodecs { get; set; }
        public int InternalSubtitleStreamOffset { get; set; }
        public MediaSourceInfo MediaSource { get; set; }
        public User User { get; set; }
        public long? RunTimeTicks { get; set; }
        public bool ReadInputAtNativeFramerate { get; set; }
        private TranscodeReason[] _transcodeReasons = null;
        public TranscodeReason[] TranscodeReasons
        {
            get
            {
                if (_transcodeReasons == null)
                {
                    _transcodeReasons = (BaseRequest.TranscodeReasons ?? string.Empty)
                        .Split(',')
                        .Where(i => !string.IsNullOrWhiteSpace(i))
                        .Select(v => (TranscodeReason)Enum.Parse(typeof(TranscodeReason), v, true))
                        .ToArray();
                }

                return _transcodeReasons;
            }
        }
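
        // Example (hypothetical request value): a BaseRequest.TranscodeReasons string of
        // "ContainerNotSupported,AudioCodecNotSupported" is split on commas and parsed
        // case-insensitively into [TranscodeReason.ContainerNotSupported, TranscodeReason.AudioCodecNotSupported].
        // The exact member names depend on the TranscodeReason enum in MediaBrowser.Model.Session.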
        public bool IgnoreInputDts
        {
            get
            {
                return MediaSource.IgnoreDts;
            }
        }

        public bool IgnoreInputIndex
        {
            get
            {
                return MediaSource.IgnoreIndex;
            }
        }

        public bool GenPtsInput
        {
            get
            {
                return MediaSource.GenPtsInput;
            }
        }

        public bool DiscardCorruptFramesInput
        {
            get
            {
                return false;
            }
        }

        public bool EnableFastSeekInput
        {
            get
            {
                return false;
            }
        }

        public bool GenPtsOutput
        {
            get
            {
                return false;
            }
        }

        public string OutputContainer { get; set; }

        public string OutputVideoSync
        {
            get
            {
                // For live tv + in progress recordings
                if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase) || string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    if (!MediaSource.RunTimeTicks.HasValue)
                    {
                        return "cfr";
                    }
                }

                return "-1";
            }
        }
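
        // Note: these values appear intended for ffmpeg's -vsync option, where "cfr" forces a
        // constant frame rate and "-1" lets ffmpeg pick a sync method automatically. That mapping
        // is an assumption based on ffmpeg's documented behavior, not stated elsewhere in this file.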
        public bool EnableMpDecimate
        {
            get { return MediaSource.EnableMpDecimate; }
        }

        public string AlbumCoverPath { get; set; }
        public string InputAudioSync { get; set; }
        public string InputVideoSync { get; set; }
        public TransportStreamTimestamp InputTimestamp { get; set; }
        public MediaStream AudioStream { get; set; }
        public List<string> SupportedAudioCodecs { get; set; }
        public List<string> SupportedVideoCodecs { get; set; }
        public string InputContainer { get; set; }
        public IsoType? IsoType { get; set; }
        public bool EnableMpegtsM2TsMode { get; set; }
        public BaseEncodingJobOptions BaseRequest { get; set; }

        public long? StartTimeTicks
        {
            get { return BaseRequest.StartTimeTicks; }
        }

        public bool CopyTimestamps
        {
            get { return BaseRequest.CopyTimestamps; }
        }

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;
        public bool DeInterlace(string videoCodec)
        {
            // Support general param
            if (BaseRequest.DeInterlace)
            {
                return true;
            }

            if (!string.IsNullOrWhiteSpace(videoCodec))
            {
                if (string.Equals(BaseRequest.GetOption(videoCodec, "deinterlace"), "true", StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }

            return false;
        }
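
        // Example (hypothetical): if the request carries a codec-specific "deinterlace" option for
        // h264 set to "true", DeInterlace("h264") returns true even when the general
        // BaseRequest.DeInterlace flag is false. How GetOption resolves per-codec keys is defined in
        // BaseEncodingJobOptions, not here, so the exact option key format is an assumption.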
        public bool IsVideoRequest { get; set; }
        public TranscodingJobType TranscodingType { get; set; }

        public EncodingJobInfo(ILogger logger, TranscodingJobType jobType)
        {
            _logger = logger;
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            PlayableStreamFileNames = new string[] { };
            SupportedAudioCodecs = new List<string>();
            SupportedVideoCodecs = new List<string>();
            SupportedSubtitleCodecs = new List<string>();
        }

        public bool IsSegmentedLiveStream
        {
            get
            {
                return TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;
            }
        }

        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase);
            }

            return false;
        }

        public int? TotalOutputBitrate
        {
            get
            {
                return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
            }
        }
        public int? OutputWidth
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Width);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Height);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }
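
        // Example (hypothetical): a 1920x1080 source with a request of MaxWidth=1280 would yield
        // OutputWidth=1280 and OutputHeight=720, assuming DrawingUtils.Resize scales proportionally
        // within the given Width/Height/MaxWidth/MaxHeight constraints. That aspect-preserving
        // behavior is an assumption about DrawingUtils.Resize, which is not defined in this file.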
        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                }

                return null;
            }
        }
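
        // Example: a requested AudioSampleRate of 88200 Hz is clamped to 44100 Hz by the Math.Min
        // call above; a request of 48000 Hz is likewise reduced to 44100 Hz under the current clamp.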
        public int? OutputAudioBitDepth
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.BitDepth;
                    }
                }

                //else if (BaseRequest.AudioSampleRate.HasValue)
                //{
                //    // Don't exceed what the encoder supports
                //    // Seeing issues of attempting to encode to 88200
                //    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                //}

                return null;
            }
        }
        /// <summary>
        /// Predicts the video level that will be in the output stream
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                var stream = VideoStream;
                var request = BaseRequest;

                return !string.IsNullOrEmpty(request.Level) && !request.Static
                    ? double.Parse(request.Level, CultureInfo.InvariantCulture)
                    : stream == null ? null : stream.Level;
            }
        }
        /// <summary>
        /// Predicts the video bit depth that will be in the output stream
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.BitDepth;
            }
        }
        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.RefFrames;
            }
        }
        /// <summary>
        /// Predicts the frame rate that will be in the output stream
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                var stream = VideoStream;
                var requestedFramerate = BaseRequest.MaxFramerate ?? BaseRequest.Framerate;

                return requestedFramerate.HasValue && !BaseRequest.Static
                    ? requestedFramerate
                    : stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
            }
        }
        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase)
                    ? TransportStreamTimestamp.Valid
                    : TransportStreamTimestamp.None;

                return !BaseRequest.Static
                    ? defaultValue
                    : InputTimestamp;
            }
        }
        /// <summary>
        /// Predicts the packet length that will be in the output stream
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.PacketLength;
            }
        }
        /// <summary>
        /// Predicts the video profile that will be in the output stream
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                var stream = VideoStream;
                return !string.IsNullOrEmpty(BaseRequest.Profile) && !BaseRequest.Static
                    ? BaseRequest.Profile
                    : stream == null ? null : stream.Profile;
            }
        }
        public string TargetVideoCodecTag
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.CodecTag;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAnamorphic;
                }

                return false;
            }
        }

        public string ActualOutputVideoCodec
        {
            get
            {
                var codec = OutputVideoCodec;

                if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    var stream = VideoStream;

                    if (stream != null)
                    {
                        return stream.Codec;
                    }

                    return null;
                }

                return codec;
            }
        }

        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
                }

                if (DeInterlace(ActualOutputVideoCodec))
                {
                    return false;
                }

                return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
            }
        }

        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAVC;
                }

                return false;
            }
        }

        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }

        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }
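
        // Example: for a transcode (BaseRequest.Static == false) of a source reporting three audio
        // streams, TargetAudioStreamCount is capped at 1 by GetMediaStreamCount; for a static /
        // direct-stream request the full count of 3 is reported.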
        protected void DisposeIsoMount()
        {
            if (IsoMount != null)
            {
                try
                {
                    IsoMount.Dispose();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("Error disposing iso mount", ex);
                }

                IsoMount = null;
            }
        }

        public abstract void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate);
    }

    /// <summary>
    /// Enum TranscodingJobType
    /// </summary>
    public enum TranscodingJobType
    {
        /// <summary>
        /// The progressive
        /// </summary>
        Progressive,

        /// <summary>
        /// The HLS
        /// </summary>
        Hls,

        /// <summary>
        /// The dash
        /// </summary>
        Dash
    }
}
  501. }