EncodingJobInfo.cs

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Session;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public abstract class EncodingJobInfo
    {
        private readonly ILogger _logger;

        public MediaStream VideoStream { get; set; }
        public VideoType VideoType { get; set; }
        public Dictionary<string, string> RemoteHttpHeaders { get; set; }
        public string OutputVideoCodec { get; set; }
        public MediaProtocol InputProtocol { get; set; }
        public string MediaPath { get; set; }
        public bool IsInputVideo { get; set; }
        public IIsoMount IsoMount { get; set; }
        public List<string> PlayableStreamFileNames { get; set; }
        public string OutputAudioCodec { get; set; }
        public int? OutputVideoBitrate { get; set; }
        public MediaStream SubtitleStream { get; set; }
        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
        public List<string> SupportedSubtitleCodecs { get; set; }
        public int InternalSubtitleStreamOffset { get; set; }
        public MediaSourceInfo MediaSource { get; set; }
        public User User { get; set; }
        public long? RunTimeTicks { get; set; }
        public bool ReadInputAtNativeFramerate { get; set; }
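
        // Lazily parses the comma-separated TranscodeReasons string from the request into
        // strongly typed TranscodeReason values; blank entries are ignored.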
        private List<TranscodeReason> _transcodeReasons = null;
        public List<TranscodeReason> TranscodeReasons
        {
            get
            {
                if (_transcodeReasons == null)
                {
                    _transcodeReasons = (BaseRequest.TranscodeReasons ?? string.Empty)
                        .Split(',')
                        .Where(i => !string.IsNullOrWhiteSpace(i))
                        .Select(v => (TranscodeReason)Enum.Parse(typeof(TranscodeReason), v, true))
                        .ToList();
                }

                return _transcodeReasons;
            }
        }

        public bool IgnoreInputDts
        {
            get
            {
                return MediaSource.IgnoreDts;
            }
        }

        public bool IgnoreInputIndex
        {
            get
            {
                return MediaSource.IgnoreIndex;
            }
        }

        public bool GenPtsInput
        {
            get
            {
                return MediaSource.GenPtsInput;
            }
        }

        public bool DiscardCorruptFramesInput
        {
            get
            {
                return false;
            }
        }

        public bool EnableFastSeekInput
        {
            get
            {
                return false;
            }
        }

        public bool GenPtsOutput
        {
            get
            {
                return false;
            }
        }

        public string OutputContainer { get; set; }
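
        // Chooses the video sync mode (presumably mapped to the encoder's vsync option):
        // constant frame rate for live/in-progress MPEG-TS input with no known runtime,
        // otherwise "-1" to let the encoder decide.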
        public string OutputVideoSync
        {
            get
            {
                // For live tv + in progress recordings
                if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase) || string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    if (!MediaSource.RunTimeTicks.HasValue)
                    {
                        return "cfr";
                    }
                }

                return "-1";
            }
        }

        public string AlbumCoverPath { get; set; }
        public string InputAudioSync { get; set; }
        public string InputVideoSync { get; set; }
        public TransportStreamTimestamp InputTimestamp { get; set; }
        public MediaStream AudioStream { get; set; }
        public List<string> SupportedAudioCodecs { get; set; }
        public List<string> SupportedVideoCodecs { get; set; }
        public string InputContainer { get; set; }
        public IsoType? IsoType { get; set; }
        public bool EnableMpegtsM2TsMode { get; set; }
        public BaseEncodingJobOptions BaseRequest { get; set; }

        public long? StartTimeTicks
        {
            get { return BaseRequest.StartTimeTicks; }
        }

        public bool CopyTimestamps
        {
            get { return BaseRequest.CopyTimestamps; }
        }

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;
        public bool DeInterlace { get; set; }
        public bool IsVideoRequest { get; set; }
        public TranscodingJobType TranscodingType { get; set; }

        public EncodingJobInfo(ILogger logger, TranscodingJobType jobType)
        {
            _logger = logger;
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            PlayableStreamFileNames = new List<string>();
            SupportedAudioCodecs = new List<string>();
            SupportedVideoCodecs = new List<string>();
            SupportedSubtitleCodecs = new List<string>();
        }
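
        // A non-progressive (HLS/DASH) job with no known runtime is treated as a segmented live stream.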
        public bool IsSegmentedLiveStream
        {
            get
            {
                return TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;
            }
        }
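
        // Breaking segments on non-keyframes is only allowed for segmented (non-progressive),
        // non-live jobs that are stream-copying the video and explicitly requested it.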
        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase);
            }

            return false;
        }

        public int? TotalOutputBitrate
        {
            get
            {
                return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
            }
        }
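
        // Output dimensions come from the probed source resolution resized to the requested
        // Width/Height/MaxWidth/MaxHeight constraints; without a probed video stream they
        // fall back to the raw request values.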
        public int? OutputWidth
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Width);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Height);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }
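
        // When stream-copying (or handling a static/remux request) the source sample rate is
        // passed through; otherwise a requested sample rate is capped at 44100 Hz to avoid
        // encoder failures seen at higher rates such as 88200 Hz.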
        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                }

                return null;
            }
        }

        public int? OutputAudioBitDepth
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.BitDepth;
                    }
                }

                //else if (BaseRequest.AudioSampleRate.HasValue)
                //{
                //    // Don't exceed what the encoder supports
                //    // Seeing issues of attempting to encode to 88200
                //    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                //}

                return null;
            }
        }

        /// <summary>
        /// Predicts the video level that will be in the output stream
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                var stream = VideoStream;
                var request = BaseRequest;

                return !string.IsNullOrEmpty(request.Level) && !request.Static
                    ? double.Parse(request.Level, CultureInfo.InvariantCulture)
                    : stream == null ? null : stream.Level;
            }
        }

        /// <summary>
        /// Predicts the video bit depth that will be in the output stream
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.BitDepth;
            }
        }

        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.RefFrames;
            }
        }

        /// <summary>
        /// Predicts the frame rate that will be in the output stream
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                var stream = VideoStream;
                var requestedFramerate = BaseRequest.MaxFramerate ?? BaseRequest.Framerate;

                return requestedFramerate.HasValue && !BaseRequest.Static
                    ? requestedFramerate
                    : stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
            }
        }
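
        // m2ts output defaults to valid transport stream timestamps; other containers default to none.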
        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase) ?
                    TransportStreamTimestamp.Valid :
                    TransportStreamTimestamp.None;

                return !BaseRequest.Static
                    ? defaultValue
                    : InputTimestamp;
            }
        }

        /// <summary>
        /// Predicts the packet length that will be in the output stream
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.PacketLength;
            }
        }

        /// <summary>
        /// Predicts the video profile that will be in the output stream
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                var stream = VideoStream;
                return !string.IsNullOrEmpty(BaseRequest.Profile) && !BaseRequest.Static
                    ? BaseRequest.Profile
                    : stream == null ? null : stream.Profile;
            }
        }

        public string TargetVideoCodecTag
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.CodecTag;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAnamorphic;
                }

                return false;
            }
        }

        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
                }

                if (DeInterlace)
                {
                    return false;
                }

                return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
            }
        }

        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAVC;
                }

                return false;
            }
        }

        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }
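
        // Clamps the source's stream count for the given type to the supplied limit.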
        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }
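
        // Disposes any mounted ISO image, logging (rather than rethrowing) dispose failures.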
        protected void DisposeIsoMount()
        {
            if (IsoMount != null)
            {
                try
                {
                    IsoMount.Dispose();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("Error disposing iso mount", ex);
                }

                IsoMount = null;
            }
        }

        public abstract void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate);
    }

    /// <summary>
    /// Enum TranscodingJobType
    /// </summary>
    public enum TranscodingJobType
    {
        /// <summary>
        /// The progressive
        /// </summary>
        Progressive,

        /// <summary>
        /// The HLS
        /// </summary>
        Hls,

        /// <summary>
        /// The dash
        /// </summary>
        Dash
    }
}