EncodingJobInfo.cs

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Session;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public abstract class EncodingJobInfo
    {
        private readonly ILogger _logger;

        public MediaStream VideoStream { get; set; }
        public VideoType VideoType { get; set; }
        public Dictionary<string, string> RemoteHttpHeaders { get; set; }
        public string OutputVideoCodec { get; set; }
        public MediaProtocol InputProtocol { get; set; }
        public string MediaPath { get; set; }
        public bool IsInputVideo { get; set; }
        public IIsoMount IsoMount { get; set; }
        public List<string> PlayableStreamFileNames { get; set; }
        public string OutputAudioCodec { get; set; }
        public int? OutputVideoBitrate { get; set; }
        public MediaStream SubtitleStream { get; set; }
        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
        public List<string> SupportedSubtitleCodecs { get; set; }
        public int InternalSubtitleStreamOffset { get; set; }
        public MediaSourceInfo MediaSource { get; set; }
        public User User { get; set; }
        public long? RunTimeTicks { get; set; }
        public bool ReadInputAtNativeFramerate { get; set; }
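
        // TranscodeReasons is parsed lazily from the comma-separated string on
        // BaseRequest; blank entries are skipped and enum names are matched
        // case-insensitively. Note that Enum.Parse will throw for values that
        // are not valid TranscodeReason names.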
        private List<TranscodeReason> _transcodeReasons = null;
        public List<TranscodeReason> TranscodeReasons
        {
            get
            {
                if (_transcodeReasons == null)
                {
                    _transcodeReasons = (BaseRequest.TranscodeReasons ?? string.Empty)
                        .Split(',')
                        .Where(i => !string.IsNullOrWhiteSpace(i))
                        .Select(v => (TranscodeReason)Enum.Parse(typeof(TranscodeReason), v, true))
                        .ToList();
                }

                return _transcodeReasons;
            }
        }

        public bool IgnoreInputDts
        {
            get { return MediaSource.IgnoreDts; }
        }

        public bool IgnoreInputIndex
        {
            get { return MediaSource.IgnoreIndex; }
        }

        public bool GenPtsInput
        {
            get { return MediaSource.GenPtsInput; }
        }

        public bool DiscardCorruptFramesInput
        {
            get { return false; }
        }

        public bool EnableFastSeekInput
        {
            get { return false; }
        }

        public bool GenPtsOutput
        {
            get { return false; }
        }

        public string OutputContainer { get; set; }
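
        // Returns "cfr" (constant frame rate) when the input is an mpegts/ts
        // container with no known runtime, i.e. live TV or an in-progress
        // recording. Otherwise "-1", which presumably maps to the encoder's
        // default/auto video sync behavior.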
        public string OutputVideoSync
        {
            get
            {
                // For live tv + in progress recordings
                if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase) || string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    if (!MediaSource.RunTimeTicks.HasValue)
                    {
                        return "cfr";
                    }
                }

                return "-1";
            }
        }

        public string AlbumCoverPath { get; set; }
        public string InputAudioSync { get; set; }
        public string InputVideoSync { get; set; }
        public TransportStreamTimestamp InputTimestamp { get; set; }
        public MediaStream AudioStream { get; set; }
        public List<string> SupportedAudioCodecs { get; set; }
        public List<string> SupportedVideoCodecs { get; set; }
        public string InputContainer { get; set; }
        public IsoType? IsoType { get; set; }
        public bool EnableMpegtsM2TsMode { get; set; }
        public BaseEncodingJobOptions BaseRequest { get; set; }

        public long? StartTimeTicks
        {
            get { return BaseRequest.StartTimeTicks; }
        }

        public bool CopyTimestamps
        {
            get { return BaseRequest.CopyTimestamps; }
        }

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;

        public bool DeInterlace { get; set; }
        public bool IsVideoRequest { get; set; }
        public TranscodingJobType TranscodingType { get; set; }

        public EncodingJobInfo(ILogger logger, TranscodingJobType jobType)
        {
            _logger = logger;
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            PlayableStreamFileNames = new List<string>();
            SupportedAudioCodecs = new List<string>();
            SupportedVideoCodecs = new List<string>();
            SupportedSubtitleCodecs = new List<string>();
        }

        public bool IsSegmentedLiveStream
        {
            get
            {
                return TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;
            }
        }
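
        // Breaking on non-key frames is only honored for segmented (non-progressive)
        // jobs that are doing a straight video stream copy and only when the request
        // asks for it; segmented live streams and full transcodes always return false.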
        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase);
            }

            return false;
        }

        public int? TotalOutputBitrate
        {
            get
            {
                return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
            }
        }
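
        // OutputWidth/OutputHeight fit the source dimensions into the requested
        // Width/Height/MaxWidth/MaxHeight constraints via DrawingUtils.Resize.
        // When the source dimensions are unknown they fall back to the raw
        // request values, or null for non-video requests.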
        public int? OutputWidth
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Width);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Height);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }
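
        // When the audio is copied (or the request is static), the source sample
        // rate is passed through. Otherwise a requested sample rate is clamped to
        // 44100 Hz, because higher rates (e.g. 88200) have caused encoder failures.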
        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                }

                return null;
            }
        }
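
        // The Target* / IsTarget* properties below describe the stream the client
        // will actually receive: for static (direct) playback they report the
        // source stream's values, otherwise they report the requested or transcoded
        // values (or null/false when the source value no longer applies).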

        /// <summary>
        /// Predicts the video level that will be in the output stream.
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                var stream = VideoStream;
                var request = BaseRequest;

                return !string.IsNullOrEmpty(request.Level) && !request.Static
                    ? double.Parse(request.Level, CultureInfo.InvariantCulture)
                    : stream == null ? null : stream.Level;
            }
        }

        /// <summary>
        /// Predicts the video bit depth that will be in the output stream.
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.BitDepth;
            }
        }

        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.RefFrames;
            }
        }

        /// <summary>
        /// Predicts the framerate that will be in the output stream.
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                var stream = VideoStream;
                var requestedFramerate = BaseRequest.MaxFramerate ?? BaseRequest.Framerate;

                return requestedFramerate.HasValue && !BaseRequest.Static
                    ? requestedFramerate
                    : stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
            }
        }

        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase)
                    ? TransportStreamTimestamp.Valid
                    : TransportStreamTimestamp.None;

                return !BaseRequest.Static
                    ? defaultValue
                    : InputTimestamp;
            }
        }

        /// <summary>
        /// Predicts the packet length that will be in the output stream.
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.PacketLength;
            }
        }

        /// <summary>
        /// Predicts the video profile that will be in the output stream.
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                var stream = VideoStream;
                return !string.IsNullOrEmpty(BaseRequest.Profile) && !BaseRequest.Static
                    ? BaseRequest.Profile
                    : stream == null ? null : stream.Profile;
            }
        }

        public string TargetVideoCodecTag
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.CodecTag;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAnamorphic;
                }

                return false;
            }
        }

        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
                }

                if (DeInterlace)
                {
                    return false;
                }

                return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
            }
        }

        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAVC;
                }

                return false;
            }
        }

        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }
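
        // Returns the number of streams of the given type in the media source,
        // capped at the supplied limit (int.MaxValue for static playback, 1 when
        // transcoding, since only a single stream of each type is kept).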
        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }
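
        // Disposes any mounted ISO image, logging failures rather than letting
        // them escape, and clears the reference so it is not disposed twice.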
        protected void DisposeIsoMount()
        {
            if (IsoMount != null)
            {
                try
                {
                    IsoMount.Dispose();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("Error disposing iso mount", ex);
                }

                IsoMount = null;
            }
        }

        public abstract void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate);
    }
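
    // Illustrative sketch only, not part of the original source: a minimal
    // concrete job type showing how a derived class supplies the transcoding
    // job type to the base constructor and satisfies the abstract
    // ReportTranscodingProgress member. The class name and the
    // LastReportedPercent property are hypothetical.
    public class ExampleEncodingJob : EncodingJobInfo
    {
        public ExampleEncodingJob(ILogger logger)
            : base(logger, TranscodingJobType.Progressive)
        {
        }

        // Remembers the most recently reported completion percentage; a real
        // implementation would forward progress to whatever component tracks
        // active transcoding sessions.
        public double? LastReportedPercent { get; private set; }

        public override void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
        {
            LastReportedPercent = percentComplete;
        }
    }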

    /// <summary>
    /// Enum TranscodingJobType
    /// </summary>
    public enum TranscodingJobType
    {
        /// <summary>
        /// The progressive
        /// </summary>
        Progressive,

        /// <summary>
        /// The HLS
        /// </summary>
        Hls,

        /// <summary>
        /// The dash
        /// </summary>
        Dash
    }
}