EncodingJobInfo.cs

using System;
using System.Collections.Generic;
using System.Globalization;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Drawing;

namespace MediaBrowser.Controller.MediaEncoding
{
    // For now, a common base class until the API and MediaEncoding classes are unified
    public abstract class EncodingJobInfo
    {
        private readonly ILogger _logger;

        public MediaStream VideoStream { get; set; }
        public VideoType VideoType { get; set; }
        public Dictionary<string, string> RemoteHttpHeaders { get; set; }
        public string OutputVideoCodec { get; set; }
        public MediaProtocol InputProtocol { get; set; }
        public string MediaPath { get; set; }
        public bool IsInputVideo { get; set; }
        public IIsoMount IsoMount { get; set; }
        public List<string> PlayableStreamFileNames { get; set; }
        public string OutputAudioCodec { get; set; }
        public int? OutputVideoBitrate { get; set; }
        public MediaStream SubtitleStream { get; set; }
        public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
        public List<string> SupportedSubtitleCodecs { get; set; }
        public int InternalSubtitleStreamOffset { get; set; }
        public MediaSourceInfo MediaSource { get; set; }
        public User User { get; set; }
        public long? RunTimeTicks { get; set; }
        public bool ReadInputAtNativeFramerate { get; set; }

        public bool IgnoreInputDts
        {
            get { return MediaSource.IgnoreDts; }
        }

        public bool IgnoreInputIndex
        {
            get { return MediaSource.IgnoreIndex; }
        }

        public bool GenPtsInput
        {
            get { return MediaSource.GenPtsInput; }
        }

        public bool DiscardCorruptFramesInput
        {
            get { return false; }
        }

        public bool EnableFastSeekInput
        {
            get { return false; }
        }

        public bool GenPtsOutput
        {
            get { return false; }
        }

        public string OutputContainer { get; set; }
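        // Video sync mode requested from the encoder: live mpegts/ts input with no known
        // runtime is forced to constant frame rate ("cfr"); everything else returns "-1",
        // leaving the choice to the encoder.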
        public string OutputVideoSync
        {
            get
            {
                // For live tv + in progress recordings
                if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase) || string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    if (!MediaSource.RunTimeTicks.HasValue)
                    {
                        return "cfr";
                    }
                }

                return "-1";
            }
        }

        public string AlbumCoverPath { get; set; }
        public string InputAudioSync { get; set; }
        public string InputVideoSync { get; set; }
        public TransportStreamTimestamp InputTimestamp { get; set; }
        public MediaStream AudioStream { get; set; }
        public List<string> SupportedAudioCodecs { get; set; }
        public List<string> SupportedVideoCodecs { get; set; }
        public string InputContainer { get; set; }
        public IsoType? IsoType { get; set; }
        public bool EnableMpegtsM2TsMode { get; set; }
        public BaseEncodingJobOptions BaseRequest { get; set; }

        public long? StartTimeTicks
        {
            get { return BaseRequest.StartTimeTicks; }
        }

        public bool CopyTimestamps
        {
            get { return BaseRequest.CopyTimestamps; }
        }

        public int? OutputAudioBitrate;
        public int? OutputAudioChannels;

        public bool DeInterlace { get; set; }
        public bool IsVideoRequest { get; set; }
        public TranscodingJobType TranscodingType { get; set; }

        public EncodingJobInfo(ILogger logger, TranscodingJobType jobType)
        {
            _logger = logger;
            TranscodingType = jobType;
            RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            PlayableStreamFileNames = new List<string>();
            SupportedAudioCodecs = new List<string>();
            SupportedVideoCodecs = new List<string>();
            SupportedSubtitleCodecs = new List<string>();
        }
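        // A segmented (HLS/DASH) job with no known runtime is treated as a live stream.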
        public bool IsSegmentedLiveStream
        {
            get
            {
                return TranscodingType != TranscodingJobType.Progressive && !RunTimeTicks.HasValue;
            }
        }
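        // Breaking segments on non-keyframes is only allowed for segmented, non-live jobs
        // that are stream-copying video and whose request opts in.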
        public bool EnableBreakOnNonKeyFrames(string videoCodec)
        {
            if (TranscodingType != TranscodingJobType.Progressive)
            {
                if (IsSegmentedLiveStream)
                {
                    return false;
                }

                return BaseRequest.BreakOnNonKeyFrames && string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase);
            }

            return false;
        }

        public int? TotalOutputBitrate
        {
            get
            {
                return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
            }
        }
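        // Output dimensions: the source frame size is resized against the requested
        // Width/Height/MaxWidth/MaxHeight constraints; audio-only requests return null.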
        public int? OutputWidth
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Width);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxWidth ?? BaseRequest.Width;
            }
        }

        public int? OutputHeight
        {
            get
            {
                if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
                {
                    var size = new ImageSize
                    {
                        Width = VideoStream.Width.Value,
                        Height = VideoStream.Height.Value
                    };

                    var newSize = DrawingUtils.Resize(size,
                        BaseRequest.Width,
                        BaseRequest.Height,
                        BaseRequest.MaxWidth,
                        BaseRequest.MaxHeight);

                    return Convert.ToInt32(newSize.Height);
                }

                if (!IsVideoRequest)
                {
                    return null;
                }

                return BaseRequest.MaxHeight ?? BaseRequest.Height;
            }
        }
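        // Sample rate for the output audio stream: the source rate is passed through when
        // remuxing or stream-copying audio; otherwise the requested rate is honoured, capped
        // at 44100 to avoid encoder failures seen at higher rates (e.g. 88200).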
        public int? OutputAudioSampleRate
        {
            get
            {
                if (BaseRequest.Static || string.Equals(OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
                {
                    if (AudioStream != null)
                    {
                        return AudioStream.SampleRate;
                    }
                }
                else if (BaseRequest.AudioSampleRate.HasValue)
                {
                    // Don't exceed what the encoder supports
                    // Seeing issues of attempting to encode to 88200
                    return Math.Min(44100, BaseRequest.AudioSampleRate.Value);
                }

                return null;
            }
        }
        /// <summary>
        /// Predicts the video level that will be in the output stream.
        /// </summary>
        public double? TargetVideoLevel
        {
            get
            {
                var stream = VideoStream;
                var request = BaseRequest;

                return !string.IsNullOrEmpty(request.Level) && !request.Static
                    ? double.Parse(request.Level, CultureInfo.InvariantCulture)
                    : stream == null ? null : stream.Level;
            }
        }
        /// <summary>
        /// Predicts the video bit depth that will be in the output stream.
        /// </summary>
        public int? TargetVideoBitDepth
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.BitDepth;
            }
        }
        /// <summary>
        /// Gets the target reference frames.
        /// </summary>
        /// <value>The target reference frames.</value>
        public int? TargetRefFrames
        {
            get
            {
                var stream = VideoStream;
                return stream == null || !BaseRequest.Static ? null : stream.RefFrames;
            }
        }
        /// <summary>
        /// Predicts the framerate that will be in the output stream.
        /// </summary>
        public float? TargetFramerate
        {
            get
            {
                var stream = VideoStream;
                var requestedFramerate = BaseRequest.MaxFramerate ?? BaseRequest.Framerate;

                return requestedFramerate.HasValue && !BaseRequest.Static
                    ? requestedFramerate
                    : stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
            }
        }
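        // Transport stream timestamp type expected in the output: transcoded m2ts output gets
        // valid timestamps and other containers none; a static copy passes the input value through.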
        public TransportStreamTimestamp TargetTimestamp
        {
            get
            {
                var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase)
                    ? TransportStreamTimestamp.Valid
                    : TransportStreamTimestamp.None;

                return !BaseRequest.Static
                    ? defaultValue
                    : InputTimestamp;
            }
        }
        /// <summary>
        /// Predicts the packet length that will be in the output stream.
        /// </summary>
        public int? TargetPacketLength
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.PacketLength;
            }
        }
        /// <summary>
        /// Predicts the video profile that will be in the output stream.
        /// </summary>
        public string TargetVideoProfile
        {
            get
            {
                var stream = VideoStream;
                return !string.IsNullOrEmpty(BaseRequest.Profile) && !BaseRequest.Static
                    ? BaseRequest.Profile
                    : stream == null ? null : stream.Profile;
            }
        }
        public string TargetVideoCodecTag
        {
            get
            {
                var stream = VideoStream;
                return !BaseRequest.Static
                    ? null
                    : stream == null ? null : stream.CodecTag;
            }
        }

        public bool? IsTargetAnamorphic
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAnamorphic;
                }

                return false;
            }
        }
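        // Interlacing in the output: passed through for a static copy, false when
        // deinterlacing is applied, otherwise whatever the source stream reports.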
        public bool? IsTargetInterlaced
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
                }

                if (DeInterlace)
                {
                    return false;
                }

                return VideoStream == null ? (bool?)null : VideoStream.IsInterlaced;
            }
        }

        public bool? IsTargetAVC
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return VideoStream == null ? null : VideoStream.IsAVC;
                }

                return false;
            }
        }
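        // Stream counts in the output: a static copy keeps every source stream of the
        // given type, while a transcode produces at most one.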
        public int? TargetVideoStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Video, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Video, 1);
            }
        }

        public int? TargetAudioStreamCount
        {
            get
            {
                if (BaseRequest.Static)
                {
                    return GetMediaStreamCount(MediaStreamType.Audio, int.MaxValue);
                }

                return GetMediaStreamCount(MediaStreamType.Audio, 1);
            }
        }
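        // Counts streams of the given type in the media source, capped at the supplied limit.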
        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            var count = MediaSource.GetStreamCount(type);

            if (count.HasValue)
            {
                count = Math.Min(count.Value, limit);
            }

            return count;
        }
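        // Disposes the ISO mount, if any, logging rather than rethrowing so cleanup can continue.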
        protected void DisposeIsoMount()
        {
            if (IsoMount != null)
            {
                try
                {
                    IsoMount.Dispose();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("Error disposing iso mount", ex);
                }

                IsoMount = null;
            }
        }
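        /// <summary>
        /// Reports progress for this transcoding job.
        /// </summary>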
        public abstract void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate);
    }

    /// <summary>
    /// Enum TranscodingJobType
    /// </summary>
    public enum TranscodingJobType
    {
        /// <summary>
        /// The progressive
        /// </summary>
        Progressive,

        /// <summary>
        /// The HLS
        /// </summary>
        Hls,

        /// <summary>
        /// The dash
        /// </summary>
        Dash
    }
}