UniversalAudioController.cs

using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Jellyfin.Api.Attributes;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.ModelBinders;
using Jellyfin.Api.Models.StreamingDtos;
using Jellyfin.Data.Enums;
using Jellyfin.Extensions;
using MediaBrowser.Common.Extensions;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Streaming;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Session;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;

namespace Jellyfin.Api.Controllers;

/// <summary>
/// The universal audio controller.
/// </summary>
[Route("")]
public class UniversalAudioController : BaseJellyfinApiController
{
    private readonly ILibraryManager _libraryManager;
    private readonly ILogger<UniversalAudioController> _logger;
    private readonly MediaInfoHelper _mediaInfoHelper;
    private readonly AudioHelper _audioHelper;
    private readonly DynamicHlsHelper _dynamicHlsHelper;
    private readonly IUserManager _userManager;

    /// <summary>
    /// Initializes a new instance of the <see cref="UniversalAudioController"/> class.
    /// </summary>
    /// <param name="libraryManager">Instance of the <see cref="ILibraryManager"/> interface.</param>
    /// <param name="logger">Instance of the <see cref="ILogger{UniversalAudioController}"/> interface.</param>
    /// <param name="mediaInfoHelper">Instance of <see cref="MediaInfoHelper"/>.</param>
    /// <param name="audioHelper">Instance of <see cref="AudioHelper"/>.</param>
    /// <param name="dynamicHlsHelper">Instance of <see cref="DynamicHlsHelper"/>.</param>
    /// <param name="userManager">Instance of the <see cref="IUserManager"/> interface.</param>
    public UniversalAudioController(
        ILibraryManager libraryManager,
        ILogger<UniversalAudioController> logger,
        MediaInfoHelper mediaInfoHelper,
        AudioHelper audioHelper,
        DynamicHlsHelper dynamicHlsHelper,
        IUserManager userManager)
    {
        _libraryManager = libraryManager;
        _logger = logger;
        _mediaInfoHelper = mediaInfoHelper;
        _audioHelper = audioHelper;
        _dynamicHlsHelper = dynamicHlsHelper;
        _userManager = userManager;
    }

    /// <summary>
    /// Gets an audio stream.
    /// </summary>
    /// <param name="itemId">The item id.</param>
    /// <param name="container">Optional. The audio container.</param>
    /// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
    /// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
    /// <param name="userId">Optional. The user id.</param>
    /// <param name="audioCodec">Optional. The audio codec to transcode to.</param>
    /// <param name="maxAudioChannels">Optional. The maximum number of audio channels.</param>
    /// <param name="transcodingAudioChannels">Optional. The number of audio channels to transcode to.</param>
    /// <param name="maxStreamingBitrate">Optional. The maximum streaming bitrate.</param>
    /// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
    /// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000,000 ticks = 1 second).</param>
    /// <param name="transcodingContainer">Optional. The container to transcode to.</param>
    /// <param name="transcodingProtocol">Optional. The transcoding protocol.</param>
    /// <param name="maxAudioSampleRate">Optional. The maximum audio sample rate.</param>
    /// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
    /// <param name="enableRemoteMedia">Optional. Whether to enable remote media.</param>
    /// <param name="enableAudioVbrEncoding">Optional. Whether to enable variable bitrate (VBR) audio encoding.</param>
    /// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
    /// <param name="enableRedirection">Whether to enable redirection. Defaults to true.</param>
    /// <response code="200">Audio stream returned.</response>
    /// <response code="302">Redirected to remote audio stream.</response>
    /// <response code="404">Item not found.</response>
    /// <returns>A <see cref="Task"/> containing the audio file.</returns>
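    /// <remarks>
    /// Illustrative request only; the query values below are examples, not defaults:
    /// <c>GET /Audio/{itemId}/universal?container=opus,webm|opus,mp3,flac&amp;transcodingContainer=ts&amp;transcodingProtocol=hls&amp;audioCodec=aac&amp;maxStreamingBitrate=320000</c>.
    /// </remarks>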
    [HttpGet("Audio/{itemId}/universal")]
    [HttpHead("Audio/{itemId}/universal", Name = "HeadUniversalAudioStream")]
    [Authorize]
    [ProducesResponseType(StatusCodes.Status200OK)]
    [ProducesResponseType(StatusCodes.Status302Found)]
    [ProducesResponseType(StatusCodes.Status404NotFound)]
    [ProducesAudioFile]
    public async Task<ActionResult> GetUniversalAudioStream(
        [FromRoute, Required] Guid itemId,
        [FromQuery, ModelBinder(typeof(CommaDelimitedArrayModelBinder))] string[] container,
        [FromQuery] string? mediaSourceId,
        [FromQuery] string? deviceId,
        [FromQuery] Guid? userId,
        [FromQuery] [RegularExpression(EncodingHelper.ValidationRegex)] string? audioCodec,
        [FromQuery] int? maxAudioChannels,
        [FromQuery] int? transcodingAudioChannels,
        [FromQuery] int? maxStreamingBitrate,
        [FromQuery] int? audioBitRate,
        [FromQuery] long? startTimeTicks,
        [FromQuery] [RegularExpression(EncodingHelper.ValidationRegex)] string? transcodingContainer,
        [FromQuery] MediaStreamProtocol? transcodingProtocol,
        [FromQuery] int? maxAudioSampleRate,
        [FromQuery] int? maxAudioBitDepth,
        [FromQuery] bool? enableRemoteMedia,
        [FromQuery] bool enableAudioVbrEncoding = true,
        [FromQuery] bool breakOnNonKeyFrames = false,
        [FromQuery] bool enableRedirection = true)
    {
        userId = RequestHelpers.GetUserId(User, userId);
        var user = userId.IsNullOrEmpty()
            ? null
            : _userManager.GetUserById(userId.Value);
        var item = _libraryManager.GetItemById<BaseItem>(itemId, user);
        if (item is null)
        {
            return NotFound();
        }

        var deviceProfile = GetDeviceProfile(container, transcodingContainer, audioCodec, transcodingProtocol, breakOnNonKeyFrames, transcodingAudioChannels, maxAudioSampleRate, maxAudioBitDepth, maxAudioChannels);
        _logger.LogInformation("GetUniversalAudioStream profile: {@Profile}", deviceProfile);
        var info = await _mediaInfoHelper.GetPlaybackInfo(
                item,
                user,
                mediaSourceId)
            .ConfigureAwait(false);

        // set device specific data
        foreach (var sourceInfo in info.MediaSources)
        {
            sourceInfo.TranscodingContainer = transcodingContainer;
            sourceInfo.TranscodingSubProtocol = transcodingProtocol ?? sourceInfo.TranscodingSubProtocol;
            _mediaInfoHelper.SetDeviceSpecificData(
                item,
                sourceInfo,
                deviceProfile,
                User,
                maxStreamingBitrate ?? deviceProfile.MaxStreamingBitrate,
                startTimeTicks ?? 0,
                mediaSourceId ?? string.Empty,
                null,
                null,
                maxAudioChannels,
                info.PlaySessionId!,
                userId ?? Guid.Empty,
                true,
                true,
                true,
                true,
                true,
                false,
                Request.HttpContext.GetNormalizedRemoteIP());
        }

        _mediaInfoHelper.SortMediaSources(info, maxStreamingBitrate);

        foreach (var source in info.MediaSources)
        {
            _mediaInfoHelper.NormalizeMediaSourceContainer(source, deviceProfile, DlnaProfileType.Video);
        }
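
        // SortMediaSources orders the sources by suitability, so the first entry is the preferred one for this request.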
        var mediaSource = info.MediaSources[0];
        if (mediaSource.SupportsDirectPlay && mediaSource.Protocol == MediaProtocol.Http && enableRedirection && mediaSource.IsRemote && enableRemoteMedia.HasValue && enableRemoteMedia.Value)
        {
            return Redirect(mediaSource.Path);
        }

        // This one is currently very misleading as the SupportsDirectStream actually means "can direct play"
        // The definition of DirectStream also seems changed during development
        var isStatic = mediaSource.SupportsDirectStream;
        if (!isStatic && mediaSource.TranscodingSubProtocol == MediaStreamProtocol.hls)
        {
            // hls segment container can only be mpegts or fmp4 per ffmpeg documentation
            // ffmpeg option -> file extension
            //     mpegts -> ts
            //     fmp4 -> mp4
            var supportedHlsContainers = new[] { "ts", "mp4" };

            // fallback to mpegts if device reports some weird value unsupported by hls
            var requestedSegmentContainer = Array.Exists(
                supportedHlsContainers,
                element => string.Equals(element, transcodingContainer, StringComparison.OrdinalIgnoreCase)) ? transcodingContainer : "ts";
            var segmentContainer = Array.Exists(
                supportedHlsContainers,
                element => string.Equals(element, mediaSource.TranscodingContainer, StringComparison.OrdinalIgnoreCase)) ? mediaSource.TranscodingContainer : requestedSegmentContainer;
            var dynamicHlsRequestDto = new HlsAudioRequestDto
            {
                Id = itemId,
                Container = ".m3u8",
                Static = isStatic,
                PlaySessionId = info.PlaySessionId,
                SegmentContainer = segmentContainer,
                MediaSourceId = mediaSourceId,
                DeviceId = deviceId,
                AudioCodec = mediaSource.TranscodeReasons == TranscodeReason.ContainerNotSupported ? "copy" : audioCodec,
                EnableAutoStreamCopy = true,
                AllowAudioStreamCopy = true,
                AllowVideoStreamCopy = true,
                BreakOnNonKeyFrames = breakOnNonKeyFrames,
                AudioSampleRate = maxAudioSampleRate,
                MaxAudioChannels = maxAudioChannels,
                MaxAudioBitDepth = maxAudioBitDepth,
                AudioBitRate = audioBitRate ?? maxStreamingBitrate,
                StartTimeTicks = startTimeTicks,
                SubtitleMethod = SubtitleDeliveryMethod.Hls,
                RequireAvc = false,
                DeInterlace = false,
                RequireNonAnamorphic = false,
                EnableMpegtsM2TsMode = false,
                TranscodeReasons = mediaSource.TranscodeReasons == 0 ? null : mediaSource.TranscodeReasons.ToString(),
                Context = EncodingContext.Static,
                StreamOptions = new Dictionary<string, string>(),
                EnableAdaptiveBitrateStreaming = true,
                EnableAudioVbrEncoding = enableAudioVbrEncoding
            };

            return await _dynamicHlsHelper.GetMasterHlsPlaylist(TranscodingJobType.Hls, dynamicHlsRequestDto, true)
                .ConfigureAwait(false);
        }

        var audioStreamingDto = new StreamingRequestDto
        {
            Id = itemId,
            Container = isStatic ? null : ("." + mediaSource.TranscodingContainer),
            Static = isStatic,
            PlaySessionId = info.PlaySessionId,
            MediaSourceId = mediaSourceId,
            DeviceId = deviceId,
            AudioCodec = audioCodec,
            EnableAutoStreamCopy = true,
            AllowAudioStreamCopy = true,
            AllowVideoStreamCopy = true,
            BreakOnNonKeyFrames = breakOnNonKeyFrames,
            AudioSampleRate = maxAudioSampleRate,
            MaxAudioChannels = maxAudioChannels,
            AudioBitRate = isStatic ? null : (audioBitRate ?? maxStreamingBitrate),
            MaxAudioBitDepth = maxAudioBitDepth,
            AudioChannels = maxAudioChannels,
            CopyTimestamps = true,
            StartTimeTicks = startTimeTicks,
            SubtitleMethod = SubtitleDeliveryMethod.Embed,
            TranscodeReasons = mediaSource.TranscodeReasons == 0 ? null : mediaSource.TranscodeReasons.ToString(),
            Context = EncodingContext.Static
        };

        return await _audioHelper.GetAudioStream(TranscodingJobType.Progressive, audioStreamingDto).ConfigureAwait(false);
    }
    private DeviceProfile GetDeviceProfile(
        string[] containers,
        string? transcodingContainer,
        string? audioCodec,
        MediaStreamProtocol? transcodingProtocol,
        bool? breakOnNonKeyFrames,
        int? transcodingAudioChannels,
        int? maxAudioSampleRate,
        int? maxAudioBitDepth,
        int? maxAudioChannels)
    {
        var deviceProfile = new DeviceProfile();

        int len = containers.Length;
        var directPlayProfiles = new DirectPlayProfile[len];
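
        // Each requested container entry is either a bare container name ("mp3") or a
        // "container|codec" pair (for example "webm|opus"); everything after the first '|'
        // becomes the comma-separated codec list for that container.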
        for (int i = 0; i < len; i++)
        {
            var parts = containers[i].Split('|', StringSplitOptions.RemoveEmptyEntries);

            var audioCodecs = parts.Length == 1 ? null : string.Join(',', parts.Skip(1));

            directPlayProfiles[i] = new DirectPlayProfile
            {
                Type = DlnaProfileType.Audio,
                Container = parts[0],
                AudioCodec = audioCodecs
            };
        }

        deviceProfile.DirectPlayProfiles = directPlayProfiles;

        deviceProfile.TranscodingProfiles = new[]
        {
            new TranscodingProfile
            {
                Type = DlnaProfileType.Audio,
                Context = EncodingContext.Streaming,
                Container = transcodingContainer ?? "mp3",
                AudioCodec = audioCodec ?? "mp3",
                Protocol = transcodingProtocol ?? MediaStreamProtocol.http,
                BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
                MaxAudioChannels = transcodingAudioChannels?.ToString(CultureInfo.InvariantCulture)
            }
        };

        var codecProfiles = new List<CodecProfile>();
        var conditions = new List<ProfileCondition>();

        if (maxAudioSampleRate.HasValue)
        {
            // codec profile
            conditions.Add(
                new ProfileCondition
                {
                    Condition = ProfileConditionType.LessThanEqual,
                    IsRequired = false,
                    Property = ProfileConditionValue.AudioSampleRate,
                    Value = maxAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture)
                });
        }

        if (maxAudioBitDepth.HasValue)
        {
            // codec profile
            conditions.Add(
                new ProfileCondition
                {
                    Condition = ProfileConditionType.LessThanEqual,
                    IsRequired = false,
                    Property = ProfileConditionValue.AudioBitDepth,
                    Value = maxAudioBitDepth.Value.ToString(CultureInfo.InvariantCulture)
                });
        }

        if (maxAudioChannels.HasValue)
        {
            // codec profile
            conditions.Add(
                new ProfileCondition
                {
                    Condition = ProfileConditionType.LessThanEqual,
                    IsRequired = false,
                    Property = ProfileConditionValue.AudioChannels,
                    Value = maxAudioChannels.Value.ToString(CultureInfo.InvariantCulture)
                });
        }

        if (conditions.Count > 0)
        {
            // codec profile
            codecProfiles.Add(
                new CodecProfile
                {
                    Type = CodecType.Audio,
                    Container = string.Join(',', containers),
                    Conditions = conditions.ToArray()
                });
        }

        deviceProfile.CodecProfiles = codecProfiles.ToArray();

        return deviceProfile;
    }
}