Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.abr.Ewma');
  10. goog.require('shaka.hls.ManifestTextParser');
  11. goog.require('shaka.hls.Playlist');
  12. goog.require('shaka.hls.PlaylistType');
  13. goog.require('shaka.hls.Tag');
  14. goog.require('shaka.hls.Utils');
  15. goog.require('shaka.log');
  16. goog.require('shaka.media.DrmEngine');
  17. goog.require('shaka.media.InitSegmentReference');
  18. goog.require('shaka.media.ManifestParser');
  19. goog.require('shaka.media.PresentationTimeline');
  20. goog.require('shaka.media.SegmentIndex');
  21. goog.require('shaka.media.SegmentReference');
  22. goog.require('shaka.net.DataUriPlugin');
  23. goog.require('shaka.net.NetworkingEngine');
  24. goog.require('shaka.util.ArrayUtils');
  25. goog.require('shaka.util.BufferUtils');
  26. goog.require('shaka.util.ContentSteeringManager');
  27. goog.require('shaka.util.Error');
  28. goog.require('shaka.util.FakeEvent');
  29. goog.require('shaka.util.LanguageUtils');
  30. goog.require('shaka.util.ManifestParserUtils');
  31. goog.require('shaka.util.MimeUtils');
  32. goog.require('shaka.util.OperationManager');
  33. goog.require('shaka.util.Pssh');
  34. goog.require('shaka.media.SegmentUtils');
  35. goog.require('shaka.util.Timer');
  36. goog.require('shaka.util.Platform');
  37. goog.require('shaka.util.Uint8ArrayUtils');
  38. goog.require('shaka.util.XmlUtils');
  39. goog.requireType('shaka.hls.Segment');
  40. /**
  41. * HLS parser.
  42. *
  43. * @implements {shaka.extern.ManifestParser}
  44. * @export
  45. */
  46. shaka.hls.HlsParser = class {
  47. /**
  48. * Creates an HLS parser object.
  49. */
  50. constructor() {
  51. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  52. this.playerInterface_ = null;
  53. /** @private {?shaka.extern.ManifestConfiguration} */
  54. this.config_ = null;
  55. /** @private {number} */
  56. this.globalId_ = 1;
  57. /** @private {!Map.<string, string>} */
  58. this.globalVariables_ = new Map();
  59. /**
  60. * A map from group id to stream infos created from the media tags.
  61. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>}
  62. */
  63. this.groupIdToStreamInfosMap_ = new Map();
  64. /**
  65. * For media playlist lazy-loading to work in livestreams, we have to assume
  66. * that each stream of a type (video, audio, etc) has the same mappings of
  67. * sequence number to start time.
  68. * This map stores those relationships.
  69. * Only used during livestreams; we do not assume that VOD content is
  70. * aligned in that way.
  71. * @private {!Map.<string, !Map.<number, number>>}
  72. */
  73. this.mediaSequenceToStartTimeByType_ = new Map();
  74. // Set initial maps.
  75. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  76. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  77. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  78. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  79. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
  80. /**
  81. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  82. * where the URIs are the verbatim media playlist URIs as they appeared in
  83. * the master playlist.
  84. *
  85. * Used to avoid duplicates that vary only in their text stream.
  86. *
  87. * @private {!Set.<string>}
  88. */
  89. this.variantUriSet_ = new Set();
  90. /**
  91. * A map from (verbatim) media playlist URI to stream infos representing the
  92. * playlists.
  93. *
  94. * On update, used to iterate through and update from media playlists.
  95. *
  96. * On initial parse, used to iterate through and determine minimum
  97. * timestamps, offsets, and to handle TS rollover.
  98. *
  99. * During parsing, used to avoid duplicates in the async methods
  100. * createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
  101. * createStreamInfoFromVariantTags_.
  102. *
  103. * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>}
  104. */
  105. this.uriToStreamInfosMap_ = new Map();
  106. /** @private {?shaka.media.PresentationTimeline} */
  107. this.presentationTimeline_ = null;
  108. /**
  109. * The master playlist URI, after redirects.
  110. *
  111. * @private {string}
  112. */
  113. this.masterPlaylistUri_ = '';
  114. /** @private {shaka.hls.ManifestTextParser} */
  115. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  116. /**
  117. * The minimum sequence number for generated segments, when ignoring
  118. * EXT-X-PROGRAM-DATE-TIME.
  119. *
  120. * @private {number}
  121. */
  122. this.minSequenceNumber_ = -1;
  123. /**
  124. * The lowest time value for any of the streams, as defined by the
  125. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  126. *
  127. * @private {number}
  128. */
  129. this.lowestSyncTime_ = Infinity;
  130. /**
  131. * Whether the streams have previously been "finalized"; that is to say,
  132. * whether we have loaded enough streams to know information about the asset
  133. * such as timing information, live status, etc.
  134. *
  135. * @private {boolean}
  136. */
  137. this.streamsFinalized_ = false;
  138. /**
  139. * Whether the manifest specifies the codecs to use.
  140. *
  141. * @private {boolean}
  142. */
  143. this.codecInfoInManifest_ = false;
  144. /**
  145. * This timer is used to trigger the start of a manifest update. A manifest
  146. * update is async. Once the update is finished, the timer will be restarted
  147. * to trigger the next update. The timer will only be started if the content
  148. * is live content.
  149. *
  150. * @private {shaka.util.Timer}
  151. */
  152. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  153. this.onUpdate_();
  154. });
  155. /** @private {shaka.hls.HlsParser.PresentationType_} */
  156. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  157. /** @private {?shaka.extern.Manifest} */
  158. this.manifest_ = null;
  159. /** @private {number} */
  160. this.maxTargetDuration_ = 0;
  161. /** @private {number} */
  162. this.lastTargetDuration_ = Infinity;
  163. /** The target duration for partial segments.
  164. * @private {number}
  165. */
  166. this.partialTargetDuration_ = 0;
  167. /** @private {number} */
  168. this.presentationDelay_ = 0;
  169. /** @private {number} */
  170. this.lowLatencyPresentationDelay_ = 0;
  171. /** @private {shaka.util.OperationManager} */
  172. this.operationManager_ = new shaka.util.OperationManager();
  173. /** A map from closed captions' group id, to a map of closed captions info.
  174. * {group id -> {closed captions channel id -> language}}
  175. * @private {Map.<string, Map.<string, string>>}
  176. */
  177. this.groupIdToClosedCaptionsMap_ = new Map();
  178. /** @private {Map.<string, string>} */
  179. this.groupIdToCodecsMap_ = new Map();
  180. /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  181. * from the tag.
  182. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  183. * its BYTERANGE if available.
  184. * @private {!Map.<string, !shaka.media.InitSegmentReference>} */
  185. this.mapTagToInitSegmentRefMap_ = new Map();
  186. /** @private {boolean} */
  187. this.lowLatencyMode_ = false;
  188. /** @private {boolean} */
  189. this.lowLatencyByterangeOptimization_ = false;
  190. /**
  191. * An ewma that tracks how long updates take.
  192. * This is to mitigate issues caused by slow parsing on embedded devices.
  193. * @private {!shaka.abr.Ewma}
  194. */
  195. this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
  196. /** @private {?shaka.util.ContentSteeringManager} */
  197. this.contentSteeringManager_ = null;
  198. }
  199. /**
  200. * @override
  201. * @exportInterface
  202. */
  203. configure(config) {
  204. this.config_ = config;
  205. if (this.contentSteeringManager_) {
  206. this.contentSteeringManager_.configure(this.config_);
  207. }
  208. }
  209. /**
  210. * @override
  211. * @exportInterface
  212. */
  213. async start(uri, playerInterface) {
  214. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  215. this.playerInterface_ = playerInterface;
  216. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  217. const response = await this.requestManifest_([uri]);
  218. // Record the master playlist URI after redirects.
  219. this.masterPlaylistUri_ = response.uri;
  220. goog.asserts.assert(response.data, 'Response data should be non-null!');
  221. await this.parseManifest_(response.data, uri);
  222. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  223. return this.manifest_;
  224. }
  225. /**
  226. * @override
  227. * @exportInterface
  228. */
  229. stop() {
  230. // Make sure we don't update the manifest again. Even if the timer is not
  231. // running, this is safe to call.
  232. if (this.updatePlaylistTimer_) {
  233. this.updatePlaylistTimer_.stop();
  234. this.updatePlaylistTimer_ = null;
  235. }
  236. /** @type {!Array.<!Promise>} */
  237. const pending = [];
  238. if (this.operationManager_) {
  239. pending.push(this.operationManager_.destroy());
  240. this.operationManager_ = null;
  241. }
  242. this.playerInterface_ = null;
  243. this.config_ = null;
  244. this.variantUriSet_.clear();
  245. this.manifest_ = null;
  246. this.uriToStreamInfosMap_.clear();
  247. this.groupIdToStreamInfosMap_.clear();
  248. this.groupIdToCodecsMap_.clear();
  249. this.globalVariables_.clear();
  250. if (this.contentSteeringManager_) {
  251. this.contentSteeringManager_.destroy();
  252. }
  253. return Promise.all(pending);
  254. }
  255. /**
  256. * @override
  257. * @exportInterface
  258. */
  259. async update() {
  260. if (!this.isLive_()) {
  261. return;
  262. }
  263. /** @type {!Array.<!Promise>} */
  264. const updates = [];
  265. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  266. // This is necessary to correctly calculate the update time.
  267. this.lastTargetDuration_ = Infinity;
  268. // Only update active streams.
  269. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  270. for (const streamInfo of activeStreamInfos) {
  271. updates.push(this.updateStream_(streamInfo));
  272. }
  273. await Promise.all(updates);
  274. // Now that streams have been updated, notify the presentation timeline.
  275. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
  276. // If any hasEndList is false, the stream is still live.
  277. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  278. if (activeStreamInfos.length && !stillLive) {
  279. // Convert the presentation to VOD and set the duration.
  280. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  281. this.setPresentationType_(PresentationType.VOD);
  282. // The duration is the minimum of the end times of all active streams.
  283. // Non-active streams are not guaranteed to have useful maxTimestamp
  284. // values, due to the lazy-loading system, so they are ignored.
  285. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
  286. // The duration is the minimum of the end times of all streams.
  287. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  288. this.playerInterface_.updateDuration();
  289. }
  290. if (stillLive) {
  291. this.determineDuration_();
  292. }
  293. }
  294. /**
  295. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  296. * @return {!Map.<number, number>}
  297. * @private
  298. */
  299. getMediaSequenceToStartTimeFor_(streamInfo) {
  300. if (this.isLive_()) {
  301. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  302. } else {
  303. return streamInfo.mediaSequenceToStartTime;
  304. }
  305. }
  306. /**
  307. * Updates a stream.
  308. *
  309. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  310. * @return {!Promise}
  311. * @private
  312. */
  313. async updateStream_(streamInfo) {
  314. const manifestUris = [];
  315. for (const uri of streamInfo.getUris()) {
  316. const uriObj = new goog.Uri(uri);
  317. const queryData = uriObj.getQueryData();
  318. if (streamInfo.canBlockReload) {
  319. if (streamInfo.nextMediaSequence >= 0) {
  320. // Indicates that the server must hold the request until a Playlist contains
  321. // a Media Segment with a Media Sequence Number of nextMediaSequence or later.
  322. queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
  323. }
  324. if (streamInfo.nextPart >= 0) {
  325. // Indicates, in combination with _HLS_msn, that the server must hold
  326. // the request until a Playlist contains Partial Segment N of Media
  327. // Sequence Number M or later.
  328. queryData.add('_HLS_part', String(streamInfo.nextPart));
  329. }
  330. }
  331. if (streamInfo.canSkipSegments) {
  332. // Enable delta updates. Older segments will be replaced by an
  333. // 'EXT-X-SKIP' tag in the media playlist.
  334. queryData.add('_HLS_skip', 'YES');
  335. }
  336. if (queryData.getCount()) {
  337. uriObj.setQueryData(queryData);
  338. }
  339. manifestUris.push(uriObj.toString());
  340. }
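// For illustration (hypothetical host, path, and values): with blocking
// reload enabled, a playlist request built above could look like
//   https://cdn.example.com/video/rendition.m3u8?_HLS_msn=273&_HLS_part=2&_HLS_skip=YES
// using the _HLS_msn, _HLS_part, and _HLS_skip delivery directives from the
// low-latency HLS spec.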
  341. const response =
  342. await this.requestManifest_(manifestUris, /* isPlaylist= */ true);
  343. if (!streamInfo.stream.segmentIndex) {
  344. // The stream was closed since the update was first requested.
  345. return;
  346. }
  347. /** @type {shaka.hls.Playlist} */
  348. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  349. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  350. throw new shaka.util.Error(
  351. shaka.util.Error.Severity.CRITICAL,
  352. shaka.util.Error.Category.MANIFEST,
  353. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  354. }
  355. // Record the final URI after redirects.
  356. const responseUri = response.uri;
  357. if (responseUri != response.originalUri &&
  358. !streamInfo.getUris().includes(responseUri)) {
  359. streamInfo.redirectUris.push(responseUri);
  360. }
  361. /** @type {!Array.<!shaka.hls.Tag>} */
  362. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  363. 'EXT-X-DEFINE');
  364. const mediaVariables = this.parseMediaVariables_(
  365. variablesTags, responseUri);
  366. const stream = streamInfo.stream;
  367. const mediaSequenceToStartTime =
  368. this.getMediaSequenceToStartTimeFor_(streamInfo);
  369. const {keyIds, drmInfos} = this.parseDrmInfo_(playlist, stream.mimeType);
  370. const keysAreEqual =
  371. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
  372. if (!keysAreEqual(stream.keyIds, keyIds)) {
  373. stream.keyIds = keyIds;
  374. stream.drmInfos = drmInfos;
  375. this.playerInterface_.newDrmInfo(stream);
  376. }
  377. const {segments, bandwidth} = this.createSegments_(
  378. playlist, stream, mediaSequenceToStartTime, mediaVariables,
  379. streamInfo.getUris);
  380. stream.bandwidth = bandwidth;
  381. stream.segmentIndex.mergeAndEvict(
  382. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  383. if (segments.length) {
  384. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  385. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  386. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  387. playlist.tags, 'EXT-X-SKIP');
  388. const skippedSegments =
  389. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  390. const {nextMediaSequence, nextPart} =
  391. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  392. streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
  393. streamInfo.nextPart = nextPart;
  394. const playlistStartTime = mediaSequenceToStartTime.get(
  395. mediaSequenceNumber);
  396. stream.segmentIndex.evict(playlistStartTime);
  397. }
  398. const oldSegment = segments[0];
  399. goog.asserts.assert(oldSegment, 'Should have segments!');
  400. streamInfo.minTimestamp = oldSegment.startTime;
  401. const newestSegment = segments[segments.length - 1];
  402. goog.asserts.assert(newestSegment, 'Should have segments!');
  403. streamInfo.maxTimestamp = newestSegment.endTime;
  404. // Once the last segment has been added to the playlist,
  405. // #EXT-X-ENDLIST tag will be appended.
  406. // If that happened, treat the rest of the EVENT presentation as VOD.
  407. const endListTag =
  408. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  409. if (endListTag) {
  410. // Flag this for later. We don't convert the whole presentation into VOD
  411. // until we've seen the ENDLIST tag for all active playlists.
  412. streamInfo.hasEndList = true;
  413. }
  414. this.determineLastTargetDuration_(playlist);
  415. }
  416. /**
  417. * @override
  418. * @exportInterface
  419. */
  420. onExpirationUpdated(sessionId, expiration) {
  421. // No-op
  422. }
  423. /**
  424. * @override
  425. * @exportInterface
  426. */
  427. onInitialVariantChosen(variant) {
  428. // No-op
  429. }
  430. /**
  431. * @override
  432. * @exportInterface
  433. */
  434. banLocation(uri) {
  435. if (this.contentSteeringManager_) {
  436. this.contentSteeringManager_.banLocation(uri);
  437. }
  438. }
  439. /**
  440. * Align the streams by sequence number by dropping early segments. Then
  441. * offset the streams to begin at presentation time 0.
  442. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  443. * @private
  444. */
  445. syncStreamsWithSequenceNumber_(streamInfos) {
  446. // We assume that, when this is first called, we have enough info to
  447. // determine how to sync the streams by sequence number (e.g. we have both
  448. // a video and an audio, and all other videos and audios match those).
  449. // Thus, we only need to calculate this once.
  450. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  451. // Sync using media sequence number. Find the highest starting sequence
  452. // number among all streams. Later, we will drop any references to
  453. // earlier segments in other streams, then offset everything back to 0.
  454. for (const streamInfo of streamInfos) {
  455. const segmentIndex = streamInfo.stream.segmentIndex;
  456. goog.asserts.assert(segmentIndex,
  457. 'Only loaded streams should be synced');
  458. const mediaSequenceToStartTime =
  459. this.getMediaSequenceToStartTimeFor_(streamInfo);
  460. const segment0 = segmentIndex.earliestReference();
  461. if (segment0) {
  462. // This looks inefficient, but iteration order is insertion order.
  463. // So the very first entry should be the one we want.
  464. // We assert that this holds true so that we are alerted by debug
  465. // builds and tests if it changes. We still do a loop, though, so
  466. // that the code functions correctly in production no matter what.
  467. if (goog.DEBUG) {
  468. const firstSequenceStartTime =
  469. mediaSequenceToStartTime.values().next().value;
  470. goog.asserts.assert(
  471. firstSequenceStartTime == segment0.startTime,
  472. 'Sequence number map is not ordered as expected!');
  473. }
  474. for (const [sequence, start] of mediaSequenceToStartTime) {
  475. if (start == segment0.startTime) {
  476. if (updateMinSequenceNumber) {
  477. this.minSequenceNumber_ = Math.max(
  478. this.minSequenceNumber_, sequence);
  479. }
  480. // Even if we already have decided on a value for
  481. // |this.minSequenceNumber_|, we still need to determine the first
  482. // sequence number for the stream, to offset it in the code below.
  483. streamInfo.firstSequenceNumber = sequence;
  484. break;
  485. }
  486. }
  487. }
  488. }
  489. if (this.minSequenceNumber_ < 0) {
  490. // Nothing to sync.
  491. return;
  492. }
  493. shaka.log.debug('Syncing HLS streams against base sequence number:',
  494. this.minSequenceNumber_);
  495. for (const streamInfo of streamInfos) {
  496. const segmentIndex = streamInfo.stream.segmentIndex;
  497. if (segmentIndex) {
  498. // Drop any earlier references.
  499. const numSegmentsToDrop =
  500. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  501. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  502. // Now adjust timestamps back to begin at 0.
  503. const segmentN = segmentIndex.earliestReference();
  504. if (segmentN) {
  505. const streamOffset = -segmentN.startTime;
  506. // Modify all SegmentReferences equally.
  507. streamInfo.stream.segmentIndex.offset(streamOffset);
  508. // Update other parts of streamInfo the same way.
  509. this.offsetStreamInfo_(streamInfo, streamOffset);
  510. }
  511. }
  512. }
  513. }
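// A minimal worked example of the sync above, with hypothetical sequence
// numbers: if a loaded video stream starts at media sequence 100 and a loaded
// audio stream starts at 102, minSequenceNumber_ becomes 102, the first two
// video references are dropped, and both streams are then offset so their
// earliest remaining segments begin at presentation time 0.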
  514. /**
  515. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  516. * segments. Also normalizes segment times so that the earliest segment in
  517. * any stream is at time 0.
  518. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  519. * @private
  520. */
  521. syncStreamsWithProgramDateTime_(streamInfos) {
  522. // We assume that, when this is first called, we have enough info to
  523. // determine how to use the program date times (e.g. we have both a video
  524. // and an audio, and all other videos and audios match those).
  525. // Thus, we only need to calculate this once.
  526. if (this.lowestSyncTime_ == Infinity) {
  527. for (const streamInfo of streamInfos) {
  528. const segmentIndex = streamInfo.stream.segmentIndex;
  529. goog.asserts.assert(segmentIndex,
  530. 'Only loaded streams should be synced');
  531. const segment0 = segmentIndex.earliestReference();
  532. if (segment0 != null && segment0.syncTime != null) {
  533. this.lowestSyncTime_ =
  534. Math.min(this.lowestSyncTime_, segment0.syncTime);
  535. }
  536. }
  537. }
  538. const lowestSyncTime = this.lowestSyncTime_;
  539. if (lowestSyncTime == Infinity) {
  540. // Nothing to sync.
  541. return;
  542. }
  543. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  544. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  545. const segmentIndex = streamInfo.stream.segmentIndex;
  546. if (segmentIndex != null) {
  547. // A segment's startTime should be based on its syncTime vs the lowest
  548. // syncTime across all streams. The earliest segment sync time from
  549. // any stream will become presentation time 0. If two streams start
  550. // e.g. 6 seconds apart in syncTime, then their first segments will
  551. // also start 6 seconds apart in presentation time.
  552. const segment0 = segmentIndex.earliestReference();
  553. if (segment0.syncTime == null) {
  554. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  555. streamInfo.getUris(),
  556. 'Expect AV sync issues!');
  557. } else {
  558. // Stream metadata are offset by a fixed amount based on the
  559. // first segment.
  560. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  561. const streamOffset = segment0TargetTime - segment0.startTime;
  562. this.offsetStreamInfo_(streamInfo, streamOffset);
  563. // This is computed across all segments separately to manage
  564. // accumulated drift in durations.
  565. for (const segment of segmentIndex) {
  566. segment.syncAgainst(lowestSyncTime);
  567. }
  568. }
  569. }
  570. }
  571. }
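// A minimal worked example of the sync above, with hypothetical sync times:
// if stream A's first segment has syncTime 1000 and stream B's has 1006,
// lowestSyncTime_ is 1000; A is offset so its first segment starts at
// presentation time 0, and B so its first segment starts at 6, preserving
// the 6-second gap between their EXT-X-PROGRAM-DATE-TIME values.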
  572. /**
  573. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  574. * @param {number} offset
  575. * @private
  576. */
  577. offsetStreamInfo_(streamInfo, offset) {
  578. // Adjust our accounting of the minimum timestamp.
  579. streamInfo.minTimestamp += offset;
  580. // Adjust our accounting of the maximum timestamp.
  581. streamInfo.maxTimestamp += offset;
  582. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  583. 'Negative maxTimestamp after adjustment!');
  584. // Update our map from sequence number to start time.
  585. const mediaSequenceToStartTime =
  586. this.getMediaSequenceToStartTimeFor_(streamInfo);
  587. for (const [key, value] of mediaSequenceToStartTime) {
  588. mediaSequenceToStartTime.set(key, value + offset);
  589. }
  590. shaka.log.debug('Offset', offset, 'applied to',
  591. streamInfo.getUris());
  592. }
  593. /**
  594. * Parses the manifest.
  595. *
  596. * @param {BufferSource} data
  597. * @param {string} uri
  598. * @return {!Promise}
  599. * @private
  600. */
  601. async parseManifest_(data, uri) {
  602. const Utils = shaka.hls.Utils;
  603. goog.asserts.assert(this.masterPlaylistUri_,
  604. 'Master playlist URI must be set before calling parseManifest_!');
  605. const playlist = this.manifestTextParser_.parsePlaylist(data);
  606. /** @type {!Array.<!shaka.hls.Tag>} */
  607. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  608. /** @type {!Array.<!shaka.extern.Variant>} */
  609. let variants = [];
  610. /** @type {!Array.<!shaka.extern.Stream>} */
  611. let textStreams = [];
  612. /** @type {!Array.<!shaka.extern.Stream>} */
  613. let imageStreams = [];
  614. // Parsing a media playlist results in a single-variant stream.
  615. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  616. const getUris = () => {
  617. return [uri];
  618. };
  619. // Get necessary info for this stream. These are things we would normally
  620. // find from the master playlist (e.g. from values on EXT-X-MEDIA tags).
  621. const basicInfo =
  622. await this.getMediaPlaylistBasicInfo_(playlist, getUris);
  623. const type = basicInfo.type;
  624. const mimeType = basicInfo.mimeType;
  625. const codecs = basicInfo.codecs;
  626. const languageValue = basicInfo.language;
  627. const height = basicInfo.height;
  628. const width = basicInfo.width;
  629. const channelsCount = basicInfo.channelCount;
  630. const sampleRate = basicInfo.sampleRate;
  631. const closedCaptions = basicInfo.closedCaptions;
  632. // Some values we cannot figure out, and aren't important enough to ask
  633. // the user to provide through config values. A lot of these are only
  634. // relevant to ABR, which isn't necessary if there's only one variant.
  635. // So these unknowns should be set to false or null, largely.
  636. const spatialAudio = false;
  637. const characteristics = null;
  638. const forced = false; // Only relevant for text.
  639. const primary = true; // This is the only stream!
  640. const name = 'Media Playlist';
  641. // Make the stream info, with those values.
  642. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  643. this.globalId_++, playlist, getUris, uri, codecs, type,
  644. languageValue, primary, name, channelsCount, closedCaptions,
  645. characteristics, forced, sampleRate, spatialAudio, mimeType);
  646. this.uriToStreamInfosMap_.set(uri, streamInfo);
  647. if (type == 'video') {
  648. this.addVideoAttributes_(streamInfo.stream, width, height,
  649. /* frameRate= */ null, /* videoRange= */ null,
  650. /* videoLayout= */ null);
  651. }
  652. // Wrap the stream from that stream info with a variant.
  653. variants.push({
  654. id: 0,
  655. language: this.getLanguage_(languageValue),
  656. disabledUntilTime: 0,
  657. primary: true,
  658. audio: type == 'audio' ? streamInfo.stream : null,
  659. video: type == 'video' ? streamInfo.stream : null,
  660. bandwidth: streamInfo.stream.bandwidth || 0,
  661. allowedByApplication: true,
  662. allowedByKeySystem: true,
  663. decodingInfos: [],
  664. });
  665. } else {
  666. this.parseMasterVariables_(variablesTags);
  667. /** @type {!Array.<!shaka.hls.Tag>} */
  668. const mediaTags = Utils.filterTagsByName(
  669. playlist.tags, 'EXT-X-MEDIA');
  670. /** @type {!Array.<!shaka.hls.Tag>} */
  671. const variantTags = Utils.filterTagsByName(
  672. playlist.tags, 'EXT-X-STREAM-INF');
  673. /** @type {!Array.<!shaka.hls.Tag>} */
  674. const imageTags = Utils.filterTagsByName(
  675. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  676. /** @type {!Array.<!shaka.hls.Tag>} */
  677. const iFrameTags = Utils.filterTagsByName(
  678. playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
  679. /** @type {!Array.<!shaka.hls.Tag>} */
  680. const sessionKeyTags = Utils.filterTagsByName(
  681. playlist.tags, 'EXT-X-SESSION-KEY');
  682. /** @type {!Array.<!shaka.hls.Tag>} */
  683. const sessionDataTags = Utils.filterTagsByName(
  684. playlist.tags, 'EXT-X-SESSION-DATA');
  685. /** @type {!Array.<!shaka.hls.Tag>} */
  686. const contentSteeringTags = Utils.filterTagsByName(
  687. playlist.tags, 'EXT-X-CONTENT-STEERING');
  688. this.processSessionData_(sessionDataTags);
  689. await this.processContentSteering_(contentSteeringTags);
  690. this.parseCodecs_(variantTags);
  691. this.parseClosedCaptions_(mediaTags);
  692. variants =
  693. this.createVariantsForTags_(variantTags, sessionKeyTags, mediaTags);
  694. textStreams = this.parseTexts_(mediaTags);
  695. imageStreams = await this.parseImages_(imageTags, iFrameTags);
  696. }
  697. // Make sure that the parser has not been destroyed.
  698. if (!this.playerInterface_) {
  699. throw new shaka.util.Error(
  700. shaka.util.Error.Severity.CRITICAL,
  701. shaka.util.Error.Category.PLAYER,
  702. shaka.util.Error.Code.OPERATION_ABORTED);
  703. }
  704. // This assert is our own sanity check.
  705. goog.asserts.assert(this.presentationTimeline_ == null,
  706. 'Presentation timeline created early!');
  707. // We don't know if the presentation is VOD or live until we parse at least
  708. // one media playlist, so make a VOD-style presentation timeline for now
  709. // and change the type later if we discover this is live.
  710. // Since the player will load the first variant chosen early in the process,
  711. // there isn't a window during playback where the live-ness is unknown.
  712. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  713. /* presentationStartTime= */ null, /* delay= */ 0);
  714. this.presentationTimeline_.setStatic(true);
  715. // Single-variant streams aren't lazy-loaded, so for them we already have
  716. // enough info here to determine the presentation type and duration.
  717. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  718. if (this.isLive_()) {
  719. this.changePresentationTimelineToLive_(playlist);
  720. const delay = this.getUpdatePlaylistDelay_();
  721. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  722. }
  723. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  724. this.finalizeStreams_(streamInfos);
  725. this.determineDuration_();
  726. }
  727. this.manifest_ = {
  728. presentationTimeline: this.presentationTimeline_,
  729. variants,
  730. textStreams,
  731. imageStreams,
  732. offlineSessionIds: [],
  733. minBufferTime: 0,
  734. sequenceMode: this.config_.hls.sequenceMode,
  735. ignoreManifestTimestampsInSegmentsMode:
  736. this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
  737. type: shaka.media.ManifestParser.HLS,
  738. serviceDescription: null,
  739. };
  740. // If there is no 'CODECS' attribute in the manifest and codec guessing is
  741. // disabled, we need to create the segment indexes now so that missing info
  742. // can be parsed from the media data and added to the stream objects.
  743. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  744. const createIndexes = [];
  745. for (const variant of this.manifest_.variants) {
  746. if (variant.audio && variant.audio.codecs === '') {
  747. createIndexes.push(variant.audio.createSegmentIndex());
  748. }
  749. if (variant.video && variant.video.codecs === '') {
  750. createIndexes.push(variant.video.createSegmentIndex());
  751. }
  752. }
  753. await Promise.all(createIndexes);
  754. }
  755. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  756. }
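// In short: when the parsed playlist type is MEDIA, a single variant is
// synthesized above from that one playlist; otherwise the master-playlist
// tags (EXT-X-MEDIA, EXT-X-STREAM-INF, the image/i-frame stream tags,
// EXT-X-SESSION-KEY/DATA, and EXT-X-CONTENT-STEERING) are processed to build
// variants, text streams, and image streams.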
  757. /**
  758. * @param {shaka.hls.Playlist} playlist
  759. * @param {function():!Array.<string>} getUris
  760. * @return {!Promise.<shaka.media.SegmentUtils.BasicInfo>}
  761. * @private
  762. */
  763. async getMediaPlaylistBasicInfo_(playlist, getUris) {
  764. const HlsParser = shaka.hls.HlsParser;
  765. const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  766. this.config_.hls.mediaPlaylistFullMimeType);
  767. if (!playlist.segments.length) {
  768. return defaultBasicInfo;
  769. }
  770. const middleSegmentIdx = Math.trunc((playlist.segments.length - 1) / 2);
  771. const middleSegment = playlist.segments[middleSegmentIdx];
  772. const middleSegmentUris = shaka.hls.Utils.constructSegmentUris(
  773. getUris(),
  774. middleSegment.verbatimSegmentUri);
  775. const middleSegmentUri = middleSegmentUris[0];
  776. const parsedUri = new goog.Uri(middleSegmentUri);
  777. const extension = parsedUri.getPath().split('.').pop();
  778. const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  779. if (rawMimeType) {
  780. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  781. rawMimeType);
  782. }
  783. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  784. let initData = null;
  785. const initSegmentRef = this.getInitSegmentReference_(
  786. playlist, middleSegment.tags, getUris);
  787. if (initSegmentRef) {
  788. const initSegmentRequest = shaka.net.NetworkingEngine.makeRequest(
  789. initSegmentRef.getUris(), this.config_.retryParameters);
  790. const initType =
  791. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  792. const initResponse = await this.makeNetworkRequest_(
  793. initSegmentRequest, requestType, {type: initType});
  794. initData = initResponse.data;
  795. }
  796. const segmentRequest = shaka.net.NetworkingEngine.makeRequest(
  797. middleSegmentUris, this.config_.retryParameters);
  798. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  799. const response = await this.makeNetworkRequest_(
  800. segmentRequest, requestType, {type});
  801. let contentMimeType = response.headers['content-type'];
  802. if (contentMimeType) {
  803. // Split the MIME type in case the server sent additional parameters.
  804. contentMimeType = contentMimeType.split(';')[0].toLowerCase();
  805. }
  806. if (extension == 'ts' || contentMimeType == 'video/mp2t') {
  807. const basicInfo =
  808. shaka.media.SegmentUtils.getBasicInfoFromTs(response.data);
  809. if (basicInfo) {
  810. return basicInfo;
  811. }
  812. } else if (extension == 'mp4' || extension == 'cmfv' ||
  813. contentMimeType == 'video/mp4' || contentMimeType == 'audio/mp4' ||
  814. contentMimeType == 'video/iso.segment') {
  815. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  816. initData, response.data);
  817. if (basicInfo) {
  818. return basicInfo;
  819. }
  820. }
  821. return defaultBasicInfo;
  822. }
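// Rough sketch of the probing above, with hypothetical inputs: if the chosen
// segment URI ends in a raw-format extension (e.g. '.aac'), the basic info is
// taken from the extension-to-MIME-type table without any network request;
// otherwise the segment (and its init segment, if present) is fetched and the
// TS or MP4 data is inspected. If neither path yields info, the parser falls
// back to config.hls.mediaPlaylistFullMimeType.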
  823. /** @private */
  824. determineDuration_() {
  825. goog.asserts.assert(this.presentationTimeline_,
  826. 'Presentation timeline not created!');
  827. if (this.isLive_()) {
  828. // The spec says nothing much about seeking in live content, but Safari's
  829. // built-in HLS implementation does not allow it. Therefore we will set
  830. // the availability window equal to the presentation delay. The player
  831. // will be able to buffer ahead three segments, but the seek window will
  832. // be zero-sized.
  833. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  834. if (this.presentationType_ == PresentationType.LIVE) {
  835. let segmentAvailabilityDuration = this.getLiveDuration_();
  836. // This defaults to the presentation delay, which has the effect of
  837. // making the live stream unseekable. This is consistent with Apple's
  838. // HLS implementation.
  839. if (this.config_.hls.useSafariBehaviorForLive) {
  840. segmentAvailabilityDuration = this.presentationTimeline_.getDelay();
  841. }
  842. // The app can override that with a longer duration, to allow seeking.
  843. if (!isNaN(this.config_.availabilityWindowOverride)) {
  844. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  845. }
  846. this.presentationTimeline_.setSegmentAvailabilityDuration(
  847. segmentAvailabilityDuration);
  848. }
  849. } else {
  850. // Use the minimum duration as the presentation duration.
  851. this.presentationTimeline_.setDuration(this.getMinDuration_());
  852. }
  853. // This is the first point where we have a meaningful presentation start
  854. // time, and we need to tell PresentationTimeline that so that it can
  855. // maintain consistency from here on.
  856. this.presentationTimeline_.lockStartTime();
  857. // This asserts that the live edge is being calculated from segment times.
  858. // For VOD and event streams, this check should still pass.
  859. goog.asserts.assert(
  860. !this.presentationTimeline_.usingPresentationStartTime(),
  861. 'We should not be using the presentation start time in HLS!');
  862. }
  863. /**
  864. * Get the variables defined in EXT-X-DEFINE tags, and store them in a map.
  865. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  866. * @private
  867. */
  868. parseMasterVariables_(tags) {
  869. const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
  870. for (const variableTag of tags) {
  871. const name = variableTag.getAttributeValue('NAME');
  872. const value = variableTag.getAttributeValue('VALUE');
  873. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  874. if (name && value) {
  875. if (!this.globalVariables_.has(name)) {
  876. this.globalVariables_.set(name, value);
  877. }
  878. }
  879. if (queryParam) {
  880. const queryParamValue = queryParams.get(queryParam)[0];
  881. if (queryParamValue && !this.globalVariables_.has(queryParamValue)) {
  882. this.globalVariables_.set(queryParam, queryParamValue);
  883. }
  884. }
  885. }
  886. }
  887. /**
  888. * Get the variables defined in EXT-X-DEFINE tags, and store them in a map.
  889. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  890. * @param {string} uri Media playlist URI.
  891. * @return {!Map.<string, string>}
  892. * @private
  893. */
  894. parseMediaVariables_(tags, uri) {
  895. const queryParams = new goog.Uri(uri).getQueryData();
  896. const mediaVariables = new Map();
  897. for (const variableTag of tags) {
  898. const name = variableTag.getAttributeValue('NAME');
  899. const value = variableTag.getAttributeValue('VALUE');
  900. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  901. const mediaImport = variableTag.getAttributeValue('IMPORT');
  902. if (name && value) {
  903. if (!mediaVariables.has(name)) {
  904. mediaVariables.set(name, value);
  905. }
  906. }
  907. if (queryParam) {
  908. const queryParamValue = queryParams.get(queryParam)[0];
  909. if (queryParamValue && !mediaVariables.has(queryParamValue)) {
  910. mediaVariables.set(queryParam, queryParamValue);
  911. }
  912. }
  913. if (mediaImport) {
  914. const globalValue = this.globalVariables_.get(mediaImport);
  915. if (globalValue) {
  916. mediaVariables.set(mediaImport, globalValue);
  917. }
  918. }
  919. }
  920. return mediaVariables;
  921. }
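// For illustration, hypothetical EXT-X-DEFINE tags handled above and in
// parseMasterVariables_:
//   #EXT-X-DEFINE:NAME="token",VALUE="abc123"   (explicit NAME/VALUE pair)
//   #EXT-X-DEFINE:QUERYPARAM="session"          (value read from the playlist URI's query string)
//   #EXT-X-DEFINE:IMPORT="token"                (media playlist imports a master playlist variable)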
  922. /**
  923. * Get the codecs of each variant tag, and store them in a map from
  924. * audio/video/subtitle group id to the codecs list.
  925. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  926. * @private
  927. */
  928. parseCodecs_(tags) {
  929. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  930. for (const variantTag of tags) {
  931. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  932. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  933. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  934. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  935. if (subGroupId) {
  936. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  937. ContentType.TEXT, allCodecs);
  938. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  939. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  940. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  941. }
  942. if (audioGroupId) {
  943. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  944. ContentType.AUDIO, allCodecs);
  945. if (!codecs) {
  946. codecs = this.config_.hls.defaultAudioCodec;
  947. }
  948. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  949. }
  950. if (videoGroupId) {
  951. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  952. ContentType.VIDEO, allCodecs);
  953. if (!codecs) {
  954. codecs = this.config_.hls.defaultVideoCodec;
  955. }
  956. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  957. }
  958. }
  959. }
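// For illustration, a hypothetical variant tag feeding the maps built above:
//   #EXT-X-STREAM-INF:BANDWIDTH=2000000,RESOLUTION=1280x720,CODECS="avc1.64001f,mp4a.40.2",AUDIO="aud1",VIDEO="vid1"
// would store "mp4a.40.2" for group "aud1" and "avc1.64001f" for group "vid1".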
  960. /**
  961. * Process EXT-X-SESSION-DATA tags.
  962. *
  963. * @param {!Array.<!shaka.hls.Tag>} tags
  964. * @private
  965. */
  966. processSessionData_(tags) {
  967. for (const tag of tags) {
  968. const id = tag.getAttributeValue('DATA-ID');
  969. const uri = tag.getAttributeValue('URI');
  970. const language = tag.getAttributeValue('LANGUAGE');
  971. const value = tag.getAttributeValue('VALUE');
  972. const data = (new Map()).set('id', id);
  973. if (uri) {
  974. data.set('uri', shaka.hls.Utils.constructSegmentUris(
  975. [this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
  976. }
  977. if (language) {
  978. data.set('language', language);
  979. }
  980. if (value) {
  981. data.set('value', value);
  982. }
  983. const event = new shaka.util.FakeEvent('sessiondata', data);
  984. if (this.playerInterface_) {
  985. this.playerInterface_.onEvent(event);
  986. }
  987. }
  988. }
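// For illustration, a hypothetical tag handled above:
//   #EXT-X-SESSION-DATA:DATA-ID="com.example.movie.title",VALUE="Example Movie",LANGUAGE="en"
// is surfaced to the application as a 'sessiondata' event whose data map
// contains the id, value, and language (and a resolved uri when URI is used
// instead of VALUE).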
  989. /**
  990. * Process EXT-X-CONTENT-STEERING tags.
  991. *
  992. * @param {!Array.<!shaka.hls.Tag>} tags
  993. * @return {!Promise}
  994. * @private
  995. */
  996. async processContentSteering_(tags) {
  997. if (!this.playerInterface_ || !this.config_) {
  998. return;
  999. }
  1000. let contentSteeringPromise;
  1001. for (const tag of tags) {
  1002. const defaultPathwayId = tag.getAttributeValue('PATHWAY-ID');
  1003. const uri = tag.getAttributeValue('SERVER-URI');
  1004. if (!defaultPathwayId || !uri) {
  1005. continue;
  1006. }
  1007. this.contentSteeringManager_ =
  1008. new shaka.util.ContentSteeringManager(this.playerInterface_);
  1009. this.contentSteeringManager_.configure(this.config_);
  1010. this.contentSteeringManager_.setBaseUris([this.masterPlaylistUri_]);
  1011. this.contentSteeringManager_.setManifestType(
  1012. shaka.media.ManifestParser.HLS);
  1013. this.contentSteeringManager_.setDefaultPathwayId(defaultPathwayId);
  1014. contentSteeringPromise =
  1015. this.contentSteeringManager_.requestInfo(uri);
  1016. break;
  1017. }
  1018. await contentSteeringPromise;
  1019. }
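// For illustration, a hypothetical tag handled above:
//   #EXT-X-CONTENT-STEERING:SERVER-URI="https://steering.example.com/manifest.json",PATHWAY-ID="CDN-A"
// creates the ContentSteeringManager, sets "CDN-A" as the default pathway,
// and requests the steering manifest from the given SERVER-URI.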
  1020. /**
  1021. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  1022. * Create text streams for Subtitles, but not Closed Captions.
  1023. *
  1024. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1025. * @return {!Array.<!shaka.extern.Stream>}
  1026. * @private
  1027. */
  1028. parseTexts_(mediaTags) {
  1029. // Create text stream for each Subtitle media tag.
  1030. const subtitleTags =
  1031. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  1032. const textStreams = subtitleTags.map((tag) => {
  1033. const disableText = this.config_.disableText;
  1034. if (disableText) {
  1035. return null;
  1036. }
  1037. try {
  1038. return this.createStreamInfoFromMediaTags_([tag], new Map()).stream;
  1039. } catch (e) {
  1040. if (this.config_.hls.ignoreTextStreamFailures) {
  1041. return null;
  1042. }
  1043. throw e;
  1044. }
  1045. });
  1046. const type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1047. // Set the codecs for text streams.
  1048. for (const tag of subtitleTags) {
  1049. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1050. const codecs = this.groupIdToCodecsMap_.get(groupId);
  1051. if (codecs) {
  1052. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  1053. if (textStreamInfos) {
  1054. for (const textStreamInfo of textStreamInfos) {
  1055. textStreamInfo.stream.codecs = codecs;
  1056. textStreamInfo.stream.mimeType =
  1057. this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1058. this.guessMimeTypeFallback_(type);
  1059. }
  1060. }
  1061. }
  1062. }
  1063. // Do not create text streams for Closed captions.
  1064. return textStreams.filter((s) => s);
  1065. }
  1066. /**
  1067. * @param {!Array.<!shaka.hls.Tag>} imageTags from the playlist.
  1068. * @param {!Array.<!shaka.hls.Tag>} iFrameTags from the playlist.
  1069. * @return {!Promise.<!Array.<!shaka.extern.Stream>>}
  1070. * @private
  1071. */
  1072. async parseImages_(imageTags, iFrameTags) {
  1073. // Create image stream for each image tag.
  1074. const imageStreamPromises = imageTags.map(async (tag) => {
  1075. const disableThumbnails = this.config_.disableThumbnails;
  1076. if (disableThumbnails) {
  1077. return null;
  1078. }
  1079. try {
  1080. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  1081. return streamInfo.stream;
  1082. } catch (e) {
  1083. if (this.config_.hls.ignoreImageStreamFailures) {
  1084. return null;
  1085. }
  1086. throw e;
  1087. }
  1088. }).concat(iFrameTags.map((tag) => {
  1089. const disableThumbnails = this.config_.disableThumbnails;
  1090. if (disableThumbnails) {
  1091. return null;
  1092. }
  1093. try {
  1094. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1095. if (streamInfo.stream.codecs !== 'mjpg') {
  1096. return null;
  1097. }
  1098. return streamInfo.stream;
  1099. } catch (e) {
  1100. if (this.config_.hls.ignoreImageStreamFailures) {
  1101. return null;
  1102. }
  1103. throw e;
  1104. }
  1105. }));
  1106. const imageStreams = await Promise.all(imageStreamPromises);
  1107. return imageStreams.filter((s) => s);
  1108. }
  1109. /**
  1110. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1111. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1112. * @private
  1113. */
  1114. createStreamInfosFromMediaTags_(mediaTags, groupIdPathwayIdMapping) {
  1115. // Filter out subtitles and media tags without uri.
  1116. mediaTags = mediaTags.filter((tag) => {
  1117. const uri = tag.getAttributeValue('URI') || '';
  1118. const type = tag.getAttributeValue('TYPE');
  1119. return type != 'SUBTITLES' && uri != '';
  1120. });
  1121. const groupedTags = {};
  1122. for (const tag of mediaTags) {
  1123. const key = tag.getTagKey();
  1124. if (!groupedTags[key]) {
  1125. groupedTags[key] = [tag];
  1126. } else {
  1127. groupedTags[key].push(tag);
  1128. }
  1129. }
  1130. for (const key in groupedTags) {
  1131. // Create stream info for each audio / video media grouped tag.
  1132. this.createStreamInfoFromMediaTags_(
  1133. groupedTags[key], groupIdPathwayIdMapping);
  1134. }
  1135. }
  1136. /**
  1137. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1138. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  1139. * from the playlist.
  1140. * @param {!Array.<!shaka.hls.Tag>} mediaTags EXT-X-MEDIA tags from the
  1141. * playlist.
  1142. * @return {!Array.<!shaka.extern.Variant>}
  1143. * @private
  1144. */
  1145. createVariantsForTags_(tags, sessionKeyTags, mediaTags) {
  1146. // EXT-X-SESSION-KEY processing
  1147. const drmInfos = [];
  1148. const keyIds = new Set();
  1149. if (sessionKeyTags.length > 0) {
  1150. for (const drmTag of sessionKeyTags) {
  1151. const method = drmTag.getRequiredAttrValue('METHOD');
  1152. if (method != 'NONE' && method != 'AES-128') {
  1153. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1154. // defaults to "identity".
  1155. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1156. const keyFormat =
  1157. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1158. const drmParser =
  1159. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  1160. const drmInfo = drmParser ?
  1161. drmParser(drmTag, /* mimeType= */ '') : null;
  1162. if (drmInfo) {
  1163. if (drmInfo.keyIds) {
  1164. for (const keyId of drmInfo.keyIds) {
  1165. keyIds.add(keyId);
  1166. }
  1167. }
  1168. drmInfos.push(drmInfo);
  1169. } else {
  1170. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1171. }
  1172. }
  1173. }
  1174. }
  1175. const groupedTags = {};
  1176. for (const tag of tags) {
  1177. const key = tag.getTagKey();
  1178. if (!groupedTags[key]) {
  1179. groupedTags[key] = [tag];
  1180. } else {
  1181. groupedTags[key].push(tag);
  1182. }
  1183. }
  1184. const allVariants = [];
  1185. // Create variants for each group of variant tag.
  1186. for (const key in groupedTags) {
  1187. const tags = groupedTags[key];
  1188. const firstTag = tags[0];
  1189. const frameRate = firstTag.getAttributeValue('FRAME-RATE');
  1190. const bandwidth =
  1191. Number(firstTag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  1192. Number(firstTag.getRequiredAttrValue('BANDWIDTH'));
  1193. const resolution = firstTag.getAttributeValue('RESOLUTION');
  1194. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1195. const videoRange = firstTag.getAttributeValue('VIDEO-RANGE');
  1196. let videoLayout = firstTag.getAttributeValue('REQ-VIDEO-LAYOUT');
  1197. if (videoLayout && videoLayout.includes(',')) {
  1198. // If multiple video layout strings are present, pick the first valid
  1199. // one.
  1200. const layoutStrings = videoLayout.split(',').filter((layoutString) => {
  1201. return layoutString == 'CH-STEREO' || layoutString == 'CH-MONO';
  1202. });
  1203. videoLayout = layoutStrings[0];
  1204. }
  1205. // According to the HLS spec:
  1206. // By default a video variant is monoscopic, so an attribute
  1207. // consisting entirely of REQ-VIDEO-LAYOUT="CH-MONO" is unnecessary
  1208. // and SHOULD NOT be present.
  1209. videoLayout = videoLayout || 'CH-MONO';
  1210. const streamInfos = this.createStreamInfosForVariantTags_(tags,
  1211. mediaTags, resolution, frameRate);
  1212. goog.asserts.assert(streamInfos.audio.length ||
  1213. streamInfos.video.length, 'We should have created a stream!');
  1214. allVariants.push(...this.createVariants_(
  1215. streamInfos.audio,
  1216. streamInfos.video,
  1217. bandwidth,
  1218. width,
  1219. height,
  1220. frameRate,
  1221. videoRange,
  1222. videoLayout,
  1223. drmInfos,
  1224. keyIds));
  1225. }
  1226. return allVariants.filter((variant) => variant != null);
  1227. }
  1228. /**
  1229. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  1230. * related media tags.
  1231. *
  1232. * @param {!Array.<!shaka.hls.Tag>} tags
  1233. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1234. * @param {?string} resolution
  1235. * @param {?string} frameRate
  1236. * @return {!shaka.hls.HlsParser.StreamInfos}
  1237. * @private
  1238. */
  1239. createStreamInfosForVariantTags_(tags, mediaTags, resolution, frameRate) {
  1240. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1241. /** @type {shaka.hls.HlsParser.StreamInfos} */
  1242. const res = {
  1243. audio: [],
  1244. video: [],
  1245. };
  1246. const groupIdPathwayIdMapping = new Map();
  1247. const globalGroupIds = [];
  1248. let isAudioGroup = false;
  1249. let isVideoGroup = false;
  1250. for (const tag of tags) {
  1251. const audioGroupId = tag.getAttributeValue('AUDIO');
  1252. const videoGroupId = tag.getAttributeValue('VIDEO');
  1253. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  1254. 'Unexpected: both video and audio described by media tags!');
  1255. const groupId = audioGroupId || videoGroupId;
  1256. if (!groupId) {
  1257. continue;
  1258. }
  1259. if (!globalGroupIds.includes(groupId)) {
  1260. globalGroupIds.push(groupId);
  1261. }
  1262. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1263. if (pathwayId) {
  1264. groupIdPathwayIdMapping.set(groupId, pathwayId);
  1265. }
  1266. if (audioGroupId) {
  1267. isAudioGroup = true;
  1268. } else if (videoGroupId) {
  1269. isVideoGroup = true;
  1270. }
  1271. // Make an educated guess about the stream type.
  1272. shaka.log.debug('Guessing stream type for', tag.toString());
  1273. }
  1274. if (globalGroupIds.length && mediaTags.length) {
  1275. const mediaTagsForVariant = mediaTags.filter((tag) => {
  1276. return globalGroupIds.includes(tag.getRequiredAttrValue('GROUP-ID'));
  1277. });
  1278. this.createStreamInfosFromMediaTags_(
  1279. mediaTagsForVariant, groupIdPathwayIdMapping);
  1280. }
  1281. const globalGroupId = globalGroupIds.sort().join(',');
  1282. const streamInfos =
  1283. (globalGroupId && this.groupIdToStreamInfosMap_.has(globalGroupId)) ?
  1284. this.groupIdToStreamInfosMap_.get(globalGroupId) : [];
  1285. if (isAudioGroup) {
  1286. res.audio.push(...streamInfos);
  1287. } else if (isVideoGroup) {
  1288. res.video.push(...streamInfos);
  1289. }
  1290. let type;
  1291. let ignoreStream = false;
  1292. // The Microsoft HLS manifest generators will make audio-only variants
  1293. // that link to their URI both directly and through an audio tag.
  1294. // In that case, ignore the local URI and use the version in the
  1295. // AUDIO tag, so you inherit its language.
  1296. // As an example, see the manifest linked in issue #860.
  1297. const allStreamUris = tags.map((tag) => tag.getRequiredAttrValue('URI'));
  1298. const hasSameUri = res.audio.find((audio) => {
  1299. return audio && audio.getUris().find((uri) => {
  1300. return allStreamUris.includes(uri);
  1301. });
  1302. });
  1303. /** @type {!Array.<string>} */
  1304. let allCodecs = this.getCodecsForVariantTag_(tags[0]);
  1305. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1306. ContentType.VIDEO, allCodecs);
  1307. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1308. ContentType.AUDIO, allCodecs);
  1309. if (audioCodecs && !videoCodecs) {
  1310. // There are no associated media tags, and there's only an audio codec
  1311. // and no video codec, so it should be audio.
  1312. type = ContentType.AUDIO;
  1313. shaka.log.debug('Guessing audio-only.');
  1314. ignoreStream = res.audio.length > 0;
  1315. } else if (!res.audio.length && !res.video.length &&
  1316. audioCodecs && videoCodecs) {
  1317. // There are both audio and video codecs, so assume multiplexed content.
  1318. // Note that the default used when CODECS is missing assumes both audio
  1319. // and video codecs (and therefore multiplexed content).
  1320. // Recombine the codec strings into one so that MediaSource isn't
  1321. // lied to later. (That would trigger an error in Chrome.)
  1322. shaka.log.debug('Guessing multiplexed audio+video.');
  1323. type = ContentType.VIDEO;
  1324. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  1325. } else if (res.audio.length && hasSameUri) {
  1326. shaka.log.debug('Guessing audio-only.');
  1327. type = ContentType.AUDIO;
  1328. ignoreStream = true;
  1329. } else if (res.video.length && !res.audio.length) {
  1330. // There are associated video streams. Assume this is audio.
  1331. shaka.log.debug('Guessing audio-only.');
  1332. type = ContentType.AUDIO;
  1333. } else {
  1334. shaka.log.debug('Guessing video-only.');
  1335. type = ContentType.VIDEO;
  1336. }
  1337. if (!ignoreStream) {
  1338. let language = null;
  1339. let name = null;
  1340. let channelsCount = null;
  1341. let spatialAudio = false;
  1342. let characteristics = null;
  1343. let sampleRate = null;
  1344. if (!streamInfos.length) {
  1345. const mediaTag = mediaTags.find((tag) => {
  1346. const uri = tag.getAttributeValue('URI') || '';
  1347. const type = tag.getAttributeValue('TYPE');
  1348. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1349. return type != 'SUBTITLES' && uri == '' &&
  1350. globalGroupIds.includes(groupId);
  1351. });
  1352. if (mediaTag) {
  1353. language = mediaTag.getAttributeValue('LANGUAGE');
  1354. name = mediaTag.getAttributeValue('NAME');
  1355. channelsCount = this.getChannelsCount_(mediaTag);
  1356. spatialAudio = this.isSpatialAudio_(mediaTag);
  1357. characteristics = mediaTag.getAttributeValue('CHARACTERISTICS');
  1358. sampleRate = this.getSampleRate_(mediaTag);
  1359. }
  1360. }
  1361. const streamInfo = this.createStreamInfoFromVariantTags_(
  1362. tags, allCodecs, type, language, name, channelsCount,
  1363. characteristics, sampleRate, spatialAudio);
  1364. if (globalGroupId) {
  1365. streamInfo.stream.groupId = globalGroupId;
  1366. }
  1367. res[streamInfo.stream.type] = [streamInfo];
  1368. }
  1369. return res;
  1370. }
  1371. /**
  1372. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
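* For example (illustrative values): CODECS="avc1.4d401f, mp4a.40.2" yields
* ['avc1.4d401f', 'mp4a.40.2'] after internal whitespace is stripped.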
  1373. *
  1374. * @param {!shaka.hls.Tag} tag
  1375. * @return {!Array.<string>} codecs
  1376. * @private
  1377. */
  1378. getCodecsForVariantTag_(tag) {
  1379. let codecsString = tag.getAttributeValue('CODECS') || '';
  1380. this.codecInfoInManifest_ = codecsString.length > 0;
  1381. if (!this.codecInfoInManifest_ && !this.config_.hls.disableCodecGuessing) {
  1382. // These are the default codecs to assume if none are specified.
  1383. const defaultCodecsArray = [];
  1384. if (!this.config_.disableVideo) {
  1385. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1386. }
  1387. if (!this.config_.disableAudio) {
  1388. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1389. }
  1390. codecsString = defaultCodecsArray.join(',');
  1391. }
  1392. // Strip out internal whitespace while splitting on commas:
  1393. /** @type {!Array.<string>} */
  1394. const codecs = codecsString.split(/\s*,\s*/);
  1395. return this.filterDuplicateCodecs_(codecs);
  1396. }
  1397. /**
  1398. * @param {!Array.<string>} codecs
  1399. * @return {!Array.<string>} codecs
  1400. * @private
  1401. */
  1402. filterDuplicateCodecs_(codecs) {
  1403. // Filter out duplicate codecs.
  1404. const seen = new Set();
  1405. const ret = [];
  1406. for (const codec of codecs) {
1407. // HLS says the CODECS field needs to include all codecs that appear in
1408. // the content. This means that if the content changes profiles, it must
1409. // list both. Since all known browsers support switching profiles
1410. // without any extra work, we just ignore the duplicate entries. See also:
  1411. // https://github.com/shaka-project/shaka-player/issues/1817
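// Illustrative example (hypothetical values): ['avc1.4d401e', 'avc1.640028',
// 'mp4a.40.2'] keeps only the first 'avc1.*' entry plus 'mp4a.40.2'.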
  1412. const shortCodec = shaka.util.MimeUtils.getCodecBase(codec);
  1413. if (!seen.has(shortCodec)) {
  1414. ret.push(codec);
  1415. seen.add(shortCodec);
  1416. } else {
  1417. shaka.log.debug('Ignoring duplicate codec');
  1418. }
  1419. }
  1420. return ret;
  1421. }
  1422. /**
  1423. * Get the channel count information for an HLS audio track.
  1424. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1425. * If the type is audio, the first parameter will be a decimal integer
  1426. * specifying the number of independent, simultaneous audio channels.
1427. * No other CHANNELS parameters are currently defined.
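* For example (illustrative): CHANNELS="6" yields 6, and CHANNELS="16/JOC"
* yields 16.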
  1428. *
  1429. * @param {!shaka.hls.Tag} tag
  1430. * @return {?number}
  1431. * @private
  1432. */
  1433. getChannelsCount_(tag) {
  1434. const channels = tag.getAttributeValue('CHANNELS');
  1435. if (!channels) {
  1436. return null;
  1437. }
1438. const channelsCountString = channels.split('/')[0];
1439. const count = parseInt(channelsCountString, 10);
  1440. return count;
  1441. }
  1442. /**
  1443. * Get the sample rate information for an HLS audio track.
  1444. *
  1445. * @param {!shaka.hls.Tag} tag
  1446. * @return {?number}
  1447. * @private
  1448. */
  1449. getSampleRate_(tag) {
  1450. const sampleRate = tag.getAttributeValue('SAMPLE-RATE');
  1451. if (!sampleRate) {
  1452. return null;
  1453. }
  1454. return parseInt(sampleRate, 10);
  1455. }
  1456. /**
  1457. * Get the spatial audio information for an HLS audio track.
  1458. * In HLS the channels field indicates the number of audio channels that the
  1459. * stream has (eg: 2). In the case of Dolby Atmos, the complexity is
  1460. * expressed with the number of channels followed by the word JOC
  1461. * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout),
  1462. * and JOC indicates that the stream has spatial audio.
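* For example (illustrative): CHANNELS="16/JOC" indicates spatial audio,
* while CHANNELS="2" does not.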
  1463. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes
  1464. *
  1465. * @param {!shaka.hls.Tag} tag
  1466. * @return {boolean}
  1467. * @private
  1468. */
  1469. isSpatialAudio_(tag) {
  1470. const channels = tag.getAttributeValue('CHANNELS');
  1471. if (!channels) {
  1472. return false;
  1473. }
  1474. return channels.includes('/JOC');
  1475. }
  1476. /**
  1477. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1478. * create the stream info.
  1479. * @param {!shaka.hls.Tag} tag
  1480. * @param {string} type
  1481. * @return {Map.<string, string>} closedCaptions
  1482. * @private
  1483. */
  1484. getClosedCaptions_(tag, type) {
  1485. const ContentType = shaka.util.ManifestParserUtils.ContentType;
1486. // The CLOSED-CAPTIONS attribute is optional, and its value may be
1487. // 'NONE'.
  1488. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1489. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1490. // The value can be either a quoted-string or an enumerated-string with
  1491. // the value NONE. If the value is a quoted-string, it MUST match the
  1492. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1493. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
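// Illustrative example (hypothetical manifest lines):
//   #EXT-X-MEDIA:TYPE=CLOSED-CAPTIONS,GROUP-ID="cc1",INSTREAM-ID="CC1",...
//   #EXT-X-STREAM-INF:BANDWIDTH=1280000,CLOSED-CAPTIONS="cc1",...
// Here CLOSED-CAPTIONS="cc1" refers to the GROUP-ID of the media tag above.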
  1494. if (type == ContentType.VIDEO && closedCaptionsAttr &&
  1495. closedCaptionsAttr != 'NONE') {
  1496. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1497. }
  1498. return null;
  1499. }
  1500. /**
  1501. * Get the normalized language value.
  1502. *
  1503. * @param {?string} languageValue
  1504. * @return {string}
  1505. * @private
  1506. */
  1507. getLanguage_(languageValue) {
  1508. const LanguageUtils = shaka.util.LanguageUtils;
  1509. return LanguageUtils.normalize(languageValue || 'und');
  1510. }
  1511. /**
  1512. * Get the type value.
  1513. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1514. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1515. * @param {!shaka.hls.Tag} tag
  1516. * @return {string}
  1517. * @private
  1518. */
  1519. getType_(tag) {
  1520. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1521. if (type == 'subtitles') {
  1522. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1523. }
  1524. return type;
  1525. }
  1526. /**
  1527. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1528. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1529. * @param {number} bandwidth
  1530. * @param {?string} width
  1531. * @param {?string} height
  1532. * @param {?string} frameRate
  1533. * @param {?string} videoRange
  1534. * @param {?string} videoLayout
  1535. * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
  1536. * @param {!Set.<string>} keyIds
  1537. * @return {!Array.<!shaka.extern.Variant>}
  1538. * @private
  1539. */
  1540. createVariants_(
  1541. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1542. videoLayout, drmInfos, keyIds) {
  1543. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1544. const DrmEngine = shaka.media.DrmEngine;
  1545. for (const info of videoInfos) {
  1546. this.addVideoAttributes_(
  1547. info.stream, width, height, frameRate, videoRange, videoLayout);
  1548. }
1549. // In the case of audio-only or video-only content, or when audio/video
1550. // is disabled by the config, we create a single-item array containing
1551. // null. This way, the double loop below works for all kinds of content.
  1552. // NOTE: we currently don't have support for audio-only content.
  1553. const disableAudio = this.config_.disableAudio;
  1554. if (!audioInfos.length || disableAudio) {
  1555. audioInfos = [null];
  1556. }
  1557. const disableVideo = this.config_.disableVideo;
  1558. if (!videoInfos.length || disableVideo) {
  1559. videoInfos = [null];
  1560. }
  1561. const variants = [];
  1562. for (const audioInfo of audioInfos) {
  1563. for (const videoInfo of videoInfos) {
  1564. const audioStream = audioInfo ? audioInfo.stream : null;
  1565. if (audioStream) {
  1566. audioStream.drmInfos = drmInfos;
  1567. audioStream.keyIds = keyIds;
  1568. }
  1569. const videoStream = videoInfo ? videoInfo.stream : null;
  1570. if (videoStream) {
  1571. videoStream.drmInfos = drmInfos;
  1572. videoStream.keyIds = keyIds;
  1573. }
  1574. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1575. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1576. const videoStreamUri =
  1577. videoInfo ? videoInfo.getUris().sort().join(',') : '';
  1578. const audioStreamUri =
  1579. audioInfo ? audioInfo.getUris().sort().join(',') : '';
  1580. const variantUriKey = videoStreamUri + ' - ' + audioStreamUri;
  1581. if (audioStream && videoStream) {
  1582. if (!DrmEngine.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1583. shaka.log.warning(
  1584. 'Incompatible DRM info in HLS variant. Skipping.');
  1585. continue;
  1586. }
  1587. }
  1588. if (this.variantUriSet_.has(variantUriKey)) {
  1589. // This happens when two variants only differ in their text streams.
  1590. shaka.log.debug(
  1591. 'Skipping variant which only differs in text streams.');
  1592. continue;
  1593. }
1594. // Since audio and video streams share the same structure, the compiler
1595. // can't tell them apart; these assertions catch mix-ups at runtime.
  1596. goog.asserts.assert(!audioStream ||
  1597. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1598. goog.asserts.assert(!videoStream ||
  1599. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1600. const variant = {
  1601. id: this.globalId_++,
  1602. language: audioStream ? audioStream.language : 'und',
  1603. disabledUntilTime: 0,
  1604. primary: (!!audioStream && audioStream.primary) ||
  1605. (!!videoStream && videoStream.primary),
  1606. audio: audioStream,
  1607. video: videoStream,
  1608. bandwidth,
  1609. allowedByApplication: true,
  1610. allowedByKeySystem: true,
  1611. decodingInfos: [],
  1612. };
  1613. variants.push(variant);
  1614. this.variantUriSet_.add(variantUriKey);
  1615. }
  1616. }
  1617. return variants;
  1618. }
  1619. /**
  1620. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1621. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1622. *
  1623. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1624. * @private
  1625. */
  1626. parseClosedCaptions_(mediaTags) {
  1627. const closedCaptionsTags =
  1628. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
  1629. for (const tag of closedCaptionsTags) {
  1630. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1631. 'Should only be called on media tags!');
  1632. const languageValue = tag.getAttributeValue('LANGUAGE');
  1633. let language = this.getLanguage_(languageValue);
  1634. if (!languageValue) {
  1635. const nameValue = tag.getAttributeValue('NAME');
  1636. if (nameValue) {
  1637. language = nameValue;
  1638. }
  1639. }
  1640. // The GROUP-ID value is a quoted-string that specifies the group to which
  1641. // the Rendition belongs.
  1642. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1643. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1644. // within the segments in the Media Playlist. This attribute is REQUIRED
  1645. // if the TYPE attribute is CLOSED-CAPTIONS.
1646. // We need to replace the SERVICE prefix with our internal 'svc' prefix.
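// Illustrative example: INSTREAM-ID="SERVICE3" becomes "svc3".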
  1647. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID')
  1648. .replace('SERVICE', 'svc');
  1649. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1650. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1651. }
  1652. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1653. }
  1654. }
  1655. /**
  1656. * Parse EXT-X-MEDIA media tag into a Stream object.
  1657. *
  1658. * @param {!Array.<!shaka.hls.Tag>} tags
  1659. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1660. * @return {!shaka.hls.HlsParser.StreamInfo}
  1661. * @private
  1662. */
  1663. createStreamInfoFromMediaTags_(tags, groupIdPathwayIdMapping) {
  1664. const verbatimMediaPlaylistUris = [];
  1665. const globalGroupIds = [];
1666. const groupIdUriMapping = new Map();
  1667. for (const tag of tags) {
  1668. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1669. 'Should only be called on media tags!');
  1670. const uri = tag.getRequiredAttrValue('URI');
  1671. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1672. verbatimMediaPlaylistUris.push(uri);
  1673. globalGroupIds.push(groupId);
1674. groupIdUriMapping.set(groupId, uri);
  1675. }
  1676. const globalGroupId = globalGroupIds.sort().join(',');
  1677. const firstTag = tags[0];
  1678. let codecs = '';
  1679. /** @type {string} */
  1680. const type = this.getType_(firstTag);
  1681. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1682. codecs = firstTag.getAttributeValue('CODECS') || '';
  1683. } else {
  1684. for (const groupId of globalGroupIds) {
  1685. if (this.groupIdToCodecsMap_.has(groupId)) {
  1686. codecs = this.groupIdToCodecsMap_.get(groupId);
  1687. break;
  1688. }
  1689. }
  1690. }
  1691. // Check if the stream has already been created as part of another Variant
  1692. // and return it if it has.
  1693. const key = verbatimMediaPlaylistUris.sort().join(',');
  1694. if (this.uriToStreamInfosMap_.has(key)) {
  1695. return this.uriToStreamInfosMap_.get(key);
  1696. }
  1697. const streamId = this.globalId_++;
  1698. if (this.contentSteeringManager_) {
1699. for (const [groupId, uri] of groupIdUriMapping) {
  1700. const pathwayId = groupIdPathwayIdMapping.get(groupId);
  1701. if (pathwayId) {
  1702. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1703. }
  1704. }
  1705. }
  1706. const language = firstTag.getAttributeValue('LANGUAGE');
  1707. const name = firstTag.getAttributeValue('NAME');
  1708. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  1709. // However, we don't bother to validate "AUTOSELECT", since we don't
  1710. // actually use it in our streaming model, and we treat everything as
  1711. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  1712. // only be selected explicitly by the user, and we don't have a way to
  1713. // represent that in our model.
  1714. const defaultAttrValue = firstTag.getAttributeValue('DEFAULT');
  1715. const primary = defaultAttrValue == 'YES';
  1716. const channelsCount =
  1717. type == 'audio' ? this.getChannelsCount_(firstTag) : null;
  1718. const spatialAudio =
  1719. type == 'audio' ? this.isSpatialAudio_(firstTag) : false;
  1720. const characteristics = firstTag.getAttributeValue('CHARACTERISTICS');
  1721. const forcedAttrValue = firstTag.getAttributeValue('FORCED');
  1722. const forced = forcedAttrValue == 'YES';
  1723. const sampleRate = type == 'audio' ? this.getSampleRate_(firstTag) : null;
1724. // TODO: Should we take into account some of the currently-ignored
1725. // attributes, such as INSTREAM-ID? Attribute descriptions: https://bit.ly/2lpjOhj
  1726. const streamInfo = this.createStreamInfo_(
  1727. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1728. primary, name, channelsCount, /* closedCaptions= */ null,
  1729. characteristics, forced, sampleRate, spatialAudio);
  1730. if (streamInfo.stream) {
  1731. streamInfo.stream.groupId = globalGroupId;
  1732. }
  1733. if (this.groupIdToStreamInfosMap_.has(globalGroupId)) {
  1734. this.groupIdToStreamInfosMap_.get(globalGroupId).push(streamInfo);
  1735. } else {
  1736. this.groupIdToStreamInfosMap_.set(globalGroupId, [streamInfo]);
  1737. }
  1738. this.uriToStreamInfosMap_.set(key, streamInfo);
  1739. return streamInfo;
  1740. }
  1741. /**
  1742. * Parse EXT-X-IMAGE-STREAM-INF media tag into a Stream object.
  1743. *
  1744. * @param {shaka.hls.Tag} tag
  1745. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  1746. * @private
  1747. */
  1748. async createStreamInfoFromImageTag_(tag) {
  1749. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  1750. 'Should only be called on image tags!');
  1751. /** @type {string} */
  1752. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1753. const verbatimImagePlaylistUri = tag.getRequiredAttrValue('URI');
  1754. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  1755. // Check if the stream has already been created as part of another Variant
  1756. // and return it if it has.
  1757. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  1758. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  1759. }
  1760. const language = tag.getAttributeValue('LANGUAGE');
  1761. const name = tag.getAttributeValue('NAME');
  1762. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1763. const streamInfo = this.createStreamInfo_(
  1764. this.globalId_++, [verbatimImagePlaylistUri], codecs, type, language,
  1765. /* primary= */ false, name, /* channelsCount= */ null,
  1766. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  1767. /* sampleRate= */ null, /* spatialAudio= */ false);
  1768. // Parse misc attributes.
  1769. const resolution = tag.getAttributeValue('RESOLUTION');
  1770. if (resolution) {
1771. // The RESOLUTION attribute represents the resolution of a single
1772. // thumbnail, not of the entire sheet (which is what we expect in the
1773. // output), so multiply by the layout size.
  1774. // Since we need to have generated the segment index for this, we can't
  1775. // lazy-load in this situation.
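// Illustrative example (hypothetical values): RESOLUTION="320x180" with a
// 5x4 tiles layout produces a 1600x720 sheet.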
  1776. await streamInfo.stream.createSegmentIndex();
  1777. const reference = streamInfo.stream.segmentIndex.get(0);
  1778. const layout = reference.getTilesLayout();
  1779. if (layout) {
  1780. streamInfo.stream.width =
  1781. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  1782. streamInfo.stream.height =
  1783. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
  1784. // TODO: What happens if there are multiple grids, with different
  1785. // layout sizes, inside this image stream?
  1786. }
  1787. }
  1788. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  1789. if (bandwidth) {
  1790. streamInfo.stream.bandwidth = Number(bandwidth);
  1791. }
  1792. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  1793. return streamInfo;
  1794. }
  1795. /**
  1796. * Parse EXT-X-I-FRAME-STREAM-INF media tag into a Stream object.
  1797. *
  1798. * @param {shaka.hls.Tag} tag
  1799. * @return {!shaka.hls.HlsParser.StreamInfo}
  1800. * @private
  1801. */
  1802. createStreamInfoFromIframeTag_(tag) {
  1803. goog.asserts.assert(tag.name == 'EXT-X-I-FRAME-STREAM-INF',
  1804. 'Should only be called on iframe tags!');
  1805. /** @type {string} */
  1806. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1807. const verbatimIFramePlaylistUri = tag.getRequiredAttrValue('URI');
  1808. const codecs = tag.getAttributeValue('CODECS') || '';
  1809. // Check if the stream has already been created as part of another Variant
  1810. // and return it if it has.
  1811. if (this.uriToStreamInfosMap_.has(verbatimIFramePlaylistUri)) {
  1812. return this.uriToStreamInfosMap_.get(verbatimIFramePlaylistUri);
  1813. }
  1814. const language = tag.getAttributeValue('LANGUAGE');
  1815. const name = tag.getAttributeValue('NAME');
  1816. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1817. const streamInfo = this.createStreamInfo_(
  1818. this.globalId_++, [verbatimIFramePlaylistUri], codecs, type, language,
  1819. /* primary= */ false, name, /* channelsCount= */ null,
  1820. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  1821. /* sampleRate= */ null, /* spatialAudio= */ false);
  1822. // Parse misc attributes.
  1823. const resolution = tag.getAttributeValue('RESOLUTION');
  1824. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1825. streamInfo.stream.width = Number(width) || undefined;
  1826. streamInfo.stream.height = Number(height) || undefined;
  1827. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  1828. if (bandwidth) {
  1829. streamInfo.stream.bandwidth = Number(bandwidth);
  1830. }
  1831. this.uriToStreamInfosMap_.set(verbatimIFramePlaylistUri, streamInfo);
  1832. return streamInfo;
  1833. }
  1834. /**
  1835. * Parse an EXT-X-STREAM-INF media tag into a Stream object.
  1836. *
  1837. * @param {!Array.<!shaka.hls.Tag>} tags
  1838. * @param {!Array.<string>} allCodecs
  1839. * @param {string} type
  1840. * @param {?string} language
  1841. * @param {?string} name
  1842. * @param {?number} channelsCount
  1843. * @param {?string} characteristics
  1844. * @param {?number} sampleRate
  1845. * @param {boolean} spatialAudio
  1846. * @return {!shaka.hls.HlsParser.StreamInfo}
  1847. * @private
  1848. */
  1849. createStreamInfoFromVariantTags_(tags, allCodecs, type, language, name,
  1850. channelsCount, characteristics, sampleRate, spatialAudio) {
  1851. const streamId = this.globalId_++;
  1852. const verbatimMediaPlaylistUris = [];
  1853. for (const tag of tags) {
  1854. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  1855. 'Should only be called on variant tags!');
  1856. const uri = tag.getRequiredAttrValue('URI');
  1857. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1858. if (this.contentSteeringManager_ && pathwayId) {
  1859. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1860. }
  1861. verbatimMediaPlaylistUris.push(uri);
  1862. }
  1863. const key = verbatimMediaPlaylistUris.sort().join(',');
  1864. if (this.uriToStreamInfosMap_.has(key)) {
  1865. return this.uriToStreamInfosMap_.get(key);
  1866. }
  1867. const closedCaptions = this.getClosedCaptions_(tags[0], type);
  1868. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  1869. const streamInfo = this.createStreamInfo_(
  1870. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1871. /* primary= */ false, name, channelsCount, closedCaptions,
  1872. characteristics, /* forced= */ false, sampleRate,
  1873. /* spatialAudio= */ false);
  1874. this.uriToStreamInfosMap_.set(key, streamInfo);
  1875. return streamInfo;
  1876. }
  1877. /**
  1878. * @param {number} streamId
  1879. * @param {!Array.<string>} verbatimMediaPlaylistUris
  1880. * @param {string} codecs
  1881. * @param {string} type
  1882. * @param {?string} languageValue
  1883. * @param {boolean} primary
  1884. * @param {?string} name
  1885. * @param {?number} channelsCount
  1886. * @param {Map.<string, string>} closedCaptions
  1887. * @param {?string} characteristics
  1888. * @param {boolean} forced
  1889. * @param {?number} sampleRate
  1890. * @param {boolean} spatialAudio
  1891. * @return {!shaka.hls.HlsParser.StreamInfo}
  1892. * @private
  1893. */
  1894. createStreamInfo_(streamId, verbatimMediaPlaylistUris, codecs, type,
  1895. languageValue, primary, name, channelsCount, closedCaptions,
  1896. characteristics, forced, sampleRate, spatialAudio) {
  1897. // TODO: Refactor, too many parameters
  1898. // This stream is lazy-loaded inside the createSegmentIndex function.
  1899. // So we start out with a stream object that does not contain the actual
  1900. // segment index, then download when createSegmentIndex is called.
  1901. const stream = this.makeStreamObject_(streamId, codecs, type,
  1902. languageValue, primary, name, channelsCount, closedCaptions,
  1903. characteristics, forced, sampleRate, spatialAudio);
  1904. const redirectUris = [];
  1905. const getUris = () => {
  1906. if (this.contentSteeringManager_ &&
  1907. verbatimMediaPlaylistUris.length > 1) {
  1908. return this.contentSteeringManager_.getLocations(streamId);
  1909. }
  1910. return redirectUris.concat(shaka.hls.Utils.constructUris(
  1911. [this.masterPlaylistUri_], verbatimMediaPlaylistUris,
  1912. this.globalVariables_));
  1913. };
  1914. const streamInfo = {
  1915. stream,
  1916. type,
  1917. redirectUris,
  1918. getUris,
  1919. // These values are filled out or updated after lazy-loading:
  1920. minTimestamp: 0,
  1921. maxTimestamp: 0,
  1922. mediaSequenceToStartTime: new Map(),
  1923. canSkipSegments: false,
  1924. canBlockReload: false,
  1925. hasEndList: false,
  1926. firstSequenceNumber: -1,
  1927. nextMediaSequence: -1,
  1928. nextPart: -1,
  1929. loadedOnce: false,
  1930. };
  1931. /** @param {!AbortSignal} abortSignal */
  1932. const downloadSegmentIndex = async (abortSignal) => {
  1933. const uris = streamInfo.getUris();
  1934. // Download the actual manifest.
  1935. const response = await this.requestManifest_(
  1936. streamInfo.getUris(), /* isPlaylist= */ true);
  1937. if (abortSignal.aborted) {
  1938. return;
  1939. }
  1940. // Record the final URI after redirects.
  1941. const responseUri = response.uri;
  1942. if (responseUri != response.originalUri && !uris.includes(responseUri)) {
  1943. redirectUris.push(responseUri);
  1944. }
  1945. // Record the redirected, final URI of this media playlist when we parse
  1946. // it.
  1947. /** @type {!shaka.hls.Playlist} */
  1948. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  1949. let mimeType = undefined;
1950. // If no codec info was provided in the manifest and codec guessing is
1951. // disabled, we try to get the necessary info from the media data itself.
  1952. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  1953. const basicInfo =
  1954. await this.getMediaPlaylistBasicInfo_(playlist, getUris);
  1955. goog.asserts.assert(
  1956. type === basicInfo.type, 'Media types should match!');
  1957. mimeType = basicInfo.mimeType;
  1958. codecs = basicInfo.codecs;
  1959. }
  1960. const wasLive = this.isLive_();
  1961. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  1962. streamId, playlist, getUris, responseUri, codecs,
  1963. type, languageValue, primary, name, channelsCount, closedCaptions,
  1964. characteristics, forced, sampleRate, spatialAudio, mimeType);
  1965. if (abortSignal.aborted) {
  1966. return;
  1967. }
  1968. const realStream = realStreamInfo.stream;
  1969. if (this.isLive_() && !wasLive) {
  1970. // Now that we know that the presentation is live, convert the timeline
  1971. // to live.
  1972. this.changePresentationTimelineToLive_(playlist);
  1973. }
  1974. // Copy values from the real stream info to our initial one.
  1975. streamInfo.minTimestamp = realStreamInfo.minTimestamp;
  1976. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  1977. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  1978. streamInfo.canBlockReload = realStreamInfo.canBlockReload;
  1979. streamInfo.hasEndList = realStreamInfo.hasEndList;
  1980. streamInfo.mediaSequenceToStartTime =
  1981. realStreamInfo.mediaSequenceToStartTime;
  1982. streamInfo.nextMediaSequence = realStreamInfo.nextMediaSequence;
  1983. streamInfo.nextPart = realStreamInfo.nextPart;
  1984. streamInfo.loadedOnce = true;
  1985. stream.segmentIndex = realStream.segmentIndex;
  1986. stream.encrypted = realStream.encrypted;
  1987. stream.drmInfos = realStream.drmInfos;
  1988. stream.keyIds = realStream.keyIds;
  1989. stream.mimeType = realStream.mimeType;
  1990. stream.bandwidth = realStream.bandwidth;
  1991. stream.codecs = realStream.codecs || stream.codecs;
  1992. // Since we lazy-loaded this content, the player may need to create new
  1993. // sessions for the DRM info in this stream.
  1994. if (stream.drmInfos.length) {
  1995. this.playerInterface_.newDrmInfo(stream);
  1996. }
  1997. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1998. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  1999. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  2000. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  2001. // To aid manifest filtering, assume before loading that all video
  2002. // renditions have the same MIME type. (And likewise for audio.)
  2003. otherStreamInfo.stream.mimeType = realStream.mimeType;
  2004. }
  2005. }
  2006. }
  2007. if (type == ContentType.TEXT) {
  2008. const firstSegment = realStream.segmentIndex.get(0);
  2009. if (firstSegment && firstSegment.initSegmentReference) {
  2010. stream.mimeType = 'application/mp4';
  2011. }
  2012. }
  2013. // Add finishing touches to the stream that can only be done once we have
  2014. // more full context on the media as a whole.
  2015. if (this.hasEnoughInfoToFinalizeStreams_()) {
  2016. if (!this.streamsFinalized_) {
  2017. // Mark this manifest as having been finalized, so we don't go through
  2018. // this whole process of finishing touches a second time.
  2019. this.streamsFinalized_ = true;
  2020. // Finalize all of the currently-loaded streams.
  2021. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  2022. const activeStreamInfos =
  2023. streamInfos.filter((s) => s.stream.segmentIndex);
  2024. this.finalizeStreams_(activeStreamInfos);
  2025. // With the addition of this new stream, we now have enough info to
  2026. // figure out how long the streams should be. So process all streams
  2027. // we have downloaded up until this point.
  2028. this.determineDuration_();
  2029. // Finally, start the update timer, if this asset has been determined
  2030. // to be a livestream.
  2031. const delay = this.getUpdatePlaylistDelay_();
  2032. if (delay > 0) {
  2033. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2034. }
  2035. } else {
  2036. // We don't need to go through the full process; just finalize this
  2037. // single stream.
  2038. this.finalizeStreams_([streamInfo]);
  2039. }
  2040. }
  2041. };
  2042. /** @type {Promise} */
  2043. let creationPromise = null;
  2044. /** @type {!AbortController} */
  2045. let abortController = new AbortController();
  2046. const safeCreateSegmentIndex = () => {
2047. // If an operation is already in progress, the second and subsequent
  2048. // callers receive the same Promise as the first caller, and only one
  2049. // download operation will occur.
  2050. if (creationPromise) {
  2051. return creationPromise;
  2052. }
  2053. // Create a new AbortController to be able to cancel this specific
  2054. // download.
  2055. abortController = new AbortController();
  2056. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  2057. // downloadSegmentIndex is rejected, creationPromise will also be
  2058. // rejected.
  2059. creationPromise = new Promise((resolve) => {
  2060. resolve(downloadSegmentIndex(abortController.signal));
  2061. });
  2062. return creationPromise;
  2063. };
  2064. stream.createSegmentIndex = safeCreateSegmentIndex;
  2065. stream.closeSegmentIndex = () => {
  2066. // If we're mid-creation, cancel it.
  2067. if (creationPromise && !stream.segmentIndex) {
  2068. abortController.abort();
  2069. }
  2070. // If we have a segment index, release it.
  2071. if (stream.segmentIndex) {
  2072. stream.segmentIndex.release();
  2073. stream.segmentIndex = null;
  2074. }
  2075. // Clear the creation Promise so that a new operation can begin.
  2076. creationPromise = null;
  2077. };
  2078. return streamInfo;
  2079. }
  2080. /**
  2081. * @return {number}
  2082. * @private
  2083. */
  2084. getMinDuration_() {
  2085. let minDuration = Infinity;
  2086. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2087. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2088. // Since everything is already offset to 0 (either by sync or by being
  2089. // VOD), only maxTimestamp is necessary to compute the duration.
  2090. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  2091. }
  2092. }
  2093. return minDuration;
  2094. }
  2095. /**
  2096. * @return {number}
  2097. * @private
  2098. */
  2099. getLiveDuration_() {
  2100. let maxTimestamp = Infinity;
  2101. let minTimestamp = Infinity;
  2102. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2103. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2104. maxTimestamp = Math.min(maxTimestamp, streamInfo.maxTimestamp);
  2105. minTimestamp = Math.min(minTimestamp, streamInfo.minTimestamp);
  2106. }
  2107. }
  2108. return maxTimestamp - minTimestamp;
  2109. }
  2110. /**
  2111. * @param {!Array.<!shaka.extern.Stream>} streams
  2112. * @private
  2113. */
  2114. notifySegmentsForStreams_(streams) {
  2115. const references = [];
  2116. for (const stream of streams) {
  2117. if (!stream.segmentIndex) {
  2118. // The stream was closed since the list of streams was built.
  2119. continue;
  2120. }
  2121. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2122. references.push(reference);
  2123. });
  2124. }
  2125. this.presentationTimeline_.notifySegments(references);
  2126. }
  2127. /**
  2128. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  2129. * @private
  2130. */
  2131. finalizeStreams_(streamInfos) {
  2132. if (!this.isLive_()) {
  2133. const minDuration = this.getMinDuration_();
  2134. for (const streamInfo of streamInfos) {
  2135. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  2136. }
  2137. }
  2138. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  2139. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2140. this.syncStreamsWithSequenceNumber_(streamInfos);
  2141. } else {
  2142. this.syncStreamsWithProgramDateTime_(streamInfos);
  2143. }
  2144. }
  2145. /**
  2146. * There are some values on streams that can only be set once we know about
  2147. * both the video and audio content, if present.
2148. * This checks that at least one video stream has been downloaded (if the
2149. * media has video), and that at least one audio stream has been downloaded
2150. * (if the media has audio).
  2151. * @return {boolean}
  2152. * @private
  2153. */
  2154. hasEnoughInfoToFinalizeStreams_() {
  2155. if (!this.manifest_) {
  2156. return false;
  2157. }
  2158. const videos = [];
  2159. const audios = [];
  2160. for (const variant of this.manifest_.variants) {
  2161. if (variant.video) {
  2162. videos.push(variant.video);
  2163. }
  2164. if (variant.audio) {
  2165. audios.push(variant.audio);
  2166. }
  2167. }
  2168. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  2169. return false;
  2170. }
  2171. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  2172. return false;
  2173. }
  2174. return true;
  2175. }
  2176. /**
  2177. * @param {number} streamId
  2178. * @param {!shaka.hls.Playlist} playlist
  2179. * @param {function():!Array.<string>} getUris
  2180. * @param {string} responseUri
  2181. * @param {string} codecs
  2182. * @param {string} type
  2183. * @param {?string} languageValue
  2184. * @param {boolean} primary
  2185. * @param {?string} name
  2186. * @param {?number} channelsCount
  2187. * @param {Map.<string, string>} closedCaptions
  2188. * @param {?string} characteristics
  2189. * @param {boolean} forced
  2190. * @param {?number} sampleRate
  2191. * @param {boolean} spatialAudio
  2192. * @param {(string|undefined)} mimeType
  2193. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  2194. * @private
  2195. */
  2196. async convertParsedPlaylistIntoStreamInfo_(streamId, playlist,
  2197. getUris, responseUri, codecs, type, languageValue, primary, name,
  2198. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2199. spatialAudio, mimeType = undefined) {
  2200. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  2201. // EXT-X-MEDIA and EXT-X-IMAGE-STREAM-INF tags should point to media
  2202. // playlists.
  2203. throw new shaka.util.Error(
  2204. shaka.util.Error.Severity.CRITICAL,
  2205. shaka.util.Error.Category.MANIFEST,
  2206. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  2207. }
  2208. /** @type {!Array.<!shaka.hls.Tag>} */
  2209. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  2210. 'EXT-X-DEFINE');
  2211. const mediaVariables =
  2212. this.parseMediaVariables_(variablesTags, responseUri);
  2213. goog.asserts.assert(playlist.segments != null,
  2214. 'Media playlist should have segments!');
  2215. this.determinePresentationType_(playlist);
  2216. if (this.isLive_()) {
  2217. this.determineLastTargetDuration_(playlist);
  2218. }
  2219. if (!mimeType) {
  2220. mimeType = await this.guessMimeType_(type, codecs, playlist,
  2221. mediaVariables, getUris);
  2222. }
  2223. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  2224. this.parseDrmInfo_(playlist, mimeType);
  2225. if (encrypted && !drmInfos.length && !aesEncrypted) {
  2226. throw new shaka.util.Error(
  2227. shaka.util.Error.Severity.CRITICAL,
  2228. shaka.util.Error.Category.MANIFEST,
  2229. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  2230. }
  2231. const stream = this.makeStreamObject_(streamId, codecs, type,
  2232. languageValue, primary, name, channelsCount, closedCaptions,
  2233. characteristics, forced, sampleRate, spatialAudio);
  2234. stream.encrypted = encrypted;
  2235. stream.drmInfos = drmInfos;
  2236. stream.keyIds = keyIds;
  2237. stream.mimeType = mimeType;
  2238. const mediaSequenceToStartTime = this.isLive_() ?
  2239. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  2240. const {segments, bandwidth} = this.createSegments_(
  2241. playlist, stream, mediaSequenceToStartTime, mediaVariables, getUris);
  2242. stream.bandwidth = bandwidth;
  2243. // This new calculation is necessary for Low Latency streams.
  2244. if (this.isLive_()) {
  2245. this.determineLastTargetDuration_(playlist);
  2246. }
  2247. const firstStartTime = segments[0].startTime;
  2248. const lastSegment = segments[segments.length - 1];
  2249. const lastEndTime = lastSegment.endTime;
  2250. /** @type {!shaka.media.SegmentIndex} */
  2251. const segmentIndex = new shaka.media.SegmentIndex(segments);
  2252. stream.segmentIndex = segmentIndex;
  2253. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2254. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2255. const canSkipSegments = serverControlTag ?
  2256. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  2257. const canBlockReload = serverControlTag ?
  2258. serverControlTag.getAttribute('CAN-BLOCK-RELOAD') != null : false;
  2259. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2260. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2261. const {nextMediaSequence, nextPart} =
  2262. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  2263. return {
  2264. stream,
  2265. type,
  2266. redirectUris: [],
  2267. getUris,
  2268. minTimestamp: firstStartTime,
  2269. maxTimestamp: lastEndTime,
  2270. canSkipSegments,
  2271. canBlockReload,
  2272. hasEndList: false,
  2273. firstSequenceNumber: -1,
  2274. nextMediaSequence,
  2275. nextPart,
  2276. mediaSequenceToStartTime,
  2277. loadedOnce: false,
  2278. };
  2279. }
  2280. /**
2281. * Get the next media sequence number (msn) and part index.
  2282. *
  2283. * @param {number} mediaSequenceNumber
  2284. * @param {!Array.<!shaka.media.SegmentReference>} segments
2285. * @return {{nextMediaSequence: number, nextPart: number}}
  2286. * @private
  2287. */
  2288. getNextMediaSequenceAndPart_(mediaSequenceNumber, segments) {
  2289. const currentMediaSequence = mediaSequenceNumber + segments.length - 1;
  2290. let nextMediaSequence = currentMediaSequence;
  2291. let nextPart = -1;
  2292. if (!segments.length) {
  2293. nextMediaSequence++;
  2294. return {
  2295. nextMediaSequence,
  2296. nextPart,
  2297. };
  2298. }
  2299. const lastSegment = segments[segments.length - 1];
  2300. const partialReferences = lastSegment.partialReferences;
  2301. if (!lastSegment.partialReferences.length) {
  2302. nextMediaSequence++;
  2303. if (lastSegment.hasByterangeOptimization()) {
  2304. nextPart = 0;
  2305. }
  2306. return {
  2307. nextMediaSequence,
  2308. nextPart,
  2309. };
  2310. }
  2311. nextPart = partialReferences.length - 1;
  2312. const lastPartialReference =
  2313. partialReferences[partialReferences.length - 1];
  2314. if (!lastPartialReference.isPreload()) {
  2315. nextMediaSequence++;
  2316. nextPart = 0;
  2317. }
  2318. return {
  2319. nextMediaSequence,
  2320. nextPart,
  2321. };
  2322. }
  2323. /**
  2324. * Creates a stream object with the given parameters.
  2325. * The parameters that are passed into here are only the things that can be
  2326. * known without downloading the media playlist; other values must be set
  2327. * manually on the object after creation.
  2328. * @param {number} id
  2329. * @param {string} codecs
  2330. * @param {string} type
  2331. * @param {?string} languageValue
  2332. * @param {boolean} primary
  2333. * @param {?string} name
  2334. * @param {?number} channelsCount
  2335. * @param {Map.<string, string>} closedCaptions
  2336. * @param {?string} characteristics
  2337. * @param {boolean} forced
  2338. * @param {?number} sampleRate
  2339. * @param {boolean} spatialAudio
  2340. * @return {!shaka.extern.Stream}
  2341. * @private
  2342. */
  2343. makeStreamObject_(id, codecs, type, languageValue, primary, name,
  2344. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2345. spatialAudio) {
  2346. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  2347. // stream is lazy-loaded.
  2348. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  2349. this.guessMimeTypeFallback_(type);
  2350. const roles = [];
  2351. if (characteristics) {
  2352. for (const characteristic of characteristics.split(',')) {
  2353. roles.push(characteristic);
  2354. }
  2355. }
  2356. let kind = undefined;
  2357. let accessibilityPurpose = null;
  2358. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2359. if (roles.includes('public.accessibility.transcribes-spoken-dialog') &&
  2360. roles.includes('public.accessibility.describes-music-and-sound')) {
  2361. kind = shaka.util.ManifestParserUtils.TextStreamKind.CLOSED_CAPTION;
  2362. } else {
  2363. kind = shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE;
  2364. }
  2365. } else {
  2366. if (roles.includes('public.accessibility.describes-video')) {
  2367. accessibilityPurpose =
  2368. shaka.media.ManifestParser.AccessibilityPurpose.VISUALLY_IMPAIRED;
  2369. }
  2370. }
  2371. // If there are no roles, and we have defaulted to the subtitle "kind" for
  2372. // this track, add the implied subtitle role.
  2373. if (!roles.length &&
  2374. kind === shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE) {
  2375. roles.push(shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE);
  2376. }
  2377. return {
  2378. id: this.globalId_++,
  2379. originalId: name,
  2380. groupId: null,
  2381. createSegmentIndex: () => Promise.resolve(),
  2382. segmentIndex: null,
  2383. mimeType,
  2384. codecs,
2385. kind,
  2387. encrypted: false,
  2388. drmInfos: [],
  2389. keyIds: new Set(),
  2390. language: this.getLanguage_(languageValue),
  2391. originalLanguage: languageValue,
  2392. label: name, // For historical reasons, since before "originalId".
  2393. type,
  2394. primary,
  2395. // TODO: trick mode
  2396. trickModeVideo: null,
  2397. emsgSchemeIdUris: null,
  2398. frameRate: undefined,
  2399. pixelAspectRatio: undefined,
  2400. width: undefined,
  2401. height: undefined,
  2402. bandwidth: undefined,
  2403. roles,
  2404. forced,
  2405. channelsCount,
  2406. audioSamplingRate: sampleRate,
  2407. spatialAudio,
  2408. closedCaptions,
  2409. hdr: undefined,
  2410. videoLayout: undefined,
  2411. tilesLayout: undefined,
  2412. accessibilityPurpose: accessibilityPurpose,
  2413. external: false,
  2414. fastSwitching: false,
  2415. };
  2416. }
  2417. /**
  2418. * @param {!shaka.hls.Playlist} playlist
  2419. * @param {string} mimeType
  2420. * @return {{
  2421. * drmInfos: !Array.<shaka.extern.DrmInfo>,
  2422. * keyIds: !Set.<string>,
  2423. * encrypted: boolean,
  2424. * aesEncrypted: boolean
  2425. * }}
  2426. * @private
  2427. */
  2428. parseDrmInfo_(playlist, mimeType) {
  2429. /** @type {!Array.<!shaka.hls.Tag>} */
  2430. const drmTags = [];
  2431. if (playlist.segments) {
  2432. for (const segment of playlist.segments) {
  2433. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  2434. 'EXT-X-KEY');
  2435. drmTags.push(...segmentKeyTags);
  2436. }
  2437. }
  2438. let encrypted = false;
  2439. let aesEncrypted = false;
  2440. /** @type {!Array.<shaka.extern.DrmInfo>}*/
  2441. const drmInfos = [];
  2442. const keyIds = new Set();
  2443. for (const drmTag of drmTags) {
  2444. const method = drmTag.getRequiredAttrValue('METHOD');
  2445. if (method != 'NONE') {
  2446. encrypted = true;
  2447. if (method == 'AES-128') {
  2448. // These keys are handled separately.
  2449. aesEncrypted = true;
  2450. } else {
  2451. // According to the HLS spec, KEYFORMAT is optional and implicitly
  2452. // defaults to "identity".
  2453. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  2454. const keyFormat =
  2455. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  2456. const drmParser =
  2457. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  2458. const drmInfo = drmParser ? drmParser(drmTag, mimeType) : null;
  2459. if (drmInfo) {
  2460. if (drmInfo.keyIds) {
  2461. for (const keyId of drmInfo.keyIds) {
  2462. keyIds.add(keyId);
  2463. }
  2464. }
  2465. drmInfos.push(drmInfo);
  2466. } else {
  2467. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  2468. }
  2469. }
  2470. }
  2471. }
  2472. return {drmInfos, keyIds, encrypted, aesEncrypted};
  2473. }
  2474. /**
  2475. * @param {!shaka.hls.Tag} drmTag
  2476. * @param {!shaka.hls.Playlist} playlist
  2477. * @param {function():!Array.<string>} getUris
  2478. * @param {?Map.<string, string>=} variables
  2479. * @return {!shaka.extern.aes128Key}
  2480. * @private
  2481. */
  2482. parseAES128DrmTag_(drmTag, playlist, getUris, variables) {
  2483. // Check if the Web Crypto API is available.
  2484. if (!window.crypto || !window.crypto.subtle) {
  2485. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  2486. 'AES-128. (Web Crypto only exists in secure origins like https)');
  2487. throw new shaka.util.Error(
  2488. shaka.util.Error.Severity.CRITICAL,
  2489. shaka.util.Error.Category.MANIFEST,
  2490. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  2491. }
  2492. // HLS RFC 8216 Section 5.2:
  2493. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  2494. // attribute indicates that the Media Sequence Number is to be used as the
  2495. // IV when decrypting a Media Segment, by putting its big-endian binary
  2496. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  2497. // with zeros.
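// Illustrative example: a Media Sequence Number of 7 yields the 16-byte IV
// 0x00000000000000000000000000000007 (big-endian, zero-padded on the left).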
  2498. let firstMediaSequenceNumber = 0;
  2499. let iv;
  2500. const ivHex = drmTag.getAttributeValue('IV', '');
  2501. if (!ivHex) {
  2502. // Media Sequence Number will be used as IV.
  2503. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2504. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2505. } else {
2506. // Exclude the "0x" prefix at the start of the string.
  2507. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  2508. if (iv.byteLength != 16) {
  2509. throw new shaka.util.Error(
  2510. shaka.util.Error.Severity.CRITICAL,
  2511. shaka.util.Error.Category.MANIFEST,
  2512. shaka.util.Error.Code.AES_128_INVALID_IV_LENGTH);
  2513. }
  2514. }
  2515. const keyInfo = {method: 'AES-128', iv, firstMediaSequenceNumber};
  2516. // Don't download the key object until the segment is parsed, to avoid a
  2517. // startup delay for long manifests with lots of keys.
  2518. keyInfo.fetchKey = async () => {
  2519. const keyUri = shaka.hls.Utils.constructSegmentUris(
  2520. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  2521. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  2522. const request = shaka.net.NetworkingEngine.makeRequest(
  2523. [keyUri], this.config_.retryParameters);
  2524. const keyResponse = await this.makeNetworkRequest_(request, requestType);
  2525. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  2526. if (!keyResponse.data || keyResponse.data.byteLength != 16) {
  2527. throw new shaka.util.Error(
  2528. shaka.util.Error.Severity.CRITICAL,
  2529. shaka.util.Error.Category.MANIFEST,
  2530. shaka.util.Error.Code.AES_128_INVALID_KEY_LENGTH);
  2531. }
  2532. const algorithm = {
  2533. name: 'AES-CBC',
  2534. };
  2535. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  2536. 'raw', keyResponse.data, algorithm, true, ['decrypt']);
  2537. keyInfo.fetchKey = undefined; // No longer needed.
  2538. };
  2539. return keyInfo;
  2540. }
  2541. /**
  2542. * @param {!shaka.hls.Playlist} playlist
  2543. * @private
  2544. */
  2545. determinePresentationType_(playlist) {
  2546. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  2547. const presentationTypeTag =
  2548. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  2549. 'EXT-X-PLAYLIST-TYPE');
  2550. const endListTag =
  2551. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  2552. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  2553. endListTag;
  2554. const isEvent = presentationTypeTag &&
  2555. presentationTypeTag.value == 'EVENT' && !isVod;
  2556. const isLive = !isVod && !isEvent;
  2557. if (isVod) {
  2558. this.setPresentationType_(PresentationType.VOD);
  2559. } else {
  2560. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  2561. if (isLive) {
  2562. this.setPresentationType_(PresentationType.LIVE);
  2563. } else {
  2564. this.setPresentationType_(PresentationType.EVENT);
  2565. }
  2566. }
  2567. }
  2568. /**
  2569. * @param {!shaka.hls.Playlist} playlist
  2570. * @private
  2571. */
  2572. determineLastTargetDuration_(playlist) {
  2573. let lastTargetDuration = Infinity;
  2574. const segments = playlist.segments;
  2575. if (segments.length) {
  2576. let segmentIndex = segments.length - 1;
  2577. while (segmentIndex >= 0) {
  2578. const segment = segments[segmentIndex];
  2579. const extinfTag =
  2580. shaka.hls.Utils.getFirstTagWithName(segment.tags, 'EXTINF');
  2581. if (extinfTag) {
  2582. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  2583. // We're interested in the duration part.
  2584. const extinfValues = extinfTag.value.split(',');
  2585. lastTargetDuration = Number(extinfValues[0]);
  2586. break;
  2587. }
  2588. segmentIndex--;
  2589. }
  2590. }
  2591. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  2592. 'EXT-X-TARGETDURATION');
  2593. const targetDuration = Number(targetDurationTag.value);
  2594. const partialTargetDurationTag =
  2595. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  2596. if (partialTargetDurationTag) {
  2597. this.partialTargetDuration_ = Number(
  2598. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  2599. }
  2600. // Get the server-recommended min distance from the live edge.
  2601. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2602. playlist.tags, 'EXT-X-SERVER-CONTROL');
2603. // According to the HLS spec, updates should not happen more often than
2604. // once per target duration. It also requires us to update only the active
  2605. // variant. We might implement that later, but for now every variant
  2606. // will be updated. To get the update period, choose the smallest
  2607. // targetDuration value across all playlists.
  2608. // 1. Update the shortest one to use as update period and segment
  2609. // availability time (for LIVE).
  2610. if (this.lowLatencyMode_ && this.partialTargetDuration_) {
  2611. // For low latency streaming, use the partial segment target duration.
  2612. if (this.lowLatencyByterangeOptimization_) {
  2613. // We always have at least 1 partial segment part, and most servers
  2614. // allow you to make a request with _HLS_msn=X&_HLS_part=0 with a
2615. // distance of 4 partial segments. This ensures that we obtain the
2616. // minimum latency in this kind of case.
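// Illustrative example (hypothetical values): with PART-TARGET=1 and a last
// segment duration of 6, 1 * 5 <= 6, so the 1s partial target duration goes
// into the running minimum; with a last segment duration of 4, the 4s full
// duration is used instead.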
  2617. if (this.partialTargetDuration_ * 5 <= lastTargetDuration) {
  2618. this.lastTargetDuration_ = Math.min(
  2619. this.partialTargetDuration_, this.lastTargetDuration_);
  2620. } else {
  2621. this.lastTargetDuration_ = Math.min(
  2622. lastTargetDuration, this.lastTargetDuration_);
  2623. }
  2624. } else {
  2625. this.lastTargetDuration_ = Math.min(
  2626. this.partialTargetDuration_, this.lastTargetDuration_);
  2627. }
  2628. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  2629. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  2630. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  2631. } else {
  2632. this.lastTargetDuration_ = Math.min(
  2633. lastTargetDuration, this.lastTargetDuration_);
  2634. // Use 'HOLD-BACK' as the presentation delay for default if defined.
  2635. const holdBack = serverControlTag ?
  2636. serverControlTag.getAttribute('HOLD-BACK') : null;
  2637. this.presentationDelay_ = holdBack ? Number(holdBack.value) : 0;
  2638. }
  2639. // 2. Update the longest target duration if need be to use as a
  2640. // presentation delay later.
  2641. this.maxTargetDuration_ = Math.max(
  2642. targetDuration, this.maxTargetDuration_);
  2643. }
  2644. /**
  2645. * @param {!shaka.hls.Playlist} playlist
  2646. * @private
  2647. */
  2648. changePresentationTimelineToLive_(playlist) {
  2649. // The live edge will be calculated from segments, so we don't need to
  2650. // set a presentation start time. We will assert later that this is
  2651. // working as expected.
  2652. // The HLS spec (RFC 8216) states in 6.3.3:
  2653. //
  2654. // "The client SHALL choose which Media Segment to play first ... the
  2655. // client SHOULD NOT choose a segment that starts less than three target
  2656. // durations from the end of the Playlist file. Doing so can trigger
  2657. // playback stalls."
  2658. //
  2659. // We accomplish this in our DASH-y model by setting a presentation
  2660. // delay of configured value, or 3 segments duration if not configured.
  2661. // This will be the "live edge" of the presentation.
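// Illustrative example (hypothetical values, when no configured delay or
// HOLD-BACK applies): with liveSegmentsDelay = 3, a 10-segment playlist, and
// a max target duration of 6s, the delay is 18s; a 4-segment playlist caps
// delaySegments at 2, giving 12s.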
  2662. let presentationDelay;
  2663. if (this.config_.defaultPresentationDelay) {
  2664. presentationDelay = this.config_.defaultPresentationDelay;
  2665. } else if (this.lowLatencyPresentationDelay_) {
  2666. presentationDelay = this.lowLatencyPresentationDelay_;
  2667. } else if (this.presentationDelay_) {
  2668. presentationDelay = this.presentationDelay_;
  2669. } else {
  2670. const playlistSegments = playlist.segments.length;
  2671. let delaySegments = this.config_.hls.liveSegmentsDelay;
  2672. if (delaySegments > (playlistSegments - 2)) {
  2673. delaySegments = Math.max(1, playlistSegments - 2);
  2674. }
  2675. presentationDelay = this.maxTargetDuration_ * delaySegments;
  2676. }
  2677. this.presentationTimeline_.setPresentationStartTime(0);
  2678. this.presentationTimeline_.setDelay(presentationDelay);
  2679. this.presentationTimeline_.setStatic(false);
  2680. }
  2681. /**
  2682. * Get the InitSegmentReference for a segment if it has a EXT-X-MAP tag.
  2683. * @param {!shaka.hls.Playlist} playlist
  2684. * @param {!Array.<!shaka.hls.Tag>} tags Segment tags
  2685. * @param {function():!Array.<string>} getUris
  2686. * @param {?Map.<string, string>=} variables
  2687. * @return {shaka.media.InitSegmentReference}
  2688. * @private
  2689. */
  2690. getInitSegmentReference_(playlist, tags, getUris, variables) {
  2691. /** @type {?shaka.hls.Tag} */
  2692. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  2693. if (!mapTag) {
  2694. return null;
  2695. }
  2696. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  2697. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  2698. const absoluteInitSegmentUris = shaka.hls.Utils.constructSegmentUris(
  2699. getUris(), verbatimInitSegmentUri, variables);
  2700. const mapTagKey = [
  2701. absoluteInitSegmentUris.toString(),
  2702. mapTag.getAttributeValue('BYTERANGE', ''),
  2703. ].join('-');
  2704. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  2705. /** @type {shaka.extern.aes128Key|undefined} */
  2706. let aes128Key = undefined;
  2707. let byteRangeTag = null;
  2708. for (const tag of tags) {
  2709. if (tag.name == 'EXT-X-KEY') {
  2710. if (tag.getRequiredAttrValue('METHOD') == 'AES-128' &&
  2711. tag.id < mapTag.id) {
  2712. aes128Key =
  2713. this.parseAES128DrmTag_(tag, playlist, getUris, variables);
  2714. }
  2715. } else if (tag.name == 'EXT-X-BYTERANGE' && tag.id < mapTag.id) {
  2716. byteRangeTag = tag;
  2717. }
  2718. }
  2719. const initSegmentRef = this.createInitSegmentReference_(
  2720. absoluteInitSegmentUris, mapTag, byteRangeTag, aes128Key);
  2721. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  2722. }
  2723. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  2724. }
  2725. /**
  2726. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  2727. * playlist.
  2728. * @param {!Array.<string>} absoluteInitSegmentUris
  2729. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  2730. * @param {shaka.hls.Tag=} byteRangeTag EXT-X-BYTERANGE
  2731. * @param {shaka.extern.aes128Key=} aes128Key
  2732. * @return {!shaka.media.InitSegmentReference}
  2733. * @private
  2734. */
  2735. createInitSegmentReference_(absoluteInitSegmentUris, mapTag, byteRangeTag,
  2736. aes128Key) {
  2737. let startByte = 0;
  2738. let endByte = null;
  2739. let byterange = mapTag.getAttributeValue('BYTERANGE');
  2740. if (!byterange && byteRangeTag) {
  2741. byterange = byteRangeTag.value;
  2742. }
  2743. // If a BYTERANGE attribute is not specified, the segment consists
  2744. // of the entire resource.
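// A BYTERANGE value has the form "<length>@<offset>"; for example,
// BYTERANGE="720@0" describes bytes 0-719 (startByte = 0, endByte = 719).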
  2745. if (byterange) {
  2746. const blocks = byterange.split('@');
  2747. const byteLength = Number(blocks[0]);
  2748. startByte = Number(blocks[1]);
  2749. endByte = startByte + byteLength - 1;
  2750. if (aes128Key) {
  2751. // MAP segment encrypted with method 'AES-128', when served with
  2752. // HTTP Range, has the unencrypted size specified in the range.
  2753. // See: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
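// For example, a 1000-byte range is rounded up to 1008 bytes, the next
// multiple of the 16-byte AES block size.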
  2754. const length = (endByte + 1) - startByte;
  2755. if (length % 16) {
  2756. endByte += (16 - (length % 16));
  2757. }
  2758. }
  2759. }
  2760. const initSegmentRef = new shaka.media.InitSegmentReference(
  2761. () => absoluteInitSegmentUris,
  2762. startByte,
  2763. endByte,
  2764. /* mediaQuality= */ null,
  2765. /* timescale= */ null,
  2766. /* segmentData= */ null,
  2767. aes128Key);
  2768. return initSegmentRef;
  2769. }
  2770. /**
  2771. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  2772. *
  2773. * @param {shaka.media.InitSegmentReference} initSegmentReference
  2774. * @param {shaka.media.SegmentReference} previousReference
  2775. * @param {!shaka.hls.Segment} hlsSegment
  2776. * @param {number} startTime
  2777. * @param {!Map.<string, string>} variables
  2778. * @param {!shaka.hls.Playlist} playlist
  2779. * @param {shaka.extern.Stream} stream
  2780. * @param {function():!Array.<string>} getUris
  2781. * @param {shaka.extern.aes128Key=} aes128Key
  2782. * @return {shaka.media.SegmentReference}
  2783. * @private
  2784. */
  2785. createSegmentReference_(
  2786. initSegmentReference, previousReference, hlsSegment, startTime,
  2787. variables, playlist, stream, getUris, aes128Key) {
  2788. const tags = hlsSegment.tags;
  2789. const extinfTag =
  2790. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  2791. let endTime = 0;
  2792. let startByte = 0;
  2793. let endByte = null;
  2794. if (hlsSegment.partialSegments.length && !this.lowLatencyMode_) {
  2795. shaka.log.alwaysWarn('Low-latency HLS live stream detected, but ' +
  2796. 'low-latency streaming mode is not enabled in Shaka ' +
  2797. 'Player. Set streaming.lowLatencyMode configuration to ' +
  2798. 'true, and see https://bit.ly/3clctcj for details.');
  2799. }
  2800. let syncTime = null;
  2801. if (!this.config_.hls.ignoreManifestProgramDateTime) {
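// For example, #EXT-X-PROGRAM-DATE-TIME:2023-04-01T12:00:00.000Z is parsed
// into seconds since the epoch and used as this segment's sync time.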
  2802. const dateTimeTag =
  2803. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
  2804. if (dateTimeTag && dateTimeTag.value) {
  2805. syncTime = shaka.util.XmlUtils.parseDate(dateTimeTag.value);
  2806. goog.asserts.assert(syncTime != null,
  2807. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  2808. }
  2809. }
  2810. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  2811. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  2812. status = shaka.media.SegmentReference.Status.MISSING;
  2813. }
  2814. if (!extinfTag) {
  2815. if (hlsSegment.partialSegments.length == 0) {
  2816. // EXTINF tag must be available if the segment has no partial segments.
  2817. throw new shaka.util.Error(
  2818. shaka.util.Error.Severity.CRITICAL,
  2819. shaka.util.Error.Category.MANIFEST,
  2820. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  2821. } else if (!this.lowLatencyMode_) {
  2822. // Without EXTINF and without low-latency mode, partial segments get
  2823. // ignored.
  2824. return null;
  2825. }
  2826. }
  2827. // Create SegmentReferences for the partial segments.
  2828. let partialSegmentRefs = [];
  2829. // Optimization for LL-HLS with byterange
  2830. // More info in https://tinyurl.com/hls-open-byte-range
  2831. let segmentWithByteRangeOptimization = false;
  2832. let getUrisOptimization = null;
  2833. let somePartialSegmentWithGap = false;
  2834. let isPreloadSegment = false;
  2835. if (this.lowLatencyMode_ && hlsSegment.partialSegments.length) {
  2836. const byterangeOptimizationSupport = (stream.mimeType == 'video/mp4' ||
  2837. stream.mimeType == 'audio/mp4') && window.ReadableStream &&
  2838. this.config_.hls.allowLowLatencyByteRangeOptimization;
  2839. let partialSyncTime = syncTime;
  2840. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  2841. const item = hlsSegment.partialSegments[i];
  2842. const pPreviousReference = i == 0 ?
  2843. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  2844. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  2845. // If DURATION is missing from this partial segment, use the target
  2846. // partial duration from the top of the playlist, which is a required
  2847. // attribute for content with partial segments.
  2848. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  2849. this.partialTargetDuration_;
  2850. // If for some reason we have neither an explicit duration, nor a target
  2851. // partial duration, we should SKIP this partial segment to avoid
  2852. // duplicating content in the presentation timeline.
  2853. if (!pDuration) {
  2854. continue;
  2855. }
  2856. const pEndTime = pStartTime + pDuration;
  2857. let pStartByte = 0;
  2858. let pEndByte = null;
  2859. if (item.name == 'EXT-X-PRELOAD-HINT') {
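// For example, #EXT-X-PRELOAD-HINT:TYPE=PART,URI="p.mp4",
// BYTERANGE-START=3000,BYTERANGE-LENGTH=1500 hints bytes 3000-4499 of p.mp4.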
  2860. // A preload hinted partial segment may have byterange start info.
  2861. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  2862. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  2863. // A preload hinted partial segment may have byterange length info.
  2864. const pByterangeLength = item.getAttributeValue('BYTERANGE-LENGTH');
  2865. if (pByterangeLength) {
  2866. pEndByte = pStartByte + Number(pByterangeLength) - 1;
  2867. } else if (pStartByte) {
  2868. // If we have a non-zero start byte, but no end byte, follow the
  2869. // recommendation of https://tinyurl.com/hls-open-byte-range and
  2870. // set the end byte explicitly to a large integer.
  2871. pEndByte = Number.MAX_SAFE_INTEGER;
  2872. }
  2873. } else {
  2874. const pByterange = item.getAttributeValue('BYTERANGE');
  2875. [pStartByte, pEndByte] =
  2876. this.parseByteRange_(pPreviousReference, pByterange);
  2877. }
  2878. const pUri = item.getAttributeValue('URI');
  2879. if (!pUri) {
  2880. continue;
  2881. }
  2882. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  2883. if (item.getAttributeValue('GAP') == 'YES') {
  2884. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  2885. somePartialSegmentWithGap = true;
  2886. }
  2887. let uris = null;
  2888. const getPartialUris = () => {
  2889. if (uris == null) {
  2890. goog.asserts.assert(pUri, 'Partial uri should be defined!');
  2891. uris = shaka.hls.Utils.constructSegmentUris(
  2892. getUris(), pUri, variables);
  2893. }
  2894. return uris;
  2895. };
  2896. if (byterangeOptimizationSupport &&
  2897. pStartByte >= 0 && pEndByte != null) {
  2898. getUrisOptimization = getPartialUris;
  2899. segmentWithByteRangeOptimization = true;
  2900. }
  2901. const partial = new shaka.media.SegmentReference(
  2902. pStartTime,
  2903. pEndTime,
  2904. getPartialUris,
  2905. pStartByte,
  2906. pEndByte,
  2907. initSegmentReference,
  2908. /* timestampOffset= */ 0,
  2909. /* appendWindowStart= */ 0,
  2910. /* appendWindowEnd= */ Infinity,
  2911. /* partialReferences= */ [],
  2912. /* tilesLayout= */ '',
  2913. /* tileDuration= */ null,
  2914. partialSyncTime,
  2915. partialStatus,
  2916. aes128Key);
  2917. if (item.name == 'EXT-X-PRELOAD-HINT') {
  2918. partial.markAsPreload();
  2919. isPreloadSegment = true;
  2920. }
  2921. // The spec doesn't say that we can assume INDEPENDENT=YES for the
  2922. // first partial segment. It does call the flag "optional", though, and
2923. // in cases where there are no such flags on any partial segments, it
  2924. // is sensible to assume the first one is independent.
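// For example, #EXT-X-PART:DURATION=1.0,URI="p0.mp4",INDEPENDENT=YES marks
// a partial segment that starts with an independent frame.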
  2925. if (item.getAttributeValue('INDEPENDENT') != 'YES' && i > 0) {
  2926. partial.markAsNonIndependent();
  2927. }
  2928. partialSegmentRefs.push(partial);
  2929. if (partialSyncTime) {
  2930. partialSyncTime += pDuration;
  2931. }
  2932. } // for-loop of hlsSegment.partialSegments
  2933. }
2934. // If the segment has an EXTINF tag, set the segment's end time, start byte
  2935. // and end byte based on the duration and byterange information.
  2936. // Otherwise, calculate the end time, start / end byte based on its partial
  2937. // segments.
  2938. // Note that the sum of partial segments durations may be slightly different
  2939. // from the parent segment's duration. In this case, use the duration from
  2940. // the parent segment tag.
  2941. if (extinfTag) {
  2942. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  2943. // We're interested in the duration part.
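// For example, '#EXTINF:6.006,Segment title' gives a duration of 6.006.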
  2944. const extinfValues = extinfTag.value.split(',');
  2945. const duration = Number(extinfValues[0]);
  2946. // Skip segments without duration
  2947. if (duration == 0) {
  2948. return null;
  2949. }
  2950. endTime = startTime + duration;
  2951. } else if (partialSegmentRefs.length) {
  2952. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  2953. } else {
2954. // Skip segments without a duration and without partial segments.
  2955. return null;
  2956. }
  2957. if (segmentWithByteRangeOptimization) {
  2958. // We cannot optimize segments with gaps, or with a start byte that is
  2959. // not 0.
  2960. if (somePartialSegmentWithGap || partialSegmentRefs[0].startByte != 0) {
  2961. segmentWithByteRangeOptimization = false;
  2962. getUrisOptimization = null;
  2963. } else {
  2964. partialSegmentRefs = [];
  2965. }
  2966. }
2967. // If the segment has an EXT-X-BYTERANGE tag, set the start byte and end
2968. // byte based on the byterange information. If the segment has no
2969. // EXT-X-BYTERANGE tag but has partial segments, set the start byte and
2970. // end byte based on the partial segments.
  2971. const byterangeTag =
  2972. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  2973. if (byterangeTag) {
  2974. [startByte, endByte] =
  2975. this.parseByteRange_(previousReference, byterangeTag.value);
  2976. } else if (partialSegmentRefs.length) {
  2977. startByte = partialSegmentRefs[0].startByte;
  2978. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  2979. }
  2980. let tilesLayout = '';
  2981. let tileDuration = null;
  2982. if (stream.type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  2983. // By default in HLS the tilesLayout is 1x1
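// An EXT-X-TILES tag overrides that; for example,
// #EXT-X-TILES:RESOLUTION=640x360,LAYOUT=5x4,DURATION=6.006 yields a
// tilesLayout of '5x4' and a tile duration of 6.006 seconds.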
  2984. tilesLayout = '1x1';
  2985. const tilesTag =
  2986. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  2987. if (tilesTag) {
  2988. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  2989. const duration = tilesTag.getAttributeValue('DURATION');
  2990. if (duration) {
  2991. tileDuration = Number(duration);
  2992. }
  2993. }
  2994. }
  2995. let uris = null;
  2996. const getSegmentUris = () => {
  2997. if (getUrisOptimization) {
  2998. return getUrisOptimization();
  2999. }
  3000. if (uris == null) {
  3001. uris = shaka.hls.Utils.constructSegmentUris(getUris(),
  3002. hlsSegment.verbatimSegmentUri, variables);
  3003. }
  3004. return uris || [];
  3005. };
  3006. const allPartialSegments = partialSegmentRefs.length > 0 &&
  3007. !!hlsSegment.verbatimSegmentUri;
  3008. const reference = new shaka.media.SegmentReference(
  3009. startTime,
  3010. endTime,
  3011. getSegmentUris,
  3012. startByte,
  3013. endByte,
  3014. initSegmentReference,
  3015. /* timestampOffset= */ 0,
  3016. /* appendWindowStart= */ 0,
  3017. /* appendWindowEnd= */ Infinity,
  3018. partialSegmentRefs,
  3019. tilesLayout,
  3020. tileDuration,
  3021. syncTime,
  3022. status,
  3023. aes128Key,
  3024. allPartialSegments,
  3025. );
  3026. if (segmentWithByteRangeOptimization) {
  3027. this.lowLatencyByterangeOptimization_ = true;
  3028. reference.markAsByterangeOptimization();
  3029. if (isPreloadSegment) {
  3030. reference.markAsPreload();
  3031. }
  3032. }
  3033. return reference;
  3034. }
  3035. /**
  3036. * Parse the startByte and endByte.
  3037. * @param {shaka.media.SegmentReference} previousReference
  3038. * @param {?string} byterange
  3039. * @return {!Array.<number>} An array with the start byte and end byte.
  3040. * @private
  3041. */
  3042. parseByteRange_(previousReference, byterange) {
  3043. let startByte = 0;
  3044. let endByte = null;
  3045. // If BYTERANGE is not specified, the segment consists of the entire
  3046. // resource.
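// For example, "500@1000" maps to startByte = 1000 and endByte = 1499, while
// "500" (no offset) continues from the previous reference's endByte + 1.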
  3047. if (byterange) {
  3048. const blocks = byterange.split('@');
  3049. const byteLength = Number(blocks[0]);
  3050. if (blocks[1]) {
  3051. startByte = Number(blocks[1]);
  3052. } else {
  3053. goog.asserts.assert(previousReference,
  3054. 'Cannot refer back to previous HLS segment!');
  3055. startByte = previousReference.endByte + 1;
  3056. }
  3057. endByte = startByte + byteLength - 1;
  3058. }
  3059. return [startByte, endByte];
  3060. }
  3061. /**
3062. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences and
3063. * gets the bandwidth necessary for these segments, if it is defined in
3064. * the playlist.
  3065. *
  3066. * @param {!shaka.hls.Playlist} playlist
  3067. * @param {shaka.extern.Stream} stream
  3068. * @param {!Map.<number, number>} mediaSequenceToStartTime
  3069. * @param {!Map.<string, string>} variables
  3070. * @param {function():!Array.<string>} getUris
  3071. * @return {{segments: !Array.<!shaka.media.SegmentReference>,
  3072. * bandwidth: (number|undefined)}}
  3073. * @private
  3074. */
  3075. createSegments_(playlist, stream, mediaSequenceToStartTime, variables,
  3076. getUris) {
  3077. /** @type {Array.<!shaka.hls.Segment>} */
  3078. const hlsSegments = playlist.segments;
  3079. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  3080. /** @type {shaka.media.InitSegmentReference} */
  3081. let initSegmentRef;
  3082. /** @type {shaka.extern.aes128Key|undefined} */
  3083. let aes128Key = undefined;
  3084. let discontinuitySequence = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3085. playlist.tags, 'EXT-X-DISCONTINUITY-SEQUENCE', 0);
  3086. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3087. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3088. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  3089. playlist.tags, 'EXT-X-SKIP');
  3090. const skippedSegments =
  3091. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  3092. let position = mediaSequenceNumber + skippedSegments;
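// For example, with #EXT-X-MEDIA-SEQUENCE:100 and an EXT-X-SKIP tag with
// SKIPPED-SEGMENTS=5, the first segment listed has position 105.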
  3093. let firstStartTime = 0;
  3094. // For live stream, use the cached value in the mediaSequenceToStartTime
  3095. // map if available.
  3096. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  3097. firstStartTime = mediaSequenceToStartTime.get(position);
  3098. }
  3099. /** @type {!Array.<!shaka.media.SegmentReference>} */
  3100. const references = [];
  3101. let previousReference = null;
  3102. /** @type {!Array.<{bitrate: number, duration: number}>} */
  3103. const bitrates = [];
  3104. for (let i = 0; i < hlsSegments.length; i++) {
  3105. const item = hlsSegments[i];
  3106. const startTime =
  3107. (i == 0) ? firstStartTime : previousReference.endTime;
  3108. position = mediaSequenceNumber + skippedSegments + i;
  3109. const discontinuityTag = shaka.hls.Utils.getFirstTagWithName(
  3110. item.tags, 'EXT-X-DISCONTINUITY');
  3111. if (discontinuityTag) {
  3112. discontinuitySequence++;
  3113. }
  3114. // Apply new AES-128 tags as you see them, keeping a running total.
  3115. for (const drmTag of item.tags) {
  3116. if (drmTag.name == 'EXT-X-KEY') {
  3117. if (drmTag.getRequiredAttrValue('METHOD') == 'AES-128') {
  3118. aes128Key =
  3119. this.parseAES128DrmTag_(drmTag, playlist, getUris, variables);
  3120. } else {
  3121. aes128Key = undefined;
  3122. }
  3123. }
  3124. }
  3125. mediaSequenceToStartTime.set(position, startTime);
  3126. initSegmentRef = this.getInitSegmentReference_(playlist,
  3127. item.tags, getUris, variables);
3128. // If the stream is low-latency and the user has not enabled
3129. // lowLatencyMode, but the player is configured to activate
3130. // lowLatencyMode automatically when such a stream is detected,
3131. // enable it now.
  3132. if (!this.lowLatencyMode_) {
  3133. const autoLowLatencyMode = this.playerInterface_.isAutoLowLatencyMode();
  3134. if (autoLowLatencyMode) {
  3135. this.playerInterface_.enableLowLatencyMode();
  3136. this.lowLatencyMode_ = this.playerInterface_.isLowLatencyMode();
  3137. }
  3138. }
  3139. const reference = this.createSegmentReference_(
  3140. initSegmentRef,
  3141. previousReference,
  3142. item,
  3143. startTime,
  3144. variables,
  3145. playlist,
  3146. stream,
  3147. getUris,
  3148. aes128Key);
  3149. if (reference) {
  3150. const bitrate = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3151. item.tags, 'EXT-X-BITRATE');
  3152. if (bitrate) {
  3153. bitrates.push({
  3154. bitrate,
  3155. duration: reference.endTime - reference.startTime,
  3156. });
  3157. } else if (bitrates.length) {
3158. // An EXT-X-BITRATE tag applies to every segment between it and the next
3159. // EXT-X-BITRATE tag, so add this duration to the latest bitrate entry.
  3160. const prevBitrate = bitrates.pop();
  3161. prevBitrate.duration += reference.endTime - reference.startTime;
  3162. bitrates.push(prevBitrate);
  3163. }
  3164. previousReference = reference;
  3165. reference.discontinuitySequence = discontinuitySequence;
  3166. if (this.config_.hls.ignoreManifestProgramDateTime &&
  3167. this.minSequenceNumber_ != null &&
  3168. position < this.minSequenceNumber_) {
  3169. // This segment is ignored as part of our fallback synchronization
  3170. // method.
  3171. } else {
  3172. references.push(reference);
  3173. }
  3174. }
  3175. }
  3176. let bandwidth = undefined;
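// Compute a duration-weighted average of the EXT-X-BITRATE values (kbps)
// and convert to bps. For example, 6s at 800 kbps and 4s at 1200 kbps gives
// round((800*6 + 1200*4) / 10 * 1000) = 960000 bps.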
  3177. if (bitrates.length) {
  3178. const duration = bitrates.reduce((sum, value) => {
  3179. return sum + value.duration;
  3180. }, 0);
  3181. bandwidth = Math.round(bitrates.reduce((sum, value) => {
  3182. return sum + value.bitrate * value.duration;
  3183. }, 0) / duration * 1000);
  3184. }
  3185. // If some segments have sync times, but not all, extrapolate the sync
  3186. // times of the ones with none.
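// For example, given references A (syncTime 100, 6s), B (no syncTime, 6s)
// and C (no syncTime), B gets syncTime 100 + 6 = 106 by looking backward to
// A, and C then gets 106 + 6 = 112 the same way.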
  3187. const someSyncTime = references.some((ref) => ref.syncTime != null);
  3188. if (someSyncTime) {
  3189. for (let i = 0; i < references.length; i++) {
  3190. const reference = references[i];
  3191. if (reference.syncTime != null) {
  3192. // No need to extrapolate.
  3193. continue;
  3194. }
  3195. // Find the nearest segment with syncTime, in either direction.
3196. // This looks forward and backward simultaneously, keeping track of the
3197. // offset to apply to any syncTime it finds as it goes.
  3198. let forwardAdd = 0;
  3199. let forwardI = i;
  3200. /**
  3201. * Look forwards one reference at a time, summing all durations as we
  3202. * go, until we find a reference with a syncTime to use as a basis.
  3203. * This DOES count the original reference, but DOESN'T count the first
  3204. * reference with a syncTime (as we approach it from behind).
  3205. * @return {?number}
  3206. */
  3207. const lookForward = () => {
  3208. const other = references[forwardI];
  3209. if (other) {
  3210. if (other.syncTime != null) {
  3211. return other.syncTime + forwardAdd;
  3212. }
  3213. forwardAdd -= other.endTime - other.startTime;
  3214. forwardI += 1;
  3215. }
  3216. return null;
  3217. };
  3218. let backwardAdd = 0;
  3219. let backwardI = i;
  3220. /**
  3221. * Look backwards one reference at a time, summing all durations as we
  3222. * go, until we find a reference with a syncTime to use as a basis.
  3223. * This DOESN'T count the original reference, but DOES count the first
  3224. * reference with a syncTime (as we approach it from ahead).
  3225. * @return {?number}
  3226. */
  3227. const lookBackward = () => {
  3228. const other = references[backwardI];
  3229. if (other) {
  3230. if (other != reference) {
  3231. backwardAdd += other.endTime - other.startTime;
  3232. }
  3233. if (other.syncTime != null) {
  3234. return other.syncTime + backwardAdd;
  3235. }
  3236. backwardI -= 1;
  3237. }
  3238. return null;
  3239. };
  3240. while (reference.syncTime == null) {
  3241. reference.syncTime = lookBackward();
  3242. if (reference.syncTime == null) {
  3243. reference.syncTime = lookForward();
  3244. }
  3245. }
  3246. }
  3247. }
  3248. // Split the sync times properly among partial segments.
  3249. if (someSyncTime) {
  3250. for (const reference of references) {
  3251. let syncTime = reference.syncTime;
  3252. for (const partial of reference.partialReferences) {
  3253. partial.syncTime = syncTime;
  3254. syncTime += partial.endTime - partial.startTime;
  3255. }
  3256. }
  3257. }
  3258. // lowestSyncTime is a value from a previous playlist update. Use it to
  3259. // set reference start times. If this is the first playlist parse, we will
  3260. // skip this step, and wait until we have sync time across stream types.
  3261. const lowestSyncTime = this.lowestSyncTime_;
  3262. if (someSyncTime && lowestSyncTime != Infinity) {
  3263. for (const reference of references) {
  3264. reference.syncAgainst(lowestSyncTime);
  3265. }
  3266. }
  3267. return {
  3268. segments: references,
  3269. bandwidth,
  3270. };
  3271. }
  3272. /**
3273. * Attempts to guess the stream's mime type based on content type and codecs.
  3274. *
  3275. * @param {string} contentType
  3276. * @param {string} codecs
  3277. * @return {?string}
  3278. * @private
  3279. */
  3280. guessMimeTypeBeforeLoading_(contentType, codecs) {
  3281. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3282. if (codecs == 'vtt' || codecs == 'wvtt') {
3283. // If the codecs value is 'vtt' or 'wvtt', it's WebVTT.
  3284. return 'text/vtt';
  3285. } else if (codecs && codecs !== '') {
  3286. // Otherwise, assume MP4-embedded text, since text-based formats tend
  3287. // not to have a codecs string at all.
  3288. return 'application/mp4';
  3289. }
  3290. }
  3291. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3292. if (!codecs || codecs == 'jpeg') {
  3293. return 'image/jpeg';
  3294. }
  3295. }
  3296. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  3297. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
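// 'mp4a.40.34' indicates MP3 audio (MPEG-4 audio object type 34), which is
// signaled as 'audio/mpeg' for browser compatibility.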
  3298. if (codecs == 'mp4a.40.34') {
  3299. return 'audio/mpeg';
  3300. }
  3301. }
  3302. if (codecs == 'mjpg') {
  3303. return 'application/mp4';
  3304. }
  3305. // Not enough information to guess from the content type and codecs.
  3306. return null;
  3307. }
  3308. /**
  3309. * Get a fallback mime type for the content. Used if all the better methods
  3310. * for determining the mime type have failed.
  3311. *
  3312. * @param {string} contentType
  3313. * @return {string}
  3314. * @private
  3315. */
  3316. guessMimeTypeFallback_(contentType) {
  3317. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3318. // If there was no codecs string and no content-type, assume HLS text
  3319. // streams are WebVTT.
  3320. return 'text/vtt';
  3321. }
  3322. // If the HLS content is lacking in both MIME type metadata and
  3323. // segment file extensions, we fall back to assuming it's MP4.
  3324. const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3325. return map['mp4'];
  3326. }
  3327. /**
  3328. * Attempts to guess stream's mime type based on content type, URI, and
  3329. * contents of the playlist.
  3330. *
  3331. * @param {string} contentType
  3332. * @param {string} codecs
  3333. * @param {!shaka.hls.Playlist} playlist
  3334. * @param {!Map.<string, string>} variables
  3335. * @param {function():!Array.<string>} getUris
  3336. * @return {!Promise.<string>}
  3337. * @private
  3338. */
  3339. async guessMimeType_(contentType, codecs, playlist, variables, getUris) {
  3340. const HlsParser = shaka.hls.HlsParser;
  3341. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  3342. // If you wait long enough, requesting the first segment can fail
3343. // because it has fallen off the left edge of the DVR window, so to be safer,
  3344. // let's request the middle segment.
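// For example, a playlist with 7 segments yields
// middleSegmentIdx = trunc((7 - 1) / 2) = 3.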
  3345. goog.asserts.assert(playlist.segments.length,
  3346. 'Playlist should have segments!');
  3347. const middleSegmentIdx = Math.trunc((playlist.segments.length - 1) / 2);
  3348. const middleSegmentUris = shaka.hls.Utils.constructSegmentUris(
  3349. getUris(),
  3350. playlist.segments[middleSegmentIdx].verbatimSegmentUri,
  3351. variables);
  3352. const parsedUri = new goog.Uri(middleSegmentUris[0]);
  3353. const extension = parsedUri.getPath().split('.').pop();
  3354. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3355. let mimeType = map[extension];
  3356. if (mimeType) {
  3357. return mimeType;
  3358. }
  3359. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  3360. if (mimeType) {
  3361. return mimeType;
  3362. }
  3363. // The extension map didn't work, so guess based on codecs.
  3364. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  3365. if (mimeType) {
  3366. return mimeType;
  3367. }
  3368. // If unable to guess mime type, request a segment and try getting it
  3369. // from the response.
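// A HEAD request returns only the response headers, so the content-type can
// be checked without downloading the segment body.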
  3370. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  3371. middleSegmentUris, this.config_.retryParameters);
  3372. headRequest.method = 'HEAD';
  3373. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  3374. const response = await this.makeNetworkRequest_(
  3375. headRequest, requestType, {type});
  3376. const contentMimeType = response.headers['content-type'];
  3377. if (contentMimeType) {
  3378. // Split the MIME type in case the server sent additional parameters.
  3379. return contentMimeType.split(';')[0];
  3380. }
  3381. return this.guessMimeTypeFallback_(contentType);
  3382. }
  3383. /**
  3384. * Returns a tag with a given name.
  3385. * Throws an error if tag was not found.
  3386. *
  3387. * @param {!Array.<shaka.hls.Tag>} tags
  3388. * @param {string} tagName
  3389. * @return {!shaka.hls.Tag}
  3390. * @private
  3391. */
  3392. getRequiredTag_(tags, tagName) {
  3393. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  3394. if (!tag) {
  3395. throw new shaka.util.Error(
  3396. shaka.util.Error.Severity.CRITICAL,
  3397. shaka.util.Error.Category.MANIFEST,
  3398. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  3399. }
  3400. return tag;
  3401. }
  3402. /**
  3403. * @param {shaka.extern.Stream} stream
  3404. * @param {?string} width
  3405. * @param {?string} height
  3406. * @param {?string} frameRate
  3407. * @param {?string} videoRange
  3408. * @param {?string} videoLayout
  3409. * @private
  3410. */
  3411. addVideoAttributes_(stream, width, height, frameRate, videoRange,
  3412. videoLayout) {
  3413. if (stream) {
  3414. stream.width = Number(width) || undefined;
  3415. stream.height = Number(height) || undefined;
  3416. stream.frameRate = Number(frameRate) || undefined;
  3417. stream.hdr = videoRange || undefined;
  3418. stream.videoLayout = videoLayout || undefined;
  3419. }
  3420. }
  3421. /**
  3422. * Makes a network request for the manifest and returns a Promise
  3423. * with the resulting data.
  3424. *
  3425. * @param {!Array.<string>} uris
  3426. * @param {boolean=} isPlaylist
  3427. * @return {!Promise.<!shaka.extern.Response>}
  3428. * @private
  3429. */
  3430. requestManifest_(uris, isPlaylist) {
  3431. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  3432. const request = shaka.net.NetworkingEngine.makeRequest(
  3433. uris, this.config_.retryParameters);
  3434. const type = isPlaylist ?
  3435. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_PLAYLIST :
  3436. shaka.net.NetworkingEngine.AdvancedRequestType.MASTER_PLAYLIST;
  3437. return this.makeNetworkRequest_(request, requestType, {type});
  3438. }
  3439. /**
  3440. * Called when the update timer ticks. Because parsing a manifest is async,
3441. * this method is async. To work with this, this method will schedule the
3442. * next update when it finishes instead of relying on a repeating timer.
  3443. *
  3444. * @return {!Promise}
  3445. * @private
  3446. */
  3447. async onUpdate_() {
  3448. shaka.log.info('Updating manifest...');
  3449. goog.asserts.assert(
  3450. this.getUpdatePlaylistDelay_() > 0,
3451. 'We should only call |onUpdate_| when we are supposed to be updating.');
  3452. // Detect a call to stop()
  3453. if (!this.playerInterface_) {
  3454. return;
  3455. }
  3456. try {
  3457. const startTime = Date.now();
  3458. await this.update();
  3459. // Keep track of how long the longest manifest update took.
  3460. const endTime = Date.now();
  3461. // This may have converted to VOD, in which case we stop updating.
  3462. if (this.isLive_()) {
  3463. const updateDuration = (endTime - startTime) / 1000.0;
  3464. this.averageUpdateDuration_.sample(1, updateDuration);
  3465. const delay = this.getUpdatePlaylistDelay_();
  3466. const finalDelay = Math.max(0,
  3467. delay - this.averageUpdateDuration_.getEstimate());
  3468. this.updatePlaylistTimer_.tickAfter(/* seconds= */ finalDelay);
  3469. }
  3470. } catch (error) {
  3471. // Detect a call to stop() during this.update()
  3472. if (!this.playerInterface_) {
  3473. return;
  3474. }
  3475. goog.asserts.assert(error instanceof shaka.util.Error,
  3476. 'Should only receive a Shaka error');
  3477. if (this.config_.raiseFatalErrorOnManifestUpdateRequestFailure) {
  3478. this.playerInterface_.onError(error);
  3479. return;
  3480. }
  3481. // We will retry updating, so override the severity of the error.
  3482. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  3483. this.playerInterface_.onError(error);
  3484. // Try again very soon.
  3485. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  3486. }
  3487. // Detect a call to stop()
  3488. if (!this.playerInterface_) {
  3489. return;
  3490. }
  3491. this.playerInterface_.onManifestUpdated();
  3492. }
  3493. /**
  3494. * @return {boolean}
  3495. * @private
  3496. */
  3497. isLive_() {
  3498. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  3499. return this.presentationType_ != PresentationType.VOD;
  3500. }
  3501. /**
  3502. * @return {number}
  3503. * @private
  3504. */
  3505. getUpdatePlaylistDelay_() {
  3506. // The HLS spec (RFC 8216) states in 6.3.4:
  3507. // "the client MUST wait for at least the target duration before
  3508. // attempting to reload the Playlist file again".
  3509. // For LL-HLS, the server must add a new partial segment to the Playlist
  3510. // every part target duration.
  3511. return this.lastTargetDuration_;
  3512. }
  3513. /**
  3514. * @param {shaka.hls.HlsParser.PresentationType_} type
  3515. * @private
  3516. */
  3517. setPresentationType_(type) {
  3518. this.presentationType_ = type;
  3519. if (this.presentationTimeline_) {
  3520. this.presentationTimeline_.setStatic(!this.isLive_());
  3521. }
  3522. // If this manifest is not for live content, then we have no reason to
  3523. // update it.
  3524. if (!this.isLive_()) {
  3525. this.updatePlaylistTimer_.stop();
  3526. }
  3527. }
  3528. /**
  3529. * Create a networking request. This will manage the request using the
  3530. * parser's operation manager. If the parser has already been stopped, the
  3531. * request will not be made.
  3532. *
  3533. * @param {shaka.extern.Request} request
  3534. * @param {shaka.net.NetworkingEngine.RequestType} type
  3535. * @param {shaka.extern.RequestContext=} context
  3536. * @return {!Promise.<shaka.extern.Response>}
  3537. * @private
  3538. */
  3539. makeNetworkRequest_(request, type, context) {
  3540. if (!this.operationManager_) {
  3541. throw new shaka.util.Error(
  3542. shaka.util.Error.Severity.CRITICAL,
  3543. shaka.util.Error.Category.PLAYER,
  3544. shaka.util.Error.Code.OPERATION_ABORTED);
  3545. }
  3546. const op = this.playerInterface_.networkingEngine.request(
  3547. type, request, context);
  3548. this.operationManager_.manage(op);
  3549. return op.promise;
  3550. }
  3551. /**
  3552. * @param {!shaka.hls.Tag} drmTag
  3553. * @param {string} mimeType
  3554. * @return {?shaka.extern.DrmInfo}
  3555. * @private
  3556. */
  3557. static fairplayDrmParser_(drmTag, mimeType) {
  3558. if (mimeType == 'video/mp2t') {
  3559. throw new shaka.util.Error(
  3560. shaka.util.Error.Severity.CRITICAL,
  3561. shaka.util.Error.Category.MANIFEST,
  3562. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  3563. }
  3564. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  3565. throw new shaka.util.Error(
  3566. shaka.util.Error.Severity.CRITICAL,
  3567. shaka.util.Error.Category.MANIFEST,
  3568. shaka.util.Error.Code
  3569. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  3570. }
  3571. /*
  3572. * Even if we're not able to construct initData through the HLS tag, adding
  3573. * a DRMInfo will allow DRM Engine to request a media key system access
  3574. * with the correct keySystem and initDataType
  3575. */
  3576. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3577. 'com.apple.fps', [
  3578. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  3579. ]);
  3580. return drmInfo;
  3581. }
  3582. /**
  3583. * @param {!shaka.hls.Tag} drmTag
  3584. * @return {?shaka.extern.DrmInfo}
  3585. * @private
  3586. */
  3587. static widevineDrmParser_(drmTag) {
  3588. const method = drmTag.getRequiredAttrValue('METHOD');
  3589. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3590. if (!VALID_METHODS.includes(method)) {
  3591. shaka.log.error('Widevine in HLS is only supported with [',
  3592. VALID_METHODS.join(', '), '], not', method);
  3593. return null;
  3594. }
  3595. const uri = drmTag.getRequiredAttrValue('URI');
  3596. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri);
  3597. // The data encoded in the URI is a PSSH box to be used as init data.
  3598. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  3599. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3600. 'com.widevine.alpha', [
  3601. {initDataType: 'cenc', initData: pssh},
  3602. ]);
  3603. const keyId = drmTag.getAttributeValue('KEYID');
  3604. if (keyId) {
  3605. const keyIdLowerCase = keyId.toLowerCase();
  3606. // This value should begin with '0x':
  3607. goog.asserts.assert(
  3608. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  3609. // But the output should not contain the '0x':
  3610. drmInfo.keyIds = new Set([keyIdLowerCase.substr(2)]);
  3611. }
  3612. return drmInfo;
  3613. }
  3614. /**
  3615. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  3616. *
  3617. * @param {!shaka.hls.Tag} drmTag
  3618. * @return {?shaka.extern.DrmInfo}
  3619. * @private
  3620. */
  3621. static playreadyDrmParser_(drmTag) {
  3622. const method = drmTag.getRequiredAttrValue('METHOD');
  3623. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3624. if (!VALID_METHODS.includes(method)) {
  3625. shaka.log.error('PlayReady in HLS is only supported with [',
  3626. VALID_METHODS.join(', '), '], not', method);
  3627. return null;
  3628. }
  3629. const uri = drmTag.getRequiredAttrValue('URI');
  3630. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri);
3631. // The data encoded in the URI is a PlayReady Pro Object, so we need to
3632. // convert it to a PSSH box.
  3633. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  3634. const systemId = new Uint8Array([
  3635. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  3636. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  3637. ]);
  3638. const keyIds = new Set();
  3639. const psshVersion = 0;
  3640. const pssh =
  3641. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  3642. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  3643. 'com.microsoft.playready', [
  3644. {initDataType: 'cenc', initData: pssh},
  3645. ]);
  3646. return drmInfo;
  3647. }
  3648. /**
  3649. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  3650. *
  3651. * @param {!shaka.hls.Tag} drmTag
  3652. * @return {?shaka.extern.DrmInfo}
  3653. * @private
  3654. */
  3655. static identityDrmParser_(drmTag) {
  3656. const method = drmTag.getRequiredAttrValue('METHOD');
  3657. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  3658. if (!VALID_METHODS.includes(method)) {
  3659. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  3660. VALID_METHODS.join(', '), '], not', method);
  3661. return null;
  3662. }
  3663. // NOTE: The ClearKey CDM requires a key-id to key mapping. HLS doesn't
  3664. // provide a key ID anywhere. So although we could use the 'URI' attribute
  3665. // to fetch the actual 16-byte key, without a key ID, we can't provide this
  3666. // automatically to the ClearKey CDM. Instead, the application will have
  3667. // to use player.configure('drm.clearKeys', { ... }) to provide the key IDs
  3668. // and keys or player.configure('drm.servers.org\.w3\.clearkey', ...) to
  3669. // provide a ClearKey license server URI.
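// For example (the key ID and key below are hypothetical hex strings):
// player.configure('drm.clearKeys', {
//   'deadbeefdeadbeefdeadbeefdeadbeef': '0123456789abcdef0123456789abcdef',
// });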
  3670. return shaka.util.ManifestParserUtils.createDrmInfo(
  3671. 'org.w3.clearkey', /* initDatas= */ null);
  3672. }
  3673. };
  3674. /**
  3675. * @typedef {{
  3676. * stream: !shaka.extern.Stream,
  3677. * type: string,
  3678. * redirectUris: !Array.<string>,
  3679. * getUris: function():!Array.<string>,
  3680. * minTimestamp: number,
  3681. * maxTimestamp: number,
  3682. * mediaSequenceToStartTime: !Map.<number, number>,
  3683. * canSkipSegments: boolean,
  3684. * canBlockReload: boolean,
  3685. * hasEndList: boolean,
  3686. * firstSequenceNumber: number,
  3687. * nextMediaSequence: number,
  3688. * nextPart: number,
  3689. * loadedOnce: boolean
  3690. * }}
  3691. *
  3692. * @description
  3693. * Contains a stream and information about it.
  3694. *
  3695. * @property {!shaka.extern.Stream} stream
  3696. * The Stream itself.
  3697. * @property {string} type
  3698. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  3699. * @property {!Array.<string>} redirectUris
  3700. * The redirect URIs.
  3701. * @property {function():!Array.<string>} getUris
3702. * The verbatim media playlist URIs, as they appeared in the master playlist.
  3703. * @property {number} minTimestamp
  3704. * The minimum timestamp found in the stream.
  3705. * @property {number} maxTimestamp
  3706. * The maximum timestamp found in the stream.
  3707. * @property {!Map.<number, number>} mediaSequenceToStartTime
  3708. * A map of media sequence numbers to media start times.
  3709. * Only used for VOD content.
  3710. * @property {boolean} canSkipSegments
  3711. * True if the server supports delta playlist updates, and we can send a
  3712. * request for a playlist that can skip older media segments.
  3713. * @property {boolean} canBlockReload
  3714. * True if the server supports blocking playlist reload, and we can send a
  3715. * request for a playlist that can block reload until some segments are
  3716. * present.
  3717. * @property {boolean} hasEndList
  3718. * True if the stream has an EXT-X-ENDLIST tag.
  3719. * @property {number} firstSequenceNumber
  3720. * The sequence number of the first reference. Only calculated if needed.
  3721. * @property {number} nextMediaSequence
  3722. * The next media sequence.
  3723. * @property {number} nextPart
  3724. * The next part.
  3725. * @property {boolean} loadedOnce
  3726. * True if the stream has been loaded at least once.
  3727. */
  3728. shaka.hls.HlsParser.StreamInfo;
  3729. /**
  3730. * @typedef {{
  3731. * audio: !Array.<shaka.hls.HlsParser.StreamInfo>,
  3732. * video: !Array.<shaka.hls.HlsParser.StreamInfo>
  3733. * }}
  3734. *
  3735. * @description Audio and video stream infos.
  3736. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} audio
  3737. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} video
  3738. */
  3739. shaka.hls.HlsParser.StreamInfos;
  3740. /**
  3741. * @const {!Object.<string, string>}
  3742. * @private
  3743. */
  3744. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = {
  3745. 'aac': 'audio/aac',
  3746. 'ac3': 'audio/ac3',
  3747. 'ec3': 'audio/ec3',
  3748. 'mp3': 'audio/mpeg',
  3749. };
  3750. /**
  3751. * @const {!Object.<string, string>}
  3752. * @private
  3753. */
  3754. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
  3755. 'mp4': 'audio/mp4',
  3756. 'mp4a': 'audio/mp4',
  3757. 'm4s': 'audio/mp4',
  3758. 'm4i': 'audio/mp4',
  3759. 'm4a': 'audio/mp4',
  3760. 'm4f': 'audio/mp4',
  3761. 'cmfa': 'audio/mp4',
  3762. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  3763. 'ts': 'video/mp2t',
  3764. 'tsa': 'video/mp2t',
  3765. };
  3766. /**
  3767. * @const {!Object.<string, string>}
  3768. * @private
  3769. */
  3770. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = {
  3771. 'mp4': 'video/mp4',
  3772. 'mp4v': 'video/mp4',
  3773. 'm4s': 'video/mp4',
  3774. 'm4i': 'video/mp4',
  3775. 'm4v': 'video/mp4',
  3776. 'm4f': 'video/mp4',
  3777. 'cmfv': 'video/mp4',
  3778. 'ts': 'video/mp2t',
  3779. 'tsv': 'video/mp2t',
  3780. };
  3781. /**
  3782. * @const {!Object.<string, string>}
  3783. * @private
  3784. */
  3785. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
  3786. 'mp4': 'application/mp4',
  3787. 'm4s': 'application/mp4',
  3788. 'm4i': 'application/mp4',
  3789. 'm4f': 'application/mp4',
  3790. 'cmft': 'application/mp4',
  3791. 'vtt': 'text/vtt',
  3792. 'webvtt': 'text/vtt',
  3793. 'ttml': 'application/ttml+xml',
  3794. };
  3795. /**
  3796. * @const {!Object.<string, string>}
  3797. * @private
  3798. */
  3799. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = {
  3800. 'jpg': 'image/jpeg',
  3801. 'png': 'image/png',
  3802. 'svg': 'image/svg+xml',
  3803. 'webp': 'image/webp',
  3804. 'avif': 'image/avif',
  3805. };
  3806. /**
  3807. * @const {!Object.<string, !Object.<string, string>>}
  3808. * @private
  3809. */
  3810. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = {
  3811. 'audio': shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_,
  3812. 'video': shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_,
  3813. 'text': shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_,
  3814. 'image': shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_,
  3815. };
  3816. /**
  3817. * @typedef {function(!shaka.hls.Tag, string):?shaka.extern.DrmInfo}
  3818. * @private
  3819. */
  3820. shaka.hls.HlsParser.DrmParser_;
  3821. /**
  3822. * @const {!Object.<string, shaka.hls.HlsParser.DrmParser_>}
  3823. * @private
  3824. */
  3825. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_ = {
  3826. 'com.apple.streamingkeydelivery':
  3827. shaka.hls.HlsParser.fairplayDrmParser_,
  3828. 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
  3829. shaka.hls.HlsParser.widevineDrmParser_,
  3830. 'com.microsoft.playready':
  3831. shaka.hls.HlsParser.playreadyDrmParser_,
  3832. 'identity':
  3833. shaka.hls.HlsParser.identityDrmParser_,
  3834. };
  3835. /**
  3836. * @enum {string}
  3837. * @private
  3838. */
  3839. shaka.hls.HlsParser.PresentationType_ = {
  3840. VOD: 'VOD',
  3841. EVENT: 'EVENT',
  3842. LIVE: 'LIVE',
  3843. };
  3844. shaka.media.ManifestParser.registerParserByMime(
  3845. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  3846. shaka.media.ManifestParser.registerParserByMime(
  3847. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());