Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.media.ContentWorkarounds');
  10. goog.require('shaka.media.IClosedCaptionParser');
  11. goog.require('shaka.media.TimeRangesUtils');
  12. goog.require('shaka.media.Transmuxer');
  13. goog.require('shaka.text.TextEngine');
  14. goog.require('shaka.util.Destroyer');
  15. goog.require('shaka.util.Error');
  16. goog.require('shaka.util.EventManager');
  17. goog.require('shaka.util.Functional');
  18. goog.require('shaka.util.IDestroyable');
  19. goog.require('shaka.util.ManifestParserUtils');
  20. goog.require('shaka.util.MimeUtils');
  21. goog.require('shaka.util.Platform');
  22. goog.require('shaka.util.PublicPromise');
  23. /**
  24. * @summary
  25. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  26. * All asynchronous operations return a Promise, and all operations are
  27. * internally synchronized and serialized as needed. Operations that can
  28. * be done in parallel will be done in parallel.
  29. *
  30. * @implements {shaka.util.IDestroyable}
  31. */
  32. shaka.media.MediaSourceEngine = class {
  33. /**
  34. * @param {HTMLMediaElement} video The video element, whose source is tied to
  35. * MediaSource during the lifetime of the MediaSourceEngine.
  36. * @param {!shaka.media.IClosedCaptionParser} closedCaptionParser
  37. * The closed caption parser that should be used to parser closed captions
  38. * from the video stream. MediaSourceEngine takes ownership of the parser.
  39. * When MediaSourceEngine is destroyed, it will destroy the parser.
  40. * @param {!shaka.extern.TextDisplayer} textDisplayer
  41. * The text displayer that will be used with the text engine.
  42. * MediaSourceEngine takes ownership of the displayer. When
  43. * MediaSourceEngine is destroyed, it will destroy the displayer.
  44. * @param {!function(!Array.<shaka.extern.ID3Metadata>, number, ?number)=}
  45. * onMetadata
  46. */
  47. constructor(video, closedCaptionParser, textDisplayer, onMetadata) {
  48. /** @private {HTMLMediaElement} */
  49. this.video_ = video;
  50. /** @private {shaka.extern.TextDisplayer} */
  51. this.textDisplayer_ = textDisplayer;
  52. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  53. SourceBuffer>} */
  54. this.sourceBuffers_ = {};
  55. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  56. string>} */
  57. this.sourceBufferTypes_ = {};
  58. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  59. boolean>} */
  60. this.expectedEncryption_ = {};
  61. /** @private {shaka.text.TextEngine} */
  62. this.textEngine_ = null;
  63. /** @private {boolean} */
  64. this.segmentRelativeVttTiming_ = false;
  65. const onMetadataNoOp = (metadata, timestampOffset, segmentEnd) => {};
  66. /** @private {!function(!Array.<shaka.extern.ID3Metadata>,
  67. number, ?number)} */
  68. this.onMetadata_ = onMetadata || onMetadataNoOp;
  69. /**
  70. * @private {!Object.<string,
  71. * !Array.<shaka.media.MediaSourceEngine.Operation>>}
  72. */
  73. this.queues_ = {};
  74. /** @private {shaka.util.EventManager} */
  75. this.eventManager_ = new shaka.util.EventManager();
  76. /** @private {!Object.<string, !shaka.media.Transmuxer>} */
  77. this.transmuxers_ = {};
  78. /** @private {shaka.media.IClosedCaptionParser} */
  79. this.captionParser_ = closedCaptionParser;
  80. /** @private {!shaka.util.PublicPromise} */
  81. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  82. /** @private {MediaSource} */
  83. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  84. /** @type {!shaka.util.Destroyer} */
  85. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  86. /** @private {string} */
  87. this.url_ = '';
  88. /** @private {boolean} */
  89. this.sequenceMode_ = false;
  90. /** @private {!shaka.util.PublicPromise.<number>} */
  91. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  92. }
  93. /**
  94. * Create a MediaSource object, attach it to the video element, and return it.
  95. * Resolves the given promise when the MediaSource is ready.
  96. *
  97. * Replaced by unit tests.
  98. *
  99. * @param {!shaka.util.PublicPromise} p
  100. * @return {!MediaSource}
  101. */
  102. createMediaSource(p) {
  103. const mediaSource = new MediaSource();
  104. // Set up MediaSource on the video element.
  105. this.eventManager_.listenOnce(
  106. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  107. // Store the object URL for releasing it later.
  108. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  109. this.video_.src = this.url_;
  110. return mediaSource;
  111. }
  112. /**
  113. * @param {!shaka.util.PublicPromise} p
  114. * @private
  115. */
  116. onSourceOpen_(p) {
  117. // Release the object URL that was previously created, to prevent memory
  118. // leak.
  119. // createObjectURL creates a strong reference to the MediaSource object
  120. // inside the browser. Setting the src of the video then creates another
  121. // reference within the video element. revokeObjectURL will remove the
  122. // strong reference to the MediaSource object, and allow it to be
  123. // garbage-collected later.
  124. URL.revokeObjectURL(this.url_);
  125. p.resolve();
  126. }
  127. /**
  128. * Checks if a certain type is supported.
  129. *
  130. * @param {shaka.extern.Stream} stream
  131. * @return {boolean}
  132. */
  133. static isStreamSupported(stream) {
  134. const fullMimeType = shaka.util.MimeUtils.getFullType(
  135. stream.mimeType, stream.codecs);
  136. const extendedMimeType = shaka.util.MimeUtils.getExtendedType(stream);
  137. return shaka.text.TextEngine.isTypeSupported(fullMimeType) ||
  138. MediaSource.isTypeSupported(extendedMimeType) ||
  139. shaka.media.Transmuxer.isSupported(fullMimeType, stream.type);
  140. }
  141. /**
  142. * Returns a map of MediaSource support for well-known types.
  143. *
  144. * @return {!Object.<string, boolean>}
  145. */
  146. static probeSupport() {
  147. const testMimeTypes = [
  148. // MP4 types
  149. 'video/mp4; codecs="avc1.42E01E"',
  150. 'video/mp4; codecs="avc3.42E01E"',
  151. 'video/mp4; codecs="hev1.1.6.L93.90"',
  152. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  153. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  154. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  155. 'video/mp4; codecs="vp9"',
  156. 'video/mp4; codecs="vp09.00.10.08"',
  157. 'video/mp4; codecs="av01.0.01M.08"',
  158. 'audio/mp4; codecs="mp4a.40.2"',
  159. 'audio/mp4; codecs="ac-3"',
  160. 'audio/mp4; codecs="ec-3"',
  161. 'audio/mp4; codecs="opus"',
  162. 'audio/mp4; codecs="flac"',
  163. // WebM types
  164. 'video/webm; codecs="vp8"',
  165. 'video/webm; codecs="vp9"',
  166. 'video/webm; codecs="vp09.00.10.08"',
  167. 'audio/webm; codecs="vorbis"',
  168. 'audio/webm; codecs="opus"',
  169. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  170. 'video/mp2t; codecs="avc1.42E01E"',
  171. 'video/mp2t; codecs="avc3.42E01E"',
  172. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  173. 'video/mp2t; codecs="mp4a.40.2"',
  174. 'video/mp2t; codecs="ac-3"',
  175. 'video/mp2t; codecs="ec-3"',
  176. // WebVTT types
  177. 'text/vtt',
  178. 'application/mp4; codecs="wvtt"',
  179. // TTML types
  180. 'application/ttml+xml',
  181. 'application/mp4; codecs="stpp"',
  182. // Containerless types
  183. ...shaka.media.MediaSourceEngine.RAW_FORMATS,
  184. ];
  185. const support = {};
  186. for (const type of testMimeTypes) {
  187. if (shaka.util.Platform.supportsMediaSource()) {
  188. // Our TextEngine is only effective for MSE platforms at the moment.
  189. if (shaka.text.TextEngine.isTypeSupported(type)) {
  190. support[type] = true;
  191. } else {
  192. support[type] = MediaSource.isTypeSupported(type) ||
  193. shaka.media.Transmuxer.isSupported(type);
  194. }
  195. } else {
  196. support[type] = shaka.util.Platform.supportsMediaType(type);
  197. }
  198. const basicType = type.split(';')[0];
  199. support[basicType] = support[basicType] || support[type];
  200. }
  201. return support;
  202. }
  203. /** @override */
  204. destroy() {
  205. return this.destroyer_.destroy();
  206. }
/**
 * The actual destruction routine, run exactly once by |destroyer_|:
 * waits for the in-progress operation in each queue, rejects everything
 * queued behind it, destroys all owned components (text engine, text
 * displayer, transmuxers), and detaches from the video element.
 * @private
 */
async doDestroy_() {
  const Functional = shaka.util.Functional;
  const cleanup = [];

  for (const contentType in this.queues_) {
    // Make a local copy of the queue and the first item.
    const q = this.queues_[contentType];
    const inProgress = q[0];

    // Drop everything else out of the original queue.
    this.queues_[contentType] = q.slice(0, 1);

    // We will wait for this item to complete/fail.
    if (inProgress) {
      cleanup.push(inProgress.p.catch(Functional.noop));
    }

    // The rest will be rejected silently if possible.
    for (const item of q.slice(1)) {
      item.p.reject(shaka.util.Destroyer.destroyedError());
    }
  }

  if (this.textEngine_) {
    cleanup.push(this.textEngine_.destroy());
  }
  if (this.textDisplayer_) {
    cleanup.push(this.textDisplayer_.destroy());
  }

  for (const contentType in this.transmuxers_) {
    cleanup.push(this.transmuxers_[contentType].destroy());
  }

  // All of the above can proceed in parallel; wait for everything before
  // releasing the remaining resources.
  await Promise.all(cleanup);

  if (this.eventManager_) {
    this.eventManager_.release();
    this.eventManager_ = null;
  }

  if (this.video_) {
    // "unload" the video element.
    this.video_.removeAttribute('src');
    this.video_.load();
    this.video_ = null;
  }

  this.mediaSource_ = null;
  this.textEngine_ = null;
  this.textDisplayer_ = null;
  this.sourceBuffers_ = {};
  this.transmuxers_ = {};
  this.captionParser_ = null;

  if (goog.DEBUG) {
    // Sanity check: every queue should now hold at most the operation we
    // awaited above, which has since completed and been removed.
    for (const contentType in this.queues_) {
      goog.asserts.assert(
          this.queues_[contentType].length == 0,
          contentType + ' queue should be empty after destroy!');
    }
  }

  this.queues_ = {};
}
  261. /**
  262. * @return {!Promise} Resolved when MediaSource is open and attached to the
  263. * media element. This process is actually initiated by the constructor.
  264. */
  265. open() {
  266. return this.mediaSourceOpen_;
  267. }
/**
 * Initialize MediaSourceEngine.
 *
 * Note that it is not valid to call this multiple times, except to add or
 * reinitialize text streams.
 *
 * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
 *               shaka.extern.Stream>} streamsByType
 *   A map of content types to streams.  All streams must be supported
 *   according to MediaSourceEngine.isStreamSupported.
 * @param {boolean} forceTransmuxTS
 *   If true, this will transmux TS content even if it is natively supported.
 * @param {boolean=} sequenceMode
 *   If true, the media segments are appended to the SourceBuffer in strict
 *   sequence.
 *
 * @return {!Promise}
 */
async init(streamsByType, forceTransmuxTS, sequenceMode=false) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;

  // SourceBuffers can only be created once the MediaSource is open.
  await this.mediaSourceOpen_;

  this.sequenceMode_ = sequenceMode;

  for (const contentType of streamsByType.keys()) {
    const stream = streamsByType.get(contentType);
    goog.asserts.assert(
        shaka.media.MediaSourceEngine.isStreamSupported(stream),
        'Type negotiation should happen before MediaSourceEngine.init!');

    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, stream.codecs);
    if (contentType == ContentType.TEXT) {
      // Text goes to the TextEngine, not to a SourceBuffer.
      this.reinitText(mimeType, sequenceMode);
    } else {
      // Use the transmuxer when forced, or when MSE cannot handle the type
      // natively but the transmuxer can convert it.
      if ((forceTransmuxTS || !MediaSource.isTypeSupported(mimeType)) &&
          shaka.media.Transmuxer.isSupported(mimeType, contentType)) {
        this.transmuxers_[contentType] = new shaka.media.Transmuxer();
        // From here on, the SourceBuffer sees the converted (MP4) type.
        mimeType =
            shaka.media.Transmuxer.convertTsCodecs(contentType, mimeType);
      }

      const sourceBuffer = this.mediaSource_.addSourceBuffer(mimeType);

      // 'updateend' drives the operation queue (see onUpdateEnd_).
      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_[contentType] = sourceBuffer;
      this.sourceBufferTypes_[contentType] = mimeType;
      this.queues_[contentType] = [];
      this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
    }
  }
}
  320. /**
  321. * Reinitialize the TextEngine for a new text type.
  322. * @param {string} mimeType
  323. * @param {boolean} sequenceMode
  324. */
  325. reinitText(mimeType, sequenceMode) {
  326. if (!this.textEngine_) {
  327. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  328. }
  329. this.textEngine_.initParser(mimeType, sequenceMode,
  330. this.segmentRelativeVttTiming_);
  331. }
  332. /**
  333. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  334. * object has been destroyed.
  335. */
  336. ended() {
  337. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  338. }
  339. /**
  340. * Gets the first timestamp in buffer for the given content type.
  341. *
  342. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  343. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  344. */
  345. bufferStart(contentType) {
  346. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  347. if (contentType == ContentType.TEXT) {
  348. return this.textEngine_.bufferStart();
  349. }
  350. return shaka.media.TimeRangesUtils.bufferStart(
  351. this.getBuffered_(contentType));
  352. }
  353. /**
  354. * Gets the last timestamp in buffer for the given content type.
  355. *
  356. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  357. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  358. */
  359. bufferEnd(contentType) {
  360. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  361. if (contentType == ContentType.TEXT) {
  362. return this.textEngine_.bufferEnd();
  363. }
  364. return shaka.media.TimeRangesUtils.bufferEnd(
  365. this.getBuffered_(contentType));
  366. }
  367. /**
  368. * Determines if the given time is inside the buffered range of the given
  369. * content type.
  370. *
  371. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  372. * @param {number} time Playhead time
  373. * @return {boolean}
  374. */
  375. isBuffered(contentType, time) {
  376. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  377. if (contentType == ContentType.TEXT) {
  378. return this.textEngine_.isBuffered(time);
  379. } else {
  380. const buffered = this.getBuffered_(contentType);
  381. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  382. }
  383. }
  384. /**
  385. * Computes how far ahead of the given timestamp is buffered for the given
  386. * content type.
  387. *
  388. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  389. * @param {number} time
  390. * @return {number} The amount of time buffered ahead in seconds.
  391. */
  392. bufferedAheadOf(contentType, time) {
  393. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  394. if (contentType == ContentType.TEXT) {
  395. return this.textEngine_.bufferedAheadOf(time);
  396. } else {
  397. const buffered = this.getBuffered_(contentType);
  398. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  399. }
  400. }
  401. /**
  402. * Returns info about what is currently buffered.
  403. * @return {shaka.extern.BufferedInfo}
  404. */
  405. getBufferedInfo() {
  406. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  407. const TimeRangeUtils = shaka.media.TimeRangesUtils;
  408. const info = {
  409. total: TimeRangeUtils.getBufferedInfo(this.video_.buffered),
  410. audio: TimeRangeUtils.getBufferedInfo(
  411. this.getBuffered_(ContentType.AUDIO)),
  412. video: TimeRangeUtils.getBufferedInfo(
  413. this.getBuffered_(ContentType.VIDEO)),
  414. text: [],
  415. };
  416. if (this.textEngine_) {
  417. const start = this.textEngine_.bufferStart();
  418. const end = this.textEngine_.bufferEnd();
  419. if (start != null && end != null) {
  420. info.text.push({start: start, end: end});
  421. }
  422. }
  423. return info;
  424. }
/**
 * @param {shaka.util.ManifestParserUtils.ContentType} contentType
 * @return {TimeRanges} The buffered ranges for the given content type, or
 *   null if the buffered ranges could not be obtained.
 * @private
 */
getBuffered_(contentType) {
  try {
    return this.sourceBuffers_[contentType].buffered;
  } catch (exception) {
    // Reaching here means either the SourceBuffer threw (previous
    // MediaSource errors may cause access to |buffered| to throw) or no
    // SourceBuffer exists for this type and the property access failed on
    // undefined.  Only log in the first case; a missing buffer (e.g. text)
    // is normal.
    if (contentType in this.sourceBuffers_) {
      shaka.log.error('failed to get buffered range for ' + contentType,
          exception);
    }
    return null;
  }
}
/**
 * Enqueue an operation to append data to the SourceBuffer.
 * Start and end times are needed for TextEngine, but not for MediaSource.
 * Start and end times may be null for initialization segments; if present
 * they are relative to the presentation timeline.
 *
 * @param {shaka.util.ManifestParserUtils.ContentType} contentType
 * @param {!BufferSource} data
 * @param {?number} startTime relative to the start of the presentation
 * @param {?number} endTime relative to the start of the presentation
 * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
 *   captions
 * @param {boolean=} seeked True if we just seeked
 * @return {!Promise}
 */
async appendBuffer(
    contentType, data, startTime, endTime, hasClosedCaptions, seeked) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;

  // Text segments bypass MSE entirely and go to the TextEngine.
  if (contentType == ContentType.TEXT) {
    if (this.sequenceMode_) {
      // This won't be known until the first video segment is appended.
      const offset = await this.textSequenceModeOffset_;
      this.textEngine_.setTimestampOffset(offset);
    }
    await this.textEngine_.appendBuffer(data, startTime, endTime);
    return;
  }

  if (this.transmuxers_[contentType]) {
    const transmuxedData =
        await this.transmuxers_[contentType].transmux(data);
    // For HLS CEA-608/708 CLOSED-CAPTIONS, text data is embedded in
    // the video stream, so textEngine may not have been initialized.
    if (!this.textEngine_) {
      this.reinitText('text/vtt', this.sequenceMode_);
    }

    // Forward any ID3 metadata found during transmuxing to the callback
    // given to the constructor.
    if (transmuxedData.metadata) {
      const timestampOffset =
          this.sourceBuffers_[contentType].timestampOffset;
      this.onMetadata_(transmuxedData.metadata, timestampOffset, endTime);
    }

    // This doesn't work for native TS support (ex. Edge/Chromecast),
    // since no transmuxing is needed for native TS.
    if (transmuxedData.captions && transmuxedData.captions.length) {
      const videoOffset =
          this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
      const closedCaptions = this.textEngine_
          .convertMuxjsCaptionsToShakaCaptions(transmuxedData.captions);
      this.textEngine_.storeAndAppendClosedCaptions(
          closedCaptions, startTime, endTime, videoOffset);
    }

    // From here on, append the transmuxed (MP4) data, not the original TS.
    data = transmuxedData.data;
  } else if (hasClosedCaptions) {
    if (!this.textEngine_) {
      this.reinitText('text/vtt', this.sequenceMode_);
    }
    // If it is the init segment for closed captions, initialize the closed
    // caption parser.
    if (startTime == null && endTime == null) {
      this.captionParser_.init(data);
    } else {
      const closedCaptions = this.captionParser_.parseFrom(data);
      if (closedCaptions.length) {
        const videoOffset =
            this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
        this.textEngine_.storeAndAppendClosedCaptions(
            closedCaptions, startTime, endTime, videoOffset);
      }
    }
  }

  data = this.workAroundBrokenPlatforms_(data, startTime, contentType);

  const sourceBuffer = this.sourceBuffers_[contentType];
  const SEQUENCE = shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;

  if (this.sequenceMode_ && sourceBuffer.mode != SEQUENCE &&
      startTime != null) {
    // This is the first media segment to be appended to a SourceBuffer in
    // sequence mode.  We set the mode late so that we can trick MediaSource
    // into extracting a timestamp for us to align text segments in sequence
    // mode.

    // Timestamps can only be reliably extracted from video, not audio.
    // Packed audio formats do not have internal timestamps at all.
    // Prefer video for this when available.
    const isBestSourceBufferForTimestamps =
        contentType == ContentType.VIDEO ||
        !(ContentType.VIDEO in this.sourceBuffers_);
    if (isBestSourceBufferForTimestamps) {
      // Append the segment in segments mode first, with offset of 0 and an
      // open append window.
      const originalRange =
          [sourceBuffer.appendWindowStart, sourceBuffer.appendWindowEnd];
      sourceBuffer.appendWindowStart = 0;
      sourceBuffer.appendWindowEnd = Infinity;

      const originalOffset = sourceBuffer.timestampOffset;
      sourceBuffer.timestampOffset = 0;

      await this.enqueueOperation_(
          contentType, () => this.append_(contentType, data));

      // Reset the offset and append window.
      sourceBuffer.timestampOffset = originalOffset;
      sourceBuffer.appendWindowStart = originalRange[0];
      sourceBuffer.appendWindowEnd = originalRange[1];

      // Now get the timestamp of the segment and compute the offset for text
      // segments.
      const mediaStartTime = shaka.media.TimeRangesUtils.bufferStart(
          this.getBuffered_(contentType));
      const textOffset = (startTime || 0) - (mediaStartTime || 0);
      // Unblocks any text appendBuffer() calls awaiting this offset above.
      this.textSequenceModeOffset_.resolve(textOffset);

      // Finally, clear the buffer.
      await this.enqueueOperation_(
          contentType,
          () => this.remove_(contentType, 0, this.mediaSource_.duration));
    }

    // Now switch to sequence mode and fall through to our normal operations.
    sourceBuffer.mode = SEQUENCE;
  }

  if (startTime != null && this.sequenceMode_ &&
      contentType != ContentType.TEXT) {
    // In sequence mode, for non-text streams, if we just cleared the buffer
    // and are performing an unbuffered seek, we need to set a new
    // timestampOffset on the sourceBuffer.
    if (seeked) {
      const timestampOffset = /** @type {number} */ (startTime);
      // NOTE: deliberately not awaited; it is queued ahead of the append
      // below, and the queue guarantees ordering.
      this.enqueueOperation_(
          contentType,
          () => this.setTimestampOffset_(contentType, timestampOffset));
    }
  }

  await this.enqueueOperation_(
      contentType,
      () => this.append_(contentType, data));
}
  573. /**
  574. * Set the selected closed captions Id and language.
  575. *
  576. * @param {string} id
  577. */
  578. setSelectedClosedCaptionId(id) {
  579. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  580. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  581. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  582. }
  583. /** Disable embedded closed captions. */
  584. clearSelectedClosedCaptionId() {
  585. if (this.textEngine_) {
  586. this.textEngine_.setSelectedClosedCaptionId('', 0);
  587. }
  588. }
  589. /**
  590. * Enqueue an operation to remove data from the SourceBuffer.
  591. *
  592. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  593. * @param {number} startTime relative to the start of the presentation
  594. * @param {number} endTime relative to the start of the presentation
  595. * @return {!Promise}
  596. */
  597. async remove(contentType, startTime, endTime) {
  598. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  599. if (contentType == ContentType.TEXT) {
  600. await this.textEngine_.remove(startTime, endTime);
  601. } else {
  602. await this.enqueueOperation_(
  603. contentType,
  604. () => this.remove_(contentType, startTime, endTime));
  605. }
  606. }
  607. /**
  608. * Enqueue an operation to clear the SourceBuffer.
  609. *
  610. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  611. * @return {!Promise}
  612. */
  613. async clear(contentType) {
  614. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  615. if (contentType == ContentType.TEXT) {
  616. if (!this.textEngine_) {
  617. return;
  618. }
  619. await this.textEngine_.remove(0, Infinity);
  620. } else {
  621. // Note that not all platforms allow clearing to Infinity.
  622. await this.enqueueOperation_(
  623. contentType,
  624. () => this.remove_(contentType, 0, this.mediaSource_.duration));
  625. }
  626. }
  627. /**
  628. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  629. */
  630. resetCaptionParser() {
  631. this.captionParser_.reset();
  632. }
  633. /**
  634. * Enqueue an operation to flush the SourceBuffer.
  635. * This is a workaround for what we believe is a Chromecast bug.
  636. *
  637. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  638. * @return {!Promise}
  639. */
  640. async flush(contentType) {
  641. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  642. // everything.
  643. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  644. if (contentType == ContentType.TEXT) {
  645. // Nothing to flush for text.
  646. return;
  647. }
  648. await this.enqueueOperation_(
  649. contentType,
  650. () => this.flush_(contentType));
  651. }
/**
 * Sets the timestamp offset and append window end for the given content type.
 *
 * @param {shaka.util.ManifestParserUtils.ContentType} contentType
 * @param {number} timestampOffset The timestamp offset.  Segments which start
 *   at time t will be inserted at time t + timestampOffset instead.  This
 *   value does not affect segments which have already been inserted.
 * @param {number} appendWindowStart The timestamp to set the append window
 *   start to.  For future appends, frames/samples with timestamps less than
 *   this value will be dropped.
 * @param {number} appendWindowEnd The timestamp to set the append window end
 *   to.  For future appends, frames/samples with timestamps greater than this
 *   value will be dropped.
 * @param {boolean} sequenceMode If true, the timestampOffset will not be
 *   applied in this step.
 * @return {!Promise}
 */
async setStreamProperties(
    contentType, timestampOffset, appendWindowStart, appendWindowEnd,
    sequenceMode) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  if (contentType == ContentType.TEXT) {
    // Text is handled synchronously by the TextEngine; no queueing needed.
    if (!sequenceMode) {
      this.textEngine_.setTimestampOffset(timestampOffset);
    }
    this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
    return;
  }

  // The three operations below are enqueued in order on the same queue, so
  // Promise.all only gathers their completions; ordering is preserved.
  await Promise.all([
    // Queue an abort() to help MSE splice together overlapping segments.
    // We set appendWindowEnd when we change periods in DASH content, and the
    // period transition may result in overlap.
    //
    // An abort() also helps with MPEG2-TS.  When we append a TS segment, we
    // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
    // timestamp offset.  By calling abort(), we reset the state so we can
    // set it.
    this.enqueueOperation_(
        contentType,
        () => this.abort_(contentType)),
    // Don't set the timestampOffset here when in sequenceMode, since we
    // use timestampOffset for a different purpose in that mode (e.g. to
    // indicate where the current segment is).
    sequenceMode ? Promise.resolve() : this.enqueueOperation_(
        contentType,
        () => this.setTimestampOffset_(contentType, timestampOffset)),
    this.enqueueOperation_(
        contentType,
        () => this.setAppendWindow_(
            contentType, appendWindowStart, appendWindowEnd)),
  ]);
}
  704. /**
  705. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  706. * @return {!Promise}
  707. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  708. */
  709. async endOfStream(reason) {
  710. await this.enqueueBlockingOperation_(() => {
  711. // If endOfStream() has already been called on the media source,
  712. // don't call it again.
  713. if (this.ended()) {
  714. return;
  715. }
  716. // Tizen won't let us pass undefined, but it will let us omit the
  717. // argument.
  718. if (reason) {
  719. this.mediaSource_.endOfStream(reason);
  720. } else {
  721. this.mediaSource_.endOfStream();
  722. }
  723. });
  724. }
  725. /**
  726. * We only support increasing duration at this time. Decreasing duration
  727. * causes the MSE removal algorithm to run, which results in an 'updateend'
  728. * event. Supporting this scenario would be complicated, and is not currently
  729. * needed.
  730. *
  731. * @param {number} duration
  732. * @return {!Promise}
  733. */
  734. async setDuration(duration) {
  735. goog.asserts.assert(
  736. isNaN(this.mediaSource_.duration) ||
  737. this.mediaSource_.duration <= duration,
  738. 'duration cannot decrease: ' + this.mediaSource_.duration + ' -> ' +
  739. duration);
  740. await this.enqueueBlockingOperation_(() => {
  741. this.mediaSource_.duration = duration;
  742. });
  743. }
  744. /**
  745. * Get the current MediaSource duration.
  746. *
  747. * @return {number}
  748. */
  749. getDuration() {
  750. return this.mediaSource_.duration;
  751. }
  752. /**
  753. * Append data to the SourceBuffer.
  754. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  755. * @param {BufferSource} data
  756. * @private
  757. */
  758. append_(contentType, data) {
  759. // This will trigger an 'updateend' event.
  760. this.sourceBuffers_[contentType].appendBuffer(data);
  761. }
  762. /**
  763. * Remove data from the SourceBuffer.
  764. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  765. * @param {number} startTime relative to the start of the presentation
  766. * @param {number} endTime relative to the start of the presentation
  767. * @private
  768. */
  769. remove_(contentType, startTime, endTime) {
  770. if (endTime <= startTime) {
  771. // Ignore removal of inverted or empty ranges.
  772. // Fake 'updateend' event to resolve the operation.
  773. this.onUpdateEnd_(contentType);
  774. return;
  775. }
  776. // This will trigger an 'updateend' event.
  777. this.sourceBuffers_[contentType].remove(startTime, endTime);
  778. }
  779. /**
  780. * Call abort() on the SourceBuffer.
  781. * This resets MSE's last_decode_timestamp on all track buffers, which should
  782. * trigger the splicing logic for overlapping segments.
  783. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  784. * @private
  785. */
  786. abort_(contentType) {
  787. // Save the append window, which is reset on abort().
  788. const appendWindowStart =
  789. this.sourceBuffers_[contentType].appendWindowStart;
  790. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  791. // This will not trigger an 'updateend' event, since nothing is happening.
  792. // This is only to reset MSE internals, not to abort an actual operation.
  793. this.sourceBuffers_[contentType].abort();
  794. // Restore the append window.
  795. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  796. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  797. // Fake an 'updateend' event to resolve the operation.
  798. this.onUpdateEnd_(contentType);
  799. }
  800. /**
  801. * Nudge the playhead to force the media pipeline to be flushed.
  802. * This seems to be necessary on Chromecast to get new content to replace old
  803. * content.
  804. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  805. * @private
  806. */
  807. flush_(contentType) {
  808. // Never use flush_ if there's data. It causes a hiccup in playback.
  809. goog.asserts.assert(
  810. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  811. 'only be used after clearing all data!');
  812. // Seeking forces the pipeline to be flushed.
  813. this.video_.currentTime -= 0.001;
  814. // Fake an 'updateend' event to resolve the operation.
  815. this.onUpdateEnd_(contentType);
  816. }
  817. /**
  818. * Set the SourceBuffer's timestamp offset.
  819. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  820. * @param {number} timestampOffset
  821. * @private
  822. */
  823. setTimestampOffset_(contentType, timestampOffset) {
  824. // Work around for
  825. // https://github.com/shaka-project/shaka-player/issues/1281:
  826. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  827. if (timestampOffset < 0) {
  828. // Try to prevent rounding errors in Edge from removing the first
  829. // keyframe.
  830. timestampOffset += 0.001;
  831. }
  832. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  833. // Fake an 'updateend' event to resolve the operation.
  834. this.onUpdateEnd_(contentType);
  835. }
  836. /**
  837. * Set the SourceBuffer's append window end.
  838. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  839. * @param {number} appendWindowStart
  840. * @param {number} appendWindowEnd
  841. * @private
  842. */
  843. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  844. // You can't set start > end, so first set start to 0, then set the new
  845. // end, then set the new start. That way, there are no intermediate
  846. // states which are invalid.
  847. this.sourceBuffers_[contentType].appendWindowStart = 0;
  848. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  849. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  850. // Fake an 'updateend' event to resolve the operation.
  851. this.onUpdateEnd_(contentType);
  852. }
  853. /**
  854. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  855. * @private
  856. */
  857. onError_(contentType) {
  858. const operation = this.queues_[contentType][0];
  859. goog.asserts.assert(operation, 'Spurious error event!');
  860. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  861. 'SourceBuffer should not be updating on error!');
  862. const code = this.video_.error ? this.video_.error.code : 0;
  863. operation.p.reject(new shaka.util.Error(
  864. shaka.util.Error.Severity.CRITICAL,
  865. shaka.util.Error.Category.MEDIA,
  866. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  867. code));
  868. // Do not pop from queue. An 'updateend' event will fire next, and to
  869. // avoid synchronizing these two event handlers, we will allow that one to
  870. // pop from the queue as normal. Note that because the operation has
  871. // already been rejected, the call to resolve() in the 'updateend' handler
  872. // will have no effect.
  873. }
  874. /**
  875. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  876. * @private
  877. */
  878. onUpdateEnd_(contentType) {
  879. const operation = this.queues_[contentType][0];
  880. goog.asserts.assert(operation, 'Spurious updateend event!');
  881. if (!operation) {
  882. return;
  883. }
  884. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  885. 'SourceBuffer should not be updating on updateend!');
  886. operation.p.resolve();
  887. this.popFromQueue_(contentType);
  888. }
  889. /**
  890. * Enqueue an operation and start it if appropriate.
  891. *
  892. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  893. * @param {function()} start
  894. * @return {!Promise}
  895. * @private
  896. */
  897. enqueueOperation_(contentType, start) {
  898. this.destroyer_.ensureNotDestroyed();
  899. const operation = {
  900. start: start,
  901. p: new shaka.util.PublicPromise(),
  902. };
  903. this.queues_[contentType].push(operation);
  904. if (this.queues_[contentType].length == 1) {
  905. this.startOperation_(contentType);
  906. }
  907. return operation.p;
  908. }
  909. /**
  910. * Enqueue an operation which must block all other operations on all
  911. * SourceBuffers.
  912. *
  913. * @param {function()} run
  914. * @return {!Promise}
  915. * @private
  916. */
  917. async enqueueBlockingOperation_(run) {
  918. this.destroyer_.ensureNotDestroyed();
  919. /** @type {!Array.<!shaka.util.PublicPromise>} */
  920. const allWaiters = [];
  921. // Enqueue a 'wait' operation onto each queue.
  922. // This operation signals its readiness when it starts.
  923. // When all wait operations are ready, the real operation takes place.
  924. for (const contentType in this.sourceBuffers_) {
  925. const ready = new shaka.util.PublicPromise();
  926. const operation = {
  927. start: () => ready.resolve(),
  928. p: ready,
  929. };
  930. this.queues_[contentType].push(operation);
  931. allWaiters.push(ready);
  932. if (this.queues_[contentType].length == 1) {
  933. operation.start();
  934. }
  935. }
  936. // Return a Promise to the real operation, which waits to begin until
  937. // there are no other in-progress operations on any SourceBuffers.
  938. try {
  939. await Promise.all(allWaiters);
  940. } catch (error) {
  941. // One of the waiters failed, which means we've been destroyed.
  942. goog.asserts.assert(
  943. this.destroyer_.destroyed(), 'Should be destroyed by now');
  944. // We haven't popped from the queue. Canceled waiters have been removed
  945. // by destroy. What's left now should just be resolved waiters. In
  946. // uncompiled mode, we will maintain good hygiene and make sure the
  947. // assert at the end of destroy passes. In compiled mode, the queues
  948. // are wiped in destroy.
  949. if (goog.DEBUG) {
  950. for (const contentType in this.sourceBuffers_) {
  951. if (this.queues_[contentType].length) {
  952. goog.asserts.assert(
  953. this.queues_[contentType].length == 1,
  954. 'Should be at most one item in queue!');
  955. goog.asserts.assert(
  956. allWaiters.includes(this.queues_[contentType][0].p),
  957. 'The item in queue should be one of our waiters!');
  958. this.queues_[contentType].shift();
  959. }
  960. }
  961. }
  962. throw error;
  963. }
  964. if (goog.DEBUG) {
  965. // If we did it correctly, nothing is updating.
  966. for (const contentType in this.sourceBuffers_) {
  967. goog.asserts.assert(
  968. this.sourceBuffers_[contentType].updating == false,
  969. 'SourceBuffers should not be updating after a blocking op!');
  970. }
  971. }
  972. // Run the real operation, which is synchronous.
  973. try {
  974. run();
  975. } catch (exception) {
  976. throw new shaka.util.Error(
  977. shaka.util.Error.Severity.CRITICAL,
  978. shaka.util.Error.Category.MEDIA,
  979. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  980. exception);
  981. } finally {
  982. // Unblock the queues.
  983. for (const contentType in this.sourceBuffers_) {
  984. this.popFromQueue_(contentType);
  985. }
  986. }
  987. }
  988. /**
  989. * Pop from the front of the queue and start a new operation.
  990. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  991. * @private
  992. */
  993. popFromQueue_(contentType) {
  994. // Remove the in-progress operation, which is now complete.
  995. this.queues_[contentType].shift();
  996. this.startOperation_(contentType);
  997. }
  998. /**
  999. * Starts the next operation in the queue.
  1000. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1001. * @private
  1002. */
  1003. startOperation_(contentType) {
  1004. // Retrieve the next operation, if any, from the queue and start it.
  1005. const next = this.queues_[contentType][0];
  1006. if (next) {
  1007. try {
  1008. next.start();
  1009. } catch (exception) {
  1010. if (exception.name == 'QuotaExceededError') {
  1011. next.p.reject(new shaka.util.Error(
  1012. shaka.util.Error.Severity.CRITICAL,
  1013. shaka.util.Error.Category.MEDIA,
  1014. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1015. contentType));
  1016. } else {
  1017. next.p.reject(new shaka.util.Error(
  1018. shaka.util.Error.Severity.CRITICAL,
  1019. shaka.util.Error.Category.MEDIA,
  1020. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1021. exception));
  1022. }
  1023. this.popFromQueue_(contentType);
  1024. }
  1025. }
  1026. }
  1027. /**
  1028. * @return {!shaka.extern.TextDisplayer}
  1029. */
  1030. getTextDisplayer() {
  1031. goog.asserts.assert(
  1032. this.textDisplayer_,
  1033. 'TextDisplayer should only be null when this is destroyed');
  1034. return this.textDisplayer_;
  1035. }
  1036. /**
  1037. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1038. */
  1039. setTextDisplayer(textDisplayer) {
  1040. const oldTextDisplayer = this.textDisplayer_;
  1041. this.textDisplayer_ = textDisplayer;
  1042. if (oldTextDisplayer) {
  1043. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  1044. oldTextDisplayer.destroy();
  1045. }
  1046. if (this.textEngine_) {
  1047. this.textEngine_.setDisplayer(textDisplayer);
  1048. }
  1049. }
  1050. /**
  1051. * @param {boolean} segmentRelativeVttTiming
  1052. */
  1053. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1054. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1055. }
  1056. /**
  1057. * Apply platform-specific transformations to this segment to work around
  1058. * issues in the platform.
  1059. *
  1060. * @param {!BufferSource} segment
  1061. * @param {?number} startTime
  1062. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1063. * @return {!BufferSource}
  1064. * @private
  1065. */
  1066. workAroundBrokenPlatforms_(segment, startTime, contentType) {
  1067. const isInitSegment = startTime == null;
  1068. const encryptionExpected = this.expectedEncryption_[contentType];
  1069. // If:
  1070. // 1. this is an init segment,
  1071. // 2. and encryption is expected,
  1072. // 3. and the platform requires encryption in all init segments,
  1073. // 4. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1074. // then insert fake encryption metadata for init segments that lack it.
  1075. // The MP4 requirement is because we can currently only do this
  1076. // transformation on MP4 containers.
  1077. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1078. if (isInitSegment &&
  1079. encryptionExpected &&
  1080. shaka.util.Platform.requiresEncryptionInfoInAllInitSegments() &&
  1081. shaka.util.MimeUtils.getContainerType(
  1082. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1083. shaka.log.debug('Forcing fake encryption information in init segment.');
  1084. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment);
  1085. }
  1086. return segment;
  1087. }
  1088. };
  1089. /**
  1090. * Internal reference to window.URL.createObjectURL function to avoid
  1091. * compatibility issues with other libraries and frameworks such as React
  1092. * Native. For use in unit tests only, not meant for external use.
  1093. *
  1094. * @type {function(?):string}
  1095. */
  1096. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  1097. /**
  1098. * @typedef {{
  1099. * start: function(),
  1100. * p: !shaka.util.PublicPromise
  1101. * }}
  1102. *
  1103. * @summary An operation in queue.
  1104. * @property {function()} start
  1105. * The function which starts the operation.
  1106. * @property {!shaka.util.PublicPromise} p
  1107. * The PublicPromise which is associated with this operation.
  1108. */
  1109. shaka.media.MediaSourceEngine.Operation;
  1110. /**
  1111. * @enum {string}
  1112. * @private
  1113. */
  1114. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  1115. SEQUENCE: 'sequence',
  1116. SEGMENTS: 'segments',
  1117. };
  1118. /**
  1119. * MIME types of raw formats.
  1120. *
  1121. * @const {!Array.<string>}
  1122. */
  1123. shaka.media.MediaSourceEngine.RAW_FORMATS = [
  1124. 'audio/aac',
  1125. 'audio/ac3',
  1126. 'audio/ec3',
  1127. 'audio/mpeg',
  1128. ];