
src/controller/buffer-controller.ts

  1. import { Events } from '../events';
  2. import { logger } from '../utils/logger';
  3. import { ErrorDetails, ErrorTypes } from '../errors';
  4. import { BufferHelper } from '../utils/buffer-helper';
  5. import { getMediaSource } from '../utils/mediasource-helper';
  6. import { ElementaryStreamTypes } from '../loader/fragment';
  7. import type { TrackSet } from '../types/track';
  8. import BufferOperationQueue from './buffer-operation-queue';
  9. import {
  10. BufferOperation,
  11. SourceBuffers,
  12. SourceBufferName,
  13. SourceBufferListeners,
  14. } from '../types/buffer';
  15. import type {
  16. LevelUpdatedData,
  17. BufferAppendingData,
  18. MediaAttachingData,
  19. ManifestParsedData,
  20. BufferCodecsData,
  21. BufferEOSData,
  22. BufferFlushingData,
  23. FragParsedData,
  24. FragChangedData,
  25. } from '../types/events';
  26. import type { ComponentAPI } from '../types/component-api';
  27. import type { ChunkMetadata } from '../types/transmuxer';
  28. import type Hls from '../hls';
  29. import type { LevelDetails } from '../loader/level-details';
  30.  
  31. const MediaSource = getMediaSource();
  32. const VIDEO_CODEC_PROFILE_REPACE = /([ha]vc.)(?:\.[^.,]+)+/;
  33.  
  34. export default class BufferController implements ComponentAPI {
  35. // The level details used to determine duration, target-duration and live
  36. private details: LevelDetails | null = null;
  37. // cache the self generated object url to detect hijack of video tag
  38. private _objectUrl: string | null = null;
  39. // A queue of buffer operations which require the SourceBuffer to not be updating upon execution
  40. private operationQueue!: BufferOperationQueue;
  41. // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  42. private listeners!: SourceBufferListeners;
  43.  
  44. private hls: Hls;
  45.  
46. // The number of BUFFER_CODECS events still expected before the sourceBuffers are created
  47. public bufferCodecEventsExpected: number = 0;
  48.  
49. // The total number of BUFFER_CODECS events expected (used to reset the counter on detach)
  50. private _bufferCodecEventsTotal: number = 0;
  51.  
  52. // A reference to the attached media element
  53. public media: HTMLMediaElement | null = null;
  54.  
  55. // A reference to the active media source
  56. public mediaSource: MediaSource | null = null;
  57.  
  58. // Last MP3 audio chunk appended
  59. private lastMpegAudioChunk: ChunkMetadata | null = null;
  60.  
  61. // counters
  62. public appendError: number = 0;
  63.  
  64. public tracks: TrackSet = {};
  65. public pendingTracks: TrackSet = {};
  66. public sourceBuffer!: SourceBuffers;
  67.  
  68. constructor(hls: Hls) {
  69. this.hls = hls;
  70. this._initSourceBuffer();
  71. this.registerListeners();
  72. }
  73.  
  74. public hasSourceTypes(): boolean {
  75. return (
  76. this.getSourceBufferTypes().length > 0 ||
  77. Object.keys(this.pendingTracks).length > 0
  78. );
  79. }
  80.  
  81. public destroy() {
  82. this.unregisterListeners();
  83. this.details = null;
  84. this.lastMpegAudioChunk = null;
  85. }
  86.  
  87. protected registerListeners() {
  88. const { hls } = this;
  89. hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  90. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  91. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  92. hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
  93. hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
  94. hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  95. hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
  96. hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  97. hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  98. hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
  99. hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
  100. }
  101.  
  102. protected unregisterListeners() {
  103. const { hls } = this;
  104. hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  105. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  106. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  107. hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
  108. hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
  109. hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  110. hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
  111. hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  112. hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  113. hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
  114. hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
  115. }
  116.  
  117. private _initSourceBuffer() {
  118. this.sourceBuffer = {};
  119. this.operationQueue = new BufferOperationQueue(this.sourceBuffer);
  120. this.listeners = {
  121. audio: [],
  122. video: [],
  123. audiovideo: [],
  124. };
  125. this.lastMpegAudioChunk = null;
  126. }
  127.  
  128. protected onManifestParsed(
  129. event: Events.MANIFEST_PARSED,
  130. data: ManifestParsedData
  131. ) {
132. // in case of alt audio, 2 BUFFER_CODECS events will be triggered, one per stream controller
133. // sourcebuffers will be created all at once when the expected number of tracks is reached
134. // when alt audio is not used, only one BUFFER_CODECS event is fired from the main stream controller
135. // and it carries the expected number of source buffers, so there is nothing to compute
  136. let codecEvents: number = 2;
  137. if ((data.audio && !data.video) || !data.altAudio) {
  138. codecEvents = 1;
  139. }
  140. this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
  141. this.details = null;
  142. logger.log(
  143. `${this.bufferCodecEventsExpected} bufferCodec event(s) expected`
  144. );
  145. }
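// Illustrative note: with demuxed alt audio, the main and audio stream controllers each emit
// BUFFER_CODECS, so two events are expected; muxed or single-media streams produce one event
// describing every track. checkPendingTracks() further below waits until this counter reaches
// zero (or two tracks are pending) before creating the SourceBuffers.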
  146.  
  147. protected onMediaAttaching(
  148. event: Events.MEDIA_ATTACHING,
  149. data: MediaAttachingData
  150. ) {
  151. const media = (this.media = data.media);
  152. if (media && MediaSource) {
  153. const ms = (this.mediaSource = new MediaSource());
  154. // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
  155. ms.addEventListener('sourceopen', this._onMediaSourceOpen);
  156. ms.addEventListener('sourceended', this._onMediaSourceEnded);
  157. ms.addEventListener('sourceclose', this._onMediaSourceClose);
  158. // link video and media Source
  159. media.src = self.URL.createObjectURL(ms);
  160. // cache the locally generated object url
  161. this._objectUrl = media.src;
  162. media.addEventListener('emptied', this._onMediaEmptied);
  163. }
  164. }
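// Illustrative note: this handler runs in response to hls.attachMedia(mediaElement), which
// triggers MEDIA_ATTACHING. MEDIA_ATTACHED itself is only emitted later, from
// _onMediaSourceOpen() below, once the MediaSource reaches the 'open' readyState. The cached
// _objectUrl lets onMediaDetaching() and _onMediaEmptied() detect whether a third party has
// replaced media.src in the meantime.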
  165.  
  166. protected onMediaDetaching() {
  167. const { media, mediaSource, _objectUrl } = this;
  168. if (mediaSource) {
  169. logger.log('[buffer-controller]: media source detaching');
  170. if (mediaSource.readyState === 'open') {
  171. try {
172. // endOfStream could trigger an exception if any sourcebuffer is in the updating state
173. // we don't really care about checking sourcebuffer state here,
174. // as we are detaching the MediaSource anyway
175. // let's just prevent this exception from propagating
  176. mediaSource.endOfStream();
  177. } catch (err) {
  178. logger.warn(
  179. `[buffer-controller]: onMediaDetaching: ${err.message} while calling endOfStream`
  180. );
  181. }
  182. }
  183. // Clean up the SourceBuffers by invoking onBufferReset
  184. this.onBufferReset();
  185. mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
  186. mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
  187. mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose);
  188.  
189. // Properly detach the MediaSource from the HTMLMediaElement, as
190. // suggested in https://github.com/w3c/media-source/issues/53.
  191. if (media) {
  192. media.removeEventListener('emptied', this._onMediaEmptied);
  193. if (_objectUrl) {
  194. self.URL.revokeObjectURL(_objectUrl);
  195. }
  196.  
  197. // clean up video tag src only if it's our own url. some external libraries might
  198. // hijack the video tag and change its 'src' without destroying the Hls instance first
  199. if (media.src === _objectUrl) {
  200. media.removeAttribute('src');
  201. media.load();
  202. } else {
  203. logger.warn(
  204. '[buffer-controller]: media.src was changed by a third party - skip cleanup'
  205. );
  206. }
  207. }
  208.  
  209. this.mediaSource = null;
  210. this.media = null;
  211. this._objectUrl = null;
  212. this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
  213. this.pendingTracks = {};
  214. this.tracks = {};
  215. }
  216.  
  217. this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  218. }
  219.  
  220. protected onBufferReset() {
  221. this.getSourceBufferTypes().forEach((type) => {
  222. const sb = this.sourceBuffer[type];
  223. try {
  224. if (sb) {
  225. this.removeBufferListeners(type);
  226. if (this.mediaSource) {
  227. this.mediaSource.removeSourceBuffer(sb);
  228. }
  229. // Synchronously remove the SB from the map before the next call in order to prevent an async function from
  230. // accessing it
  231. this.sourceBuffer[type] = undefined;
  232. }
  233. } catch (err) {
  234. logger.warn(
  235. `[buffer-controller]: Failed to reset the ${type} buffer`,
  236. err
  237. );
  238. }
  239. });
  240. this._initSourceBuffer();
  241. }
  242.  
  243. protected onBufferCodecs(
  244. event: Events.BUFFER_CODECS,
  245. data: BufferCodecsData
  246. ) {
  247. const sourceBufferCount = this.getSourceBufferTypes().length;
  248.  
  249. Object.keys(data).forEach((trackName) => {
  250. if (sourceBufferCount) {
  251. // check if SourceBuffer codec needs to change
  252. const track = this.tracks[trackName];
  253. if (track && typeof track.buffer.changeType === 'function') {
  254. const { id, codec, levelCodec, container, metadata } =
  255. data[trackName];
  256. const currentCodec = (track.levelCodec || track.codec).replace(
  257. VIDEO_CODEC_PROFILE_REPACE,
  258. '$1'
  259. );
  260. const nextCodec = (levelCodec || codec).replace(
  261. VIDEO_CODEC_PROFILE_REPACE,
  262. '$1'
  263. );
  264. if (currentCodec !== nextCodec) {
  265. const mimeType = `${container};codecs=${levelCodec || codec}`;
  266. this.appendChangeType(trackName, mimeType);
  267. logger.log(
  268. `[buffer-controller]: switching codec ${currentCodec} to ${nextCodec}`
  269. );
  270. this.tracks[trackName] = {
  271. buffer: track.buffer,
  272. codec,
  273. container,
  274. levelCodec,
  275. metadata,
  276. id,
  277. };
  278. }
  279. }
  280. } else {
281. // if the source buffer(s) have not been created yet, store the track info in this.pendingTracks
  282. this.pendingTracks[trackName] = data[trackName];
  283. }
  284. });
  285.  
  286. // if sourcebuffers already created, do nothing ...
  287. if (sourceBufferCount) {
  288. return;
  289. }
  290.  
  291. this.bufferCodecEventsExpected = Math.max(
  292. this.bufferCodecEventsExpected - 1,
  293. 0
  294. );
  295. if (this.mediaSource && this.mediaSource.readyState === 'open') {
  296. this.checkPendingTracks();
  297. }
  298. }
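// Worked example (illustrative values): VIDEO_CODEC_PROFILE_REPACE strips the profile/level
// suffix so that only a change of codec family forces SourceBuffer.changeType():
//   'avc1.64001f'.replace(VIDEO_CODEC_PROFILE_REPACE, '$1')     // -> 'avc1'
//   'hvc1.1.6.L93.B0'.replace(VIDEO_CODEC_PROFILE_REPACE, '$1') // -> 'hvc1'
// A switch between two avc1 profiles therefore keeps the existing SourceBuffer type, while an
// avc1 -> hvc1 switch appends a changeType operation to the queue.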
  299.  
  300. protected appendChangeType(type, mimeType) {
  301. const { operationQueue } = this;
  302. const operation: BufferOperation = {
  303. execute: () => {
  304. const sb = this.sourceBuffer[type];
  305. if (sb) {
  306. logger.log(
  307. `[buffer-controller]: changing ${type} sourceBuffer type to ${mimeType}`
  308. );
  309. sb.changeType(mimeType);
  310. }
  311. operationQueue.shiftAndExecuteNext(type);
  312. },
  313. onStart: () => {},
  314. onComplete: () => {},
  315. onError: (e) => {
  316. logger.warn(
  317. `[buffer-controller]: Failed to change ${type} SourceBuffer type`,
  318. e
  319. );
  320. },
  321. };
  322.  
  323. operationQueue.append(operation, type);
  324. }
  325.  
  326. protected onBufferAppending(
  327. event: Events.BUFFER_APPENDING,
  328. eventData: BufferAppendingData
  329. ) {
  330. const { hls, operationQueue, tracks } = this;
  331. const { data, type, frag, part, chunkMeta } = eventData;
  332. const chunkStats = chunkMeta.buffering[type];
  333.  
  334. const bufferAppendingStart = self.performance.now();
  335. chunkStats.start = bufferAppendingStart;
  336. const fragBuffering = frag.stats.buffering;
  337. const partBuffering = part ? part.stats.buffering : null;
  338. if (fragBuffering.start === 0) {
  339. fragBuffering.start = bufferAppendingStart;
  340. }
  341. if (partBuffering && partBuffering.start === 0) {
  342. partBuffering.start = bufferAppendingStart;
  343. }
  344.  
  345. // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
346. // We adjust `SourceBuffer.timestampOffset` (the desired point in the timeline where the next frames should be appended)
347. // in Chrome when we detect an MPEG audio container and the time delta between the level PTS and `SourceBuffer.timestampOffset`
348. // is greater than 100ms (this is enough to handle seeks for VOD or level changes for LIVE videos).
  349. // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
  350. const audioTrack = tracks.audio;
  351. let checkTimestampOffset = false;
  352. if (type === 'audio' && audioTrack?.container === 'audio/mpeg') {
  353. checkTimestampOffset =
  354. !this.lastMpegAudioChunk ||
  355. chunkMeta.id === 1 ||
  356. this.lastMpegAudioChunk.sn !== chunkMeta.sn;
  357. this.lastMpegAudioChunk = chunkMeta;
  358. }
  359.  
  360. const fragStart = frag.start;
  361. const operation: BufferOperation = {
  362. execute: () => {
  363. chunkStats.executeStart = self.performance.now();
  364. if (checkTimestampOffset) {
  365. const sb = this.sourceBuffer[type];
  366. if (sb) {
  367. const delta = fragStart - sb.timestampOffset;
  368. if (Math.abs(delta) >= 0.1) {
  369. logger.log(
370. `[buffer-controller]: Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn}`
  371. );
  372. sb.timestampOffset = fragStart;
  373. }
  374. }
  375. }
  376. this.appendExecutor(data, type);
  377. },
  378. onStart: () => {
  379. // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
  380. },
  381. onComplete: () => {
  382. // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
  383. const end = self.performance.now();
  384. chunkStats.executeEnd = chunkStats.end = end;
  385. if (fragBuffering.first === 0) {
  386. fragBuffering.first = end;
  387. }
  388. if (partBuffering && partBuffering.first === 0) {
  389. partBuffering.first = end;
  390. }
  391.  
  392. const { sourceBuffer } = this;
  393. const timeRanges = {};
  394. for (const type in sourceBuffer) {
  395. timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]);
  396. }
  397. this.appendError = 0;
  398. this.hls.trigger(Events.BUFFER_APPENDED, {
  399. type,
  400. frag,
  401. part,
  402. chunkMeta,
  403. parent: frag.type,
  404. timeRanges,
  405. });
  406. },
  407. onError: (err) => {
408. // in case any error occurred while appending, put the segment back in the segments table
  409. logger.error(
  410. `[buffer-controller]: Error encountered while trying to append to the ${type} SourceBuffer`,
  411. err
  412. );
  413. const event = {
  414. type: ErrorTypes.MEDIA_ERROR,
  415. parent: frag.type,
  416. details: ErrorDetails.BUFFER_APPEND_ERROR,
  417. err,
  418. fatal: false,
  419. };
  420.  
  421. if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
  422. // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
423. // let's stop appending any segments and report a BUFFER_FULL_ERROR
  424. event.details = ErrorDetails.BUFFER_FULL_ERROR;
  425. } else {
  426. this.appendError++;
  427. event.details = ErrorDetails.BUFFER_APPEND_ERROR;
428. /* with UHD content, we could get a loop of quota exceeded errors until the
429. browser is able to evict some data from the sourcebuffer. Retrying can help recover.
430. */
  431. if (this.appendError > hls.config.appendErrorMaxRetry) {
  432. logger.error(
  433. `[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`
  434. );
  435. event.fatal = true;
  436. hls.stopLoad();
  437. }
  438. }
  439. hls.trigger(Events.ERROR, event);
  440. },
  441. };
  442. operationQueue.append(operation, type);
  443. }
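// Illustrative note: a QuotaExceededError (DOMException.QUOTA_EXCEEDED_ERR) is surfaced as a
// non-fatal BUFFER_FULL_ERROR so stream controllers can back off and flush, whereas other
// append failures increment this.appendError and only become fatal (stopping the load) once
// hls.config.appendErrorMaxRetry is exceeded.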
  444.  
  445. protected onBufferFlushing(
  446. event: Events.BUFFER_FLUSHING,
  447. data: BufferFlushingData
  448. ) {
  449. const { operationQueue } = this;
  450. const flushOperation = (type: SourceBufferName): BufferOperation => ({
  451. execute: this.removeExecutor.bind(
  452. this,
  453. type,
  454. data.startOffset,
  455. data.endOffset
  456. ),
  457. onStart: () => {
  458. // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  459. },
  460. onComplete: () => {
  461. // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  462. this.hls.trigger(Events.BUFFER_FLUSHED, { type });
  463. },
  464. onError: (e) => {
  465. logger.warn(
  466. `[buffer-controller]: Failed to remove from ${type} SourceBuffer`,
  467. e
  468. );
  469. },
  470. });
  471.  
  472. if (data.type) {
  473. operationQueue.append(flushOperation(data.type), data.type);
  474. } else {
  475. this.getSourceBufferTypes().forEach((type: SourceBufferName) => {
  476. operationQueue.append(flushOperation(type), type);
  477. });
  478. }
  479. }
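// Sketch (assumption: typical internal usage by other controllers) of what feeds this handler:
//   hls.trigger(Events.BUFFER_FLUSHING, { startOffset: 0, endOffset: 30, type: 'video' });
// Omitting `type` enqueues a remove operation on every existing SourceBuffer, and each one
// reports BUFFER_FLUSHED when its remove completes.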
  480.  
  481. protected onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
  482. const { frag, part } = data;
  483. const buffersAppendedTo: Array<SourceBufferName> = [];
  484. const elementaryStreams = part
  485. ? part.elementaryStreams
  486. : frag.elementaryStreams;
  487. if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
  488. buffersAppendedTo.push('audiovideo');
  489. } else {
  490. if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
  491. buffersAppendedTo.push('audio');
  492. }
  493. if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
  494. buffersAppendedTo.push('video');
  495. }
  496. }
  497.  
  498. const onUnblocked = () => {
  499. const now = self.performance.now();
  500. frag.stats.buffering.end = now;
  501. if (part) {
  502. part.stats.buffering.end = now;
  503. }
  504. const stats = part ? part.stats : frag.stats;
  505. this.hls.trigger(Events.FRAG_BUFFERED, {
  506. frag,
  507. part,
  508. stats,
  509. id: frag.type,
  510. });
  511. };
  512.  
  513. if (buffersAppendedTo.length === 0) {
  514. logger.warn(
  515. `Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`
  516. );
  517. }
  518.  
  519. this.blockBuffers(onUnblocked, buffersAppendedTo);
  520. }
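// Illustrative note: FRAG_BUFFERED is deliberately deferred through blockBuffers() so that it
// only fires after every SourceBuffer that received data for this fragment or part has drained
// its pending append operations (i.e. after the corresponding 'updateend' events).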
  521.  
  522. private onFragChanged(event: Events.FRAG_CHANGED, data: FragChangedData) {
  523. this.flushBackBuffer();
  524. }
  525.  
526. // on BUFFER_EOS mark matching sourcebuffer(s) as ended and, once all have ended, queue mediaSource.endOfStream()
527. // an undefined data.type will mark all buffers as EOS.
  528. protected onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
  529. const ended = this.getSourceBufferTypes().reduce((acc, type) => {
  530. const sb = this.sourceBuffer[type];
  531. if (sb && (!data.type || data.type === type)) {
  532. sb.ending = true;
  533. if (!sb.ended) {
  534. sb.ended = true;
  535. logger.log(`[buffer-controller]: ${type} sourceBuffer now EOS`);
  536. }
  537. }
  538. return acc && !!(!sb || sb.ended);
  539. }, true);
  540.  
  541. if (ended) {
  542. logger.log(`[buffer-controller]: Queueing mediaSource.endOfStream()`);
  543. this.blockBuffers(() => {
  544. this.getSourceBufferTypes().forEach((type) => {
  545. const sb = this.sourceBuffer[type];
  546. if (sb) {
  547. sb.ending = false;
  548. }
  549. });
  550. const { mediaSource } = this;
  551. if (!mediaSource || mediaSource.readyState !== 'open') {
  552. if (mediaSource) {
  553. logger.info(
  554. `[buffer-controller]: Could not call mediaSource.endOfStream(). mediaSource.readyState: ${mediaSource.readyState}`
  555. );
  556. }
  557. return;
  558. }
  559. logger.log(`[buffer-controller]: Calling mediaSource.endOfStream()`);
  560. // Allow this to throw and be caught by the enqueueing function
  561. mediaSource.endOfStream();
  562. });
  563. }
  564. }
  565.  
  566. protected onLevelUpdated(
  567. event: Events.LEVEL_UPDATED,
  568. { details }: LevelUpdatedData
  569. ) {
  570. if (!details.fragments.length) {
  571. return;
  572. }
  573. this.details = details;
  574.  
  575. if (this.getSourceBufferTypes().length) {
  576. this.blockBuffers(this.updateMediaElementDuration.bind(this));
  577. } else {
  578. this.updateMediaElementDuration();
  579. }
  580. }
  581.  
  582. flushBackBuffer() {
  583. const { hls, details, media, sourceBuffer } = this;
  584. if (!media || details === null) {
  585. return;
  586. }
  587.  
  588. const sourceBufferTypes = this.getSourceBufferTypes();
  589. if (!sourceBufferTypes.length) {
  590. return;
  591. }
  592.  
  593. // Support for deprecated liveBackBufferLength
  594. const backBufferLength =
  595. details.live && hls.config.liveBackBufferLength !== null
  596. ? hls.config.liveBackBufferLength
  597. : hls.config.backBufferLength;
  598.  
  599. if (!Number.isFinite(backBufferLength) || backBufferLength < 0) {
  600. return;
  601. }
  602.  
  603. const currentTime = media.currentTime;
  604. const targetDuration = details.levelTargetDuration;
  605. const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
  606. const targetBackBufferPosition =
  607. Math.floor(currentTime / targetDuration) * targetDuration -
  608. maxBackBufferLength;
  609. sourceBufferTypes.forEach((type: SourceBufferName) => {
  610. const sb = sourceBuffer[type];
  611. if (sb) {
  612. const buffered = BufferHelper.getBuffered(sb);
  613. // when target buffer start exceeds actual buffer start
  614. if (
  615. buffered.length > 0 &&
  616. targetBackBufferPosition > buffered.start(0)
  617. ) {
  618. hls.trigger(Events.BACK_BUFFER_REACHED, {
  619. bufferEnd: targetBackBufferPosition,
  620. });
  621.  
  622. // Support for deprecated event:
  623. if (details.live) {
  624. hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
  625. bufferEnd: targetBackBufferPosition,
  626. });
  627. } else if (
  628. sb.ended &&
  629. buffered.end(buffered.length - 1) - currentTime < targetDuration * 2
  630. ) {
  631. logger.info(
  632. `[buffer-controller]: Cannot flush ${type} back buffer while SourceBuffer is in ended state`
  633. );
  634. return;
  635. }
  636.  
  637. hls.trigger(Events.BUFFER_FLUSHING, {
  638. startOffset: 0,
  639. endOffset: targetBackBufferPosition,
  640. type,
  641. });
  642. }
  643. }
  644. });
  645. }
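// Worked example (illustrative numbers): with currentTime = 120s, levelTargetDuration = 10s and
// backBufferLength = 90s, maxBackBufferLength = max(90, 10) = 90 and targetBackBufferPosition =
// floor(120 / 10) * 10 - 90 = 30, so any range buffered before t = 30s is scheduled for removal
// via BUFFER_FLUSHING.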
  646.  
  647. /**
  648. * Update Media Source duration to current level duration or override to Infinity if configuration parameter
649. * `liveDurationInfinity` is set to `true`
  650. * More details: https://github.com/video-dev/hls.js/issues/355
  651. */
  652. private updateMediaElementDuration() {
  653. if (
  654. !this.details ||
  655. !this.media ||
  656. !this.mediaSource ||
  657. this.mediaSource.readyState !== 'open'
  658. ) {
  659. return;
  660. }
  661. const { details, hls, media, mediaSource } = this;
  662. const levelDuration = details.fragments[0].start + details.totalduration;
  663. const mediaDuration = media.duration;
  664. const msDuration = Number.isFinite(mediaSource.duration)
  665. ? mediaSource.duration
  666. : 0;
  667.  
  668. if (details.live && hls.config.liveDurationInfinity) {
  669. // Override duration to Infinity
  670. logger.log(
  671. '[buffer-controller]: Media Source duration is set to Infinity'
  672. );
  673. mediaSource.duration = Infinity;
  674. this.updateSeekableRange(details);
  675. } else if (
  676. (levelDuration > msDuration && levelDuration > mediaDuration) ||
  677. !Number.isFinite(mediaDuration)
  678. ) {
  679. // levelDuration was the last value we set.
  680. // not using mediaSource.duration as the browser may tweak this value
681. // only update the Media Source duration if its value increases; this avoids
682. // flushing the already buffered portion when switching between quality levels
  683. logger.log(
  684. `[buffer-controller]: Updating Media Source duration to ${levelDuration.toFixed(
  685. 3
  686. )}`
  687. );
  688. mediaSource.duration = levelDuration;
  689. }
  690. }
  691.  
  692. updateSeekableRange(levelDetails) {
  693. const mediaSource = this.mediaSource;
  694. const fragments = levelDetails.fragments;
  695. const len = fragments.length;
  696. if (len && levelDetails.live && mediaSource?.setLiveSeekableRange) {
  697. const start = Math.max(0, fragments[0].start);
  698. const end = Math.max(start, start + levelDetails.totalduration);
  699. mediaSource.setLiveSeekableRange(start, end);
  700. }
  701. }
  702.  
  703. protected checkPendingTracks() {
  704. const { bufferCodecEventsExpected, operationQueue, pendingTracks } = this;
  705.  
  706. // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
  707. // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
  708. // data has been appended to existing ones.
709. // 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
  710. const pendingTracksCount = Object.keys(pendingTracks).length;
  711. if (
  712. (pendingTracksCount && !bufferCodecEventsExpected) ||
  713. pendingTracksCount === 2
  714. ) {
715. // ok, let's create them now!
  716. this.createSourceBuffers(pendingTracks);
  717. this.pendingTracks = {};
718. // append any pending segments now!
  719. const buffers = this.getSourceBufferTypes();
  720. if (buffers.length === 0) {
  721. this.hls.trigger(Events.ERROR, {
  722. type: ErrorTypes.MEDIA_ERROR,
  723. details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
  724. fatal: true,
  725. reason: 'could not create source buffer for media codec(s)',
  726. });
  727. return;
  728. }
  729. buffers.forEach((type: SourceBufferName) => {
  730. operationQueue.executeNext(type);
  731. });
  732. }
  733. }
  734.  
  735. protected createSourceBuffers(tracks: TrackSet) {
  736. const { sourceBuffer, mediaSource } = this;
  737. if (!mediaSource) {
  738. throw Error('createSourceBuffers called when mediaSource was null');
  739. }
  740. let tracksCreated = 0;
  741. for (const trackName in tracks) {
  742. if (!sourceBuffer[trackName]) {
  743. const track = tracks[trackName as keyof TrackSet];
  744. if (!track) {
  745. throw Error(
746. `attempting to create a source buffer for track ${trackName}, however no track info exists`
  747. );
  748. }
  749. // use levelCodec as first priority
  750. const codec = track.levelCodec || track.codec;
  751. const mimeType = `${track.container};codecs=${codec}`;
  752. logger.log(`[buffer-controller]: creating sourceBuffer(${mimeType})`);
  753. try {
  754. const sb = (sourceBuffer[trackName] =
  755. mediaSource.addSourceBuffer(mimeType));
  756. const sbName = trackName as SourceBufferName;
  757. this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
  758. this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
  759. this.addBufferListener(sbName, 'error', this._onSBUpdateError);
  760. this.tracks[trackName] = {
  761. buffer: sb,
  762. codec: codec,
  763. container: track.container,
  764. levelCodec: track.levelCodec,
  765. metadata: track.metadata,
  766. id: track.id,
  767. };
  768. tracksCreated++;
  769. } catch (err) {
  770. logger.error(
  771. `[buffer-controller]: error while trying to add sourceBuffer: ${err.message}`
  772. );
  773. this.hls.trigger(Events.ERROR, {
  774. type: ErrorTypes.MEDIA_ERROR,
  775. details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
  776. fatal: false,
  777. error: err,
  778. mimeType: mimeType,
  779. });
  780. }
  781. }
  782. }
  783. if (tracksCreated) {
  784. this.hls.trigger(Events.BUFFER_CREATED, { tracks: this.tracks });
  785. }
  786. }
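// Illustrative example of the resulting mimeType strings (actual values depend on the stream):
//   audio: 'audio/mp4;codecs=mp4a.40.2'
//   video: 'video/mp4;codecs=avc1.64001f'
// addSourceBuffer() throws for unsupported types, which is reported as a non-fatal
// BUFFER_ADD_CODEC_ERROR; if no buffer could be created at all, checkPendingTracks() raises a
// fatal BUFFER_INCOMPATIBLE_CODECS_ERROR.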
  787.  
788. // Keep these as arrow functions so that we can reference them directly as event listeners
  789. private _onMediaSourceOpen = () => {
  790. const { media, mediaSource } = this;
  791. logger.log('[buffer-controller]: Media source opened');
  792. if (media) {
  793. media.removeEventListener('emptied', this._onMediaEmptied);
  794. this.updateMediaElementDuration();
  795. this.hls.trigger(Events.MEDIA_ATTACHED, { media });
  796. }
  797.  
  798. if (mediaSource) {
799. // once received, stop listening for the sourceopen event
  800. mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
  801. }
  802. this.checkPendingTracks();
  803. };
  804.  
  805. private _onMediaSourceClose = () => {
  806. logger.log('[buffer-controller]: Media source closed');
  807. };
  808.  
  809. private _onMediaSourceEnded = () => {
  810. logger.log('[buffer-controller]: Media source ended');
  811. };
  812.  
  813. private _onMediaEmptied = () => {
  814. const { media, _objectUrl } = this;
  815. if (media && media.src !== _objectUrl) {
  816. logger.error(
  817. `Media element src was set while attaching MediaSource (${_objectUrl} > ${media.src})`
  818. );
  819. }
  820. };
  821.  
  822. private _onSBUpdateStart(type: SourceBufferName) {
  823. const { operationQueue } = this;
  824. const operation = operationQueue.current(type);
  825. operation.onStart();
  826. }
  827.  
  828. private _onSBUpdateEnd(type: SourceBufferName) {
  829. const { operationQueue } = this;
  830. const operation = operationQueue.current(type);
  831. operation.onComplete();
  832. operationQueue.shiftAndExecuteNext(type);
  833. }
  834.  
  835. private _onSBUpdateError(type: SourceBufferName, event: Event) {
  836. logger.error(`[buffer-controller]: ${type} SourceBuffer error`, event);
  837. // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
838. // SourceBuffer errors are not necessarily fatal; if they are, the HTMLMediaElement will fire an error event
  839. this.hls.trigger(Events.ERROR, {
  840. type: ErrorTypes.MEDIA_ERROR,
  841. details: ErrorDetails.BUFFER_APPENDING_ERROR,
  842. fatal: false,
  843. });
  844. // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
  845. const operation = this.operationQueue.current(type);
  846. if (operation) {
  847. operation.onError(event);
  848. }
  849. }
  850.  
  851. // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
  852. private removeExecutor(
  853. type: SourceBufferName,
  854. startOffset: number,
  855. endOffset: number
  856. ) {
  857. const { media, mediaSource, operationQueue, sourceBuffer } = this;
  858. const sb = sourceBuffer[type];
  859. if (!media || !mediaSource || !sb) {
  860. logger.warn(
  861. `[buffer-controller]: Attempting to remove from the ${type} SourceBuffer, but it does not exist`
  862. );
  863. operationQueue.shiftAndExecuteNext(type);
  864. return;
  865. }
  866. const mediaDuration = Number.isFinite(media.duration)
  867. ? media.duration
  868. : Infinity;
  869. const msDuration = Number.isFinite(mediaSource.duration)
  870. ? mediaSource.duration
  871. : Infinity;
  872. const removeStart = Math.max(0, startOffset);
  873. const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
  874. if (removeEnd > removeStart && !sb.ending) {
  875. sb.ended = false;
  876. logger.log(
  877. `[buffer-controller]: Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`
  878. );
  879. console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
  880. sb.remove(removeStart, removeEnd);
  881. } else {
  882. // Cycle the queue
  883. operationQueue.shiftAndExecuteNext(type);
  884. }
  885. }
  886.  
  887. // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
  888. private appendExecutor(data: Uint8Array, type: SourceBufferName) {
  889. const { operationQueue, sourceBuffer } = this;
  890. const sb = sourceBuffer[type];
  891. if (!sb) {
  892. logger.warn(
  893. `[buffer-controller]: Attempting to append to the ${type} SourceBuffer, but it does not exist`
  894. );
  895. operationQueue.shiftAndExecuteNext(type);
  896. return;
  897. }
  898.  
  899. sb.ended = false;
  900. console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
  901. sb.appendBuffer(data);
  902. }
  903.  
  904. // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
  905. // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
  906. // upon completion, since we already do it here
  907. private blockBuffers(
  908. onUnblocked: () => void,
  909. buffers: Array<SourceBufferName> = this.getSourceBufferTypes()
  910. ) {
  911. if (!buffers.length) {
  912. logger.log(
  913. '[buffer-controller]: Blocking operation requested, but no SourceBuffers exist'
  914. );
  915. Promise.resolve().then(onUnblocked);
  916. return;
  917. }
  918. const { operationQueue } = this;
  919.  
  920. // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
  921. const blockingOperations = buffers.map((type) =>
  922. operationQueue.appendBlocker(type as SourceBufferName)
  923. );
  924. Promise.all(blockingOperations).then(() => {
  925. // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
  926. onUnblocked();
  927. buffers.forEach((type) => {
  928. const sb = this.sourceBuffer[type];
  929. // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
  930. // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
  931. // While this is a workaround, it's probably useful to have around
  932. if (!sb || !sb.updating) {
  933. operationQueue.shiftAndExecuteNext(type);
  934. }
  935. });
  936. });
  937. }
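// Illustrative note: appendBlocker() enqueues an operation whose promise only resolves once it
// reaches the head of that SourceBuffer's queue, so Promise.all() acts as a barrier: onUnblocked()
// (e.g. endOfStream or a duration update) runs only after every targeted buffer has drained its
// preceding appends/removes, and each queue is then cycled again.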
  938.  
  939. private getSourceBufferTypes(): Array<SourceBufferName> {
  940. return Object.keys(this.sourceBuffer) as Array<SourceBufferName>;
  941. }
  942.  
  943. private addBufferListener(
  944. type: SourceBufferName,
  945. event: string,
  946. fn: Function
  947. ) {
  948. const buffer = this.sourceBuffer[type];
  949. if (!buffer) {
  950. return;
  951. }
  952. const listener = fn.bind(this, type);
  953. this.listeners[type].push({ event, listener });
  954. buffer.addEventListener(event, listener);
  955. }
  956.  
  957. private removeBufferListeners(type: SourceBufferName) {
  958. const buffer = this.sourceBuffer[type];
  959. if (!buffer) {
  960. return;
  961. }
  962. this.listeners[type].forEach((l) => {
  963. buffer.removeEventListener(l.event, l.listener);
  964. });
  965. }
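
A minimal usage sketch (assuming the standard hls.js public API; BufferController is created internally by the Hls instance and is never constructed directly by applications, and the manifest URL below is a placeholder):

import Hls from 'hls.js';

const video = document.querySelector('video') as HTMLVideoElement;
if (Hls.isSupported()) {
  const hls = new Hls();
  // MEDIA_ATTACHING -> BufferController.onMediaAttaching creates the MediaSource
  hls.attachMedia(video);
  hls.on(Hls.Events.MEDIA_ATTACHED, () => {
    // emitted from _onMediaSourceOpen once the MediaSource is open
    hls.loadSource('https://example.com/stream.m3u8');
  });
  hls.on(Hls.Events.BUFFER_CREATED, (event, data) => {
    console.log('SourceBuffers created for tracks:', Object.keys(data.tracks));
  });
  hls.on(Hls.Events.BUFFER_APPENDED, (event, data) => {
    console.log(`appended ${data.type} data for sn ${data.frag.sn}`);
  });
}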
  966. }