Home Reference Source

src/controller/stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import type { NetworkComponentAPI } from '../types/component-api';
  3. import { Events } from '../events';
  4. import { BufferHelper } from '../utils/buffer-helper';
  5. import type { FragmentTracker } from './fragment-tracker';
  6. import { FragmentState } from './fragment-tracker';
  7. import type { Level } from '../types/level';
  8. import { PlaylistLevelType } from '../types/loader';
  9. import Fragment, { ElementaryStreamTypes } from '../loader/fragment';
  10. import FragmentLoader from '../loader/fragment-loader';
  11. import TransmuxerInterface from '../demux/transmuxer-interface';
  12. import type { TransmuxerResult } from '../types/transmuxer';
  13. import { ChunkMetadata } from '../types/transmuxer';
  14. import GapController, { MAX_START_GAP_JUMP } from './gap-controller';
  15. import { ErrorDetails } from '../errors';
  16. import { logger } from '../utils/logger';
  17. import type Hls from '../hls';
  18. import type LevelDetails from '../loader/level-details';
  19. import type { TrackSet } from '../types/track';
  20. import type { SourceBufferName } from '../types/buffer';
  21. import type {
  22. MediaAttachedData,
  23. BufferCreatedData,
  24. ManifestParsedData,
  25. LevelLoadingData,
  26. LevelLoadedData,
  27. LevelsUpdatedData,
  28. AudioTrackSwitchingData,
  29. AudioTrackSwitchedData,
  30. FragLoadedData,
  31. FragParsingMetadataData,
  32. FragParsingUserdataData,
  33. FragBufferedData,
  34. BufferFlushedData,
  35. ErrorData,
  36. } from '../types/events';
  37.  
  38. const TICK_INTERVAL = 100; // how often to tick in ms
  39.  
  40. export default class StreamController
  41. extends BaseStreamController
  42. implements NetworkComponentAPI {
  // NOTE(review): field semantics below are inferred from their use in this chunk; hedged where not visible here.
  private audioCodecSwap: boolean = false; // presumably toggles AAC-LC/HE-AAC codec declaration on retry — confirm against _getAudioCodec
  private bitrateTest: boolean = false; // true while the first fragment is loaded purely to measure bandwidth (see startLoad/loadFragment)
  private gapController: GapController | null = null; // created on MEDIA_ATTACHED, detects/resolves playback stalls
  private level: number = -1; // currently loading level index (-1 = none selected yet)
  private _forceStartLoad: boolean = false; // startLoad() was called before levels were known; retry once they are
  private retryDate: number = 0; // performance.now() timestamp after which a failed fragment load may be retried
  private altAudio: boolean = false; // an alternate (separate-URL) audio track is active
  private audioOnly: boolean = false; // main playlist carries audio only (no video)
  private fragPlaying: Fragment | null = null; // fragment currently at the playhead (for FRAG_CHANGED tracking)
  private previouslyPaused: boolean = false; // media.paused state captured before an immediate level switch
  private immediateSwitch: boolean = false; // an immediate (flush-everything) level switch is in progress
  private onvplaying: EventListener | null = null; // bound 'playing' media listener (kept for removeEventListener)
  private onvseeked: EventListener | null = null; // bound 'seeked' media listener (kept for removeEventListener)
  private fragLastKbps: number = 0; // measured throughput of the last buffered fragment, used to estimate fetch delay
  private stalled: boolean = false; // playback-stall flag, reset on manifest load
  private audioCodecSwitch: boolean = false; // both AAC and HE-AAC appear across levels (see onManifestParsed)
  private videoBuffer: any | null = null; // reference to the 'main' video SourceBuffer (see onBufferCreated)
  60.  
  /**
   * Wires the controller to the Hls instance and fragment tracker, creates its
   * own FragmentLoader, and starts in STOPPED until startLoad() is called.
   */
  constructor(hls: Hls, fragmentTracker: FragmentTracker) {
    super(hls, fragmentTracker, '[stream-controller]');
    this.fragmentLoader = new FragmentLoader(hls.config);
    this.state = State.STOPPED;

    this._registerListeners();
  }
  68.  
  // Subscribe to all Hls events this controller reacts to. Must stay in sync
  // with _unregisterListeners so no handler leaks past destroy.
  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(
      Events.FRAG_LOAD_EMERGENCY_ABORTED,
      this.onFragLoadEmergencyAborted,
      this
    );
    hls.on(Events.ERROR, this.onError, this);
    hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  90.  
  91. protected _unregisterListeners() {
  92. const { hls } = this;
  93. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  94. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  95. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  96. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  97. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  98. hls.off(
  99. Events.FRAG_LOAD_EMERGENCY_ABORTED,
  100. this.onFragLoadEmergencyAborted,
  101. this
  102. );
  103. hls.off(Events.ERROR, this.onError, this);
  104. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  105. hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  106. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  107. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  108. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  109. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  110. }
  111.  
  // Lifecycle hook from the base controller: detach all event listeners
  // before the handler is torn down.
  protected onHandlerDestroying() {
    this._unregisterListeners();
  }
  115.  
  /**
   * Begin (or resume) fragment loading from `startPosition`.
   * If levels are not known yet, defers by setting _forceStartLoad and staying
   * STOPPED; presumably re-invoked once the manifest is parsed — confirm against caller.
   * @param startPosition position in seconds, or -1 to use default/lastCurrentTime
   */
  startLoad(startPosition: number): void {
    if (this.levels) {
      const { lastCurrentTime, hls } = this;
      this.stopLoad();
      this.setInterval(TICK_INTERVAL);
      this.level = -1;
      this.fragLoadError = 0;
      if (!this.startFragRequested) {
        // determine load level
        let startLevel = hls.startLevel;
        if (startLevel === -1) {
          if (hls.config.testBandwidth) {
            // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
            startLevel = 0;
            this.bitrateTest = true;
          } else {
            startLevel = hls.nextAutoLevel;
          }
        }
        // set new level to playlist loader : this will trigger start level load
        // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
        this.level = hls.nextLoadLevel = startLevel;
        this.loadedmetadata = false;
      }
      // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
      if (lastCurrentTime > 0 && startPosition === -1) {
        this.log(
          `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
            3
          )}`
        );
        startPosition = lastCurrentTime;
      }
      this.state = State.IDLE;
      this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
      // kick the state machine immediately rather than waiting for the interval
      this.tick();
    } else {
      this._forceStartLoad = true;
      this.state = State.STOPPED;
    }
  }
  157.  
  // Stop loading and clear the deferred-start flag so a pending forced start
  // does not fire after an explicit stop.
  stopLoad() {
    this._forceStartLoad = false;
    super.stopLoad();
  }
  162.  
  /**
   * Per-tick state machine step:
   * - IDLE: try to load the next fragment (doTickIdle)
   * - WAITING_LEVEL: go back to IDLE once the target level's details are
   *   available (and, for live, refreshed) and any CDN tune-in wait is over
   * - FRAG_LOADING_WAITING_RETRY: return to IDLE once the retry backoff
   *   expires or the user seeks
   * Always finishes with onTickEnd (buffer/fragment-change checks).
   */
  doTick() {
    switch (this.state) {
      case State.IDLE:
        this.doTickIdle();
        break;
      case State.WAITING_LEVEL: {
        const { levels, level } = this;
        const details = levels?.[level]?.details;
        if (details && (!details.live || this.levelLastLoaded === this.level)) {
          if (this.waitForCdnTuneIn(details)) {
            break;
          }
          this.state = State.IDLE;
          break;
        }
        break;
      }
      case State.FRAG_LOADING_WAITING_RETRY:
        {
          const now = self.performance.now();
          const retryDate = this.retryDate;
          // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
          if (!retryDate || now >= retryDate || this.media?.seeking) {
            this.log('retryDate reached, switch back to IDLE state');
            this.state = State.IDLE;
          }
        }
        break;
      default:
        break;
    }
    // check buffer
    // check/update current fragment
    this.onTickEnd();
  }
  198.  
  // Post-tick housekeeping: run base-class checks, then verify buffer health
  // and whether the playhead moved into a different fragment.
  protected onTickEnd() {
    super.onTickEnd();
    this.checkBuffer();
    this.checkFragmentChanged();
  }
  204.  
  /**
   * IDLE-state work: decide whether a new fragment should be loaded and load it.
   * Bails out early when the level isn't parsed, media isn't attached (unless
   * start-frag prefetch applies), alt-audio is handling an audio-only stream,
   * or the target level is unknown. Otherwise computes the forward buffer at
   * the load position and either waits (WAITING_LEVEL), signals EOS, or picks
   * the next fragment and dispatches to loadKey/loadFragment.
   */
  private doTickIdle() {
    const { hls, levelLastLoaded, levels, media } = this;
    const { config, nextLoadLevel: level } = hls;

    // if start level not parsed yet OR
    // if video not attached AND start fragment already requested OR start frag prefetch not enabled
    // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
    if (
      levelLastLoaded === null ||
      (!media && (this.startFragRequested || !config.startFragPrefetch))
    ) {
      return;
    }

    // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
    if (this.altAudio && this.audioOnly) {
      return;
    }

    if (!levels || !levels[level]) {
      return;
    }

    const levelInfo = levels[level];

    // if buffer length is less than maxBufLen try to load a new fragment
    // set next load level : this will trigger a playlist load if needed
    this.level = hls.nextLoadLevel = level;

    const levelDetails = levelInfo.details;
    // if level info not retrieved yet, switch state and wait for level retrieval
    // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
    // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
    if (
      !levelDetails ||
      this.state === State.WAITING_LEVEL ||
      (levelDetails.live && this.levelLastLoaded !== level)
    ) {
      this.state = State.WAITING_LEVEL;
      return;
    }

    const pos = this.getLoadPosition();
    if (!Number.isFinite(pos)) {
      return;
    }

    let frag = levelDetails.initSegment;
    let targetBufferTime = 0;
    // skip buffer checks when an init segment still needs its data loaded
    if (!frag || frag.data) {
      // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
      const levelBitrate = levelInfo.maxBitrate;
      let maxBufLen;
      if (levelBitrate) {
        maxBufLen = Math.max(
          (8 * config.maxBufferSize) / levelBitrate,
          config.maxBufferLength
        );
      } else {
        maxBufLen = config.maxBufferLength;
      }
      maxBufLen = Math.min(maxBufLen, config.maxMaxBufferLength);

      // determine next candidate fragment to be loaded, based on current position and end of buffer position
      // ensure up to `config.maxMaxBufferLength` of buffer upfront
      // near the stream start, widen hole tolerance so a small start gap is jumped
      const maxBufferHole =
        pos < config.maxBufferHole
          ? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
          : config.maxBufferHole;
      const bufferInfo = BufferHelper.bufferInfo(
        this.mediaBuffer ? this.mediaBuffer : media,
        pos,
        maxBufferHole
      );
      const bufferLen = bufferInfo.len;
      // Stay idle if we are still with buffer margins
      if (bufferLen >= maxBufLen) {
        return;
      }

      if (this._streamEnded(bufferInfo, levelDetails)) {
        const data: any = {};
        if (this.altAudio) {
          data.type = 'video';
        }

        this.hls.trigger(Events.BUFFER_EOS, data);
        this.state = State.ENDED;
        return;
      }

      targetBufferTime = bufferInfo.end;
      frag = this.getNextFragment(targetBufferTime, levelDetails);
      // Avoid loop loading by using nextLoadPosition set for backtracking
      // TODO: this could be improved to simply pick next sn fragment
      if (
        frag &&
        this.fragmentTracker.getState(frag) === FragmentState.OK &&
        this.nextLoadPosition > targetBufferTime
      ) {
        frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
      }
      if (!frag) {
        return;
      }
    }

    // We want to load the key if we're dealing with an identity key, because we will decrypt
    // this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
    if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
      this.log(
        `Loading key for ${frag.sn} of [${levelDetails.startSN}-${levelDetails.endSN}], level ${level}`
      );
      this.loadKey(frag);
    } else {
      this.loadFragment(frag, levelDetails, targetBufferTime);
    }
  }
  323.  
  // Request the decryption key for an identity-key fragment; the state change
  // pauses fragment loading until the key arrives.
  private loadKey(frag: Fragment) {
    this.state = State.KEY_LOADING;
    this.hls.trigger(Events.KEY_LOADING, { frag });
  }
  328.  
  /**
   * Dispatch a fragment load according to its tracker state:
   * - BACKTRACKED: replay cached load data instead of re-downloading
   * - NOT_LOADED/PARTIAL: load init segment, bitrate-test frag, or normal frag
   * - APPENDING: buffer is full; shrink max buffer length and drop the frag
   * - otherwise with an empty media buffer: reset the tracker (stale state)
   */
  protected loadFragment(
    frag: Fragment,
    levelDetails: LevelDetails,
    targetBufferTime: number
  ) {
    // Check if fragment is not loaded
    const fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;
    // Don't update nextLoadPosition for fragments which are not buffered
    if (Number.isFinite(frag.sn as number) && !this.bitrateTest) {
      this.nextLoadPosition = frag.start + frag.duration;
    }

    // Use data from loaded backtracked fragment if available
    if (fragState === FragmentState.BACKTRACKED) {
      const data = this.fragmentTracker.getBacktrackData(frag);
      if (data) {
        this._handleFragmentLoadProgress(data);
        this._handleFragmentLoadComplete(data);
        return;
      }
    }
    if (
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag);
      } else if (this.bitrateTest) {
        frag.bitrateTest = true;
        this.log(
          `Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`
        );
        this._loadBitrateTestFrag(frag);
      } else {
        this.startFragRequested = true;
        super.loadFragment(frag, levelDetails, targetBufferTime);
      }
    } else if (fragState === FragmentState.APPENDING) {
      // Lower the buffer size and try again
      if (this._reduceMaxBufferLength(frag.duration)) {
        this.fragmentTracker.removeFragment(frag);
      }
    } else if (this.media?.buffered.length === 0) {
      // Stop gap for bad tracker / buffer flush behavior
      this.fragmentTracker.removeAllFragments();
    }
  }
  377.  
  // Return the main-playlist fragment that was appended at the given media position.
  getAppendedFrag(position) {
    const tracker = this.fragmentTracker;
    return tracker.getAppendedFrag(position, PlaylistLevelType.MAIN);
  }
  384.  
  // Return the main-playlist fragment covering the given buffered media position.
  getBufferedFrag(position) {
    const tracker = this.fragmentTracker;
    return tracker.getBufferedFrag(position, PlaylistLevelType.MAIN);
  }
  391.  
  // Return the buffered fragment that follows `frag` (probed 500ms past its
  // end), or null when no fragment is given.
  followingBufferedFrag(frag: Fragment | null) {
    return frag ? this.getBufferedFrag(frag.end + 0.5) : null;
  }
  399.  
  /*
    on immediate level switch :
     - pause playback if playing
     - cancel any pending load request
     - and trigger a buffer flush
  */
  immediateLevelSwitch() {
    this.log('immediateLevelSwitch');
    if (!this.immediateSwitch) {
      this.immediateSwitch = true;
      const media = this.media;
      let previouslyPaused;
      if (media) {
        // remember paused state so playback can resume after the switch
        previouslyPaused = media.paused;
        if (!previouslyPaused) {
          media.pause();
        }
      } else {
        // don't restart playback after instant level switch in case media not attached
        previouslyPaused = true;
      }
      this.previouslyPaused = previouslyPaused;
    }
    const fragCurrent = this.fragCurrent;
    if (fragCurrent?.loader) {
      // abort the in-flight fragment: it belongs to the old level
      fragCurrent.loader.abort();
    }

    this.fragCurrent = null;
    // flush everything
    this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  }
  432.  
  /**
   * on immediate level switch end, after new fragment has been buffered:
   * - nudge video decoder by slightly adjusting video currentTime (if currentTime buffered)
   * - resume the playback if needed
   *
   * Robustness fix: guard against a detached media element — the original
   * passed a possibly-null `media` into BufferHelper.getBuffered and then
   * dereferenced `media.currentTime` unconditionally inside the branch.
   */
  immediateLevelSwitchEnd() {
    const media = this.media;
    if (media && BufferHelper.getBuffered(media).length) {
      this.immediateSwitch = false;
      if (
        media.currentTime > 0 &&
        BufferHelper.isBuffered(media, media.currentTime)
      ) {
        // only nudge if currentTime is buffered
        media.currentTime -= 0.0001;
      }
      if (!this.previouslyPaused) {
        media.play();
      }
    }
  }
  454.  
  /**
   * try to switch ASAP without breaking video playback:
   * in order to ensure smooth but quick level switching,
   * we need to find the next flushable buffer range
   * we should take into account new segment fetch time
   */
  nextLevelSwitch() {
    const { levels, media } = this;
    // ensure that media is defined and that metadata are available (to retrieve currentTime)
    if (media?.readyState) {
      let fetchdelay;
      const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
      if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
        // flush buffer preceding current fragment (flush until current fragment start offset)
        // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
        this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
      }
      if (!media.paused && levels) {
        // add a safety delay of 1s
        const nextLevelId = this.hls.nextLoadLevel;
        const nextLevel = levels[nextLevelId];
        const fragLastKbps = this.fragLastKbps;
        // estimate how long fetching one fragment at the next level will take,
        // based on the last measured throughput
        if (fragLastKbps && this.fragCurrent) {
          fetchdelay =
            (this.fragCurrent.duration * nextLevel.maxBitrate) /
              (1000 * fragLastKbps) +
            1;
        } else {
          fetchdelay = 0;
        }
      } else {
        fetchdelay = 0;
      }
      // this.log('fetchdelay:'+fetchdelay);
      // find buffer range that will be reached once new fragment will be fetched
      const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
      if (bufferedFrag) {
        // we can flush buffer range following this one without stalling playback
        const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
        if (nextBufferedFrag) {
          // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
          const fragCurrent = this.fragCurrent;
          if (fragCurrent?.loader) {
            fragCurrent.loader.abort();
          }

          this.fragCurrent = null;
          // start flush position is the start PTS of next buffered frag.
          // we use frag.maxStartPTS which is max(audio startPTS, video startPTS).
          // in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment
          const maxStart = nextBufferedFrag.maxStartPTS
            ? nextBufferedFrag.maxStartPTS
            : nextBufferedFrag.start;
          const startPts = Math.max(
            bufferedFrag.end,
            maxStart +
              Math.min(
                this.config.maxFragLookUpTolerance,
                nextBufferedFrag.duration
              )
          );
          this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
        }
      }
    }
  }
  521.  
  // Flush the main buffer over [startOffset, endOffset). When alternate audio
  // is active only the 'video' SourceBuffer is flushed; otherwise (null) all.
  flushMainBuffer(startOffset: number, endOffset: number) {
    const type = this.altAudio ? 'video' : null;
    super.flushMainBuffer(startOffset, endOffset, type);
  }
  529.  
  // Media element attached: hook 'playing'/'seeked' listeners (bound handlers
  // are kept so they can be removed on detach) and create the GapController.
  onMediaAttached(event: Events.MEDIA_ATTACHED, data: MediaAttachedData) {
    super.onMediaAttached(event, data);
    const media = data.media;
    this.onvplaying = this.onMediaPlaying.bind(this);
    this.onvseeked = this.onMediaSeeked.bind(this);
    media.addEventListener('playing', this.onvplaying as EventListener);
    media.addEventListener('seeked', this.onvseeked as EventListener);
    this.gapController = new GapController(
      this.config,
      media,
      this.fragmentTracker,
      this.hls
    );
  }
  544.  
  // Media element detaching: remove the listeners added in onMediaAttached
  // and let the base class finish the teardown.
  onMediaDetaching() {
    const { media } = this;
    if (media) {
      media.removeEventListener('playing', this.onvplaying);
      media.removeEventListener('seeked', this.onvseeked);
      this.onvplaying = this.onvseeked = null;
    }

    super.onMediaDetaching();
  }
  555.  
  // 'playing' media event handler.
  onMediaPlaying() {
    // tick to speed up FRAG_CHANGED triggering
    this.tick();
  }
  560.  
  // 'seeked' media event handler: log the new position and tick immediately.
  onMediaSeeked() {
    const media = this.media;
    const currentTime = media ? media.currentTime : null;
    if (Number.isFinite(currentTime)) {
      this.log(`Media seeked to ${currentTime.toFixed(3)}`);
    }

    // tick to speed up FRAG_CHANGED triggering
    this.tick();
  }
  571.  
  // New manifest is being loaded: reset all playback/buffer bookkeeping so
  // stale state from a previous stream cannot leak into the new one.
  onManifestLoading() {
    // reset buffer on manifest loading
    this.log('Trigger BUFFER_RESET');
    this.hls.trigger(Events.BUFFER_RESET, undefined);
    this.fragmentTracker.removeAllFragments();
    this.stalled = false;
    this.startPosition = this.lastCurrentTime = 0;
    this.fragPlaying = null;
  }
  581.  
  // Manifest parsed: scan every level's audio codec to see whether the stream
  // mixes AAC-LC ('mp4a.40.2') and HE-AAC ('mp4a.40.5') — if so, an audio
  // codec switch may be needed when changing levels. Stores the level list
  // and clears the start-fragment flag.
  onManifestParsed(event: Events.MANIFEST_PARSED, data: ManifestParsedData) {
    let aac = false;
    let heaac = false;
    data.levels.forEach((level) => {
      // detect if we have different kind of audio codecs used amongst playlists
      const codec = level.audioCodec;
      if (codec) {
        aac = aac || codec.indexOf('mp4a.40.2') !== -1;
        heaac = heaac || codec.indexOf('mp4a.40.5') !== -1;
      }
    });
    this.audioCodecSwitch = aac && heaac;
    if (this.audioCodecSwitch) {
      this.log(
        'Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'
      );
    }

    this.levels = data.levels;
    this.startFragRequested = false;
  }
  609.  
  // A level playlist started loading: if its details are missing, stale
  // (live playlist not yet refreshed for this level), or still in the
  // low-latency CDN tune-in window, park the state machine in WAITING_LEVEL.
  onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
    const { levels } = this;
    if (!levels || this.state !== State.IDLE) {
      return;
    }
    const level = levels[data.level];
    if (
      !level.details ||
      (level.details.live && this.levelLastLoaded !== data.level) ||
      this.waitForCdnTuneIn(level.details)
    ) {
      this.state = State.WAITING_LEVEL;
    }
  }
  624.  
  /**
   * A level playlist finished loading: abort any fragment load for a
   * different level, align live playlists against the previous details,
   * store the new details, emit LEVEL_UPDATED, leave WAITING_LEVEL when
   * appropriate, and set/sync the start position before ticking.
   */
  onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    const { levels } = this;
    const newLevelId = data.level;
    const newDetails = data.details;
    const duration = newDetails.totalduration;

    if (!levels) {
      this.warn(`Levels were reset while loading level ${newLevelId}`);
      return;
    }
    this.log(
      `Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}], cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`
    );

    const fragCurrent = this.fragCurrent;
    if (
      fragCurrent &&
      (this.state === State.FRAG_LOADING ||
        this.state === State.FRAG_LOADING_WAITING_RETRY)
    ) {
      // the in-flight fragment no longer matches the loaded level: abort it
      if (fragCurrent.level !== data.level && fragCurrent.loader) {
        this.state = State.IDLE;
        fragCurrent.loader.abort();
      }
    }

    const curLevel = levels[newLevelId];
    let sliding = 0;
    if (newDetails.live || curLevel.details?.live) {
      // a live delta update with no fragments is unusable; flag and bail so a
      // full playlist reload can recover
      if (!newDetails.fragments[0]) {
        newDetails.deltaUpdateFailed = true;
      }
      if (newDetails.deltaUpdateFailed) {
        return;
      }
      sliding = this.alignPlaylists(newDetails, curLevel.details);
    }
    // override level info
    curLevel.details = newDetails;
    this.levelLastLoaded = newLevelId;

    this.hls.trigger(Events.LEVEL_UPDATED, {
      details: newDetails,
      level: newLevelId,
    });

    // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
    if (this.state === State.WAITING_LEVEL) {
      if (this.waitForCdnTuneIn(newDetails)) {
        // Wait for Low-Latency CDN Tune-in
        return;
      }
      this.state = State.IDLE;
    }

    if (!this.startFragRequested) {
      this.setStartPosition(newDetails, sliding);
    } else if (newDetails.live) {
      this.synchronizeToLiveEdge(newDetails);
    }

    // trigger handler right now
    this.tick();
  }
  689.  
  /**
   * Progressive fragment-load callback: feed each received payload chunk into
   * the (lazily created) transmuxer for remux to ISO-BMFF. Bails out if the
   * levels or level details were reset mid-load.
   */
  _handleFragmentLoadProgress(data: FragLoadedData) {
    const { frag, part, payload } = data;
    const { levels } = this;
    if (!levels) {
      this.warn(
        `Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
      );
      return;
    }
    const currentLevel = levels[frag.level];
    const details = currentLevel.details as LevelDetails;
    if (!details) {
      this.warn(
        `Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`
      );
      return;
    }
    const videoCodec = currentLevel.videoCodec;

    // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
    const accurateTimeOffset = details.PTSKnown || !details.live;
    const initSegmentData = details.initSegment?.data || new Uint8Array(0);
    const audioCodec = this._getAudioCodec(currentLevel);

    // transmux the MPEG-TS data to ISO-BMFF segments
    // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
    const transmuxer = (this.transmuxer =
      this.transmuxer ||
      new TransmuxerInterface(
        this.hls,
        PlaylistLevelType.MAIN,
        this._handleTransmuxComplete.bind(this),
        this._handleTransmuxerFlush.bind(this)
      ));
    // partIndex -1 means this is a whole-fragment (non low-latency part) chunk
    const partIndex = part ? part.index : -1;
    const partial = partIndex !== -1;
    const chunkMeta = new ChunkMetadata(
      frag.level,
      frag.sn as number,
      frag.stats.chunkCount,
      payload.byteLength,
      partIndex,
      partial
    );
    const initPTS = this.initPTS[frag.cc];

    transmuxer.push(
      payload,
      initSegmentData,
      audioCodec,
      videoCodec,
      frag,
      part,
      details.totalduration,
      accurateTimeOffset,
      chunkMeta,
      initPTS
    );
  }
  749.  
  750. private resetTransmuxer() {
  751. if (this.transmuxer) {
  752. this.transmuxer.destroy();
  753. this.transmuxer = null;
  754. }
  755. }
  756.  
  /**
   * Audio track switch started. Only acts when switching TO main (muxed)
   * audio: re-schedules main loading against media.buffered, aborts the
   * current fragment, resets the transmuxer, flushes alt audio if needed and
   * immediately confirms the switch. Switching to an alternate track is
   * handled by the audio stream controller instead.
   */
  onAudioTrackSwitching(
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData
  ) {
    // if any URL found on new audio track, it is an alternate audio track
    const fromAltAudio = this.altAudio;
    const altAudio = !!data.url;
    const trackId = data.id;
    // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
    // don't do anything if we switch to alt audio: audio stream controller is handling it.
    // we will just have to change buffer scheduling on audioTrackSwitched
    if (!altAudio) {
      if (this.mediaBuffer !== this.media) {
        this.log(
          'Switching on main audio, use media.buffered to schedule main fragment loading'
        );
        this.mediaBuffer = this.media;
        const fragCurrent = this.fragCurrent;
        // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
        if (fragCurrent?.loader) {
          this.log('Switching to main audio track, cancel main fragment load');
          fragCurrent.loader.abort();
        }
        this.fragCurrent = null;
        this.fragPrevious = null;
        // destroy transmuxer to force init segment generation (following audio switch)
        this.resetTransmuxer();
        // switch to IDLE state to load new fragment
        this.state = State.IDLE;
      } else if (this.audioOnly) {
        // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
        this.resetTransmuxer();
      }
      const hls = this.hls;
      // If switching from alt to main audio, flush all audio and trigger track switched
      if (fromAltAudio) {
        hls.trigger(Events.BUFFER_FLUSHING, {
          startOffset: 0,
          endOffset: Number.POSITIVE_INFINITY,
          type: 'audio',
        });
      }
      hls.trigger(Events.AUDIO_TRACK_SWITCHED, {
        id: trackId,
      });
    }
  }
  804.  
  // Audio track switch completed: when an alternate track is now active,
  // schedule main fragment loading against the video SourceBuffer instead of
  // the combined media buffer, then tick to resume loading.
  onAudioTrackSwitched(
    event: Events.AUDIO_TRACK_SWITCHED,
    data: AudioTrackSwitchedData
  ) {
    const trackId = data.id;
    const altAudio = !!this.hls.audioTracks[trackId].url;
    if (altAudio) {
      const videoBuffer = this.videoBuffer;
      // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
      if (videoBuffer && this.mediaBuffer !== videoBuffer) {
        this.log(
          'Switching on alternate audio, use video.buffered to schedule main fragment loading'
        );
        this.mediaBuffer = videoBuffer;
      }
    }
    this.altAudio = altAudio;
    this.tick();
  }
  824.  
  /**
   * SourceBuffers created: find the track owned by 'main', remember the video
   * SourceBuffer, and choose which buffered ranges drive main scheduling —
   * the main track's buffer when an alternate track exists, else the media
   * element's combined buffer.
   */
  onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
    const tracks = data.tracks;
    let mediaTrack;
    let name;
    let alternate = false;
    for (const type in tracks) {
      const track = tracks[type];
      if (track.id === 'main') {
        name = type;
        mediaTrack = track;
        // keep video source buffer reference
        if (type === 'video') {
          const videoTrack = tracks[type];
          if (videoTrack) {
            this.videoBuffer = videoTrack.buffer;
          }
        }
      } else {
        alternate = true;
      }
    }
    if (alternate && mediaTrack) {
      this.log(
        `Alternate track found, use ${name}.buffered to schedule main fragment loading`
      );
      this.mediaBuffer = mediaTrack.buffer;
    } else {
      this.mediaBuffer = this.media;
    }
  }
  855.  
  /**
   * A main fragment (or part) finished buffering: record throughput (kbps)
   * for fetch-delay estimation, remember the fragment, and complete the
   * buffering cycle. Ignores non-main fragments and aborted/stale contexts.
   */
  onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    const { frag, part } = data;
    if (frag && frag.type !== 'main') {
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE, since that will interfere with a level switch
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${this.state}`
      );
      return;
    }
    const stats = part ? part.stats : frag.stats;
    // bits transferred / seconds from first byte loaded to buffering complete
    this.fragLastKbps = Math.round(
      (8 * stats.total) / (stats.buffering.end - stats.loading.first)
    );
    this.fragPrevious = frag;
    this.fragBufferedComplete(frag, part);
  }
  878.  
// Reacts to ERROR events relevant to the main stream: schedules fragment-load
// retries with exponential backoff, demotes/propagates level-load errors, and
// recovers from BUFFER_FULL_ERROR by shrinking the buffer target or flushing.
onError(event: Events.ERROR, data: ErrorData) {
  // When the event carries no frag, fall back to the fragment currently loading.
  const frag = data.frag || this.fragCurrent;
  // don't handle frag error not related to main fragment
  if (frag && frag.type !== 'main') {
    return;
  }

  // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
  const mediaBuffered =
    !!this.media &&
    BufferHelper.isBuffered(this.media, this.media.currentTime) &&
    BufferHelper.isBuffered(this.media, this.media.currentTime + 0.5);

  switch (data.details) {
    case ErrorDetails.FRAG_LOAD_ERROR:
    case ErrorDetails.FRAG_LOAD_TIMEOUT:
    case ErrorDetails.KEY_LOAD_ERROR:
    case ErrorDetails.KEY_LOAD_TIMEOUT:
      if (!data.fatal) {
        // keep retrying until the limit will be reached
        if (this.fragLoadError + 1 <= this.config.fragLoadingMaxRetry) {
          // exponential backoff capped to config.fragLoadingMaxRetryTimeout
          const delay = Math.min(
            Math.pow(2, this.fragLoadError) *
              this.config.fragLoadingRetryDelay,
            this.config.fragLoadingMaxRetryTimeout
          );
          // @ts-ignore - frag is potentially null according to TS here
          this.warn(
            `Fragment ${frag?.sn} of level ${frag?.level} failed to load, retrying in ${delay}ms`
          );
          // The retry itself happens on a later tick once retryDate is reached.
          this.retryDate = self.performance.now() + delay;
          // retry loading state
          // if loadedmetadata is not set, it means that we are emergency switch down on first frag
          // in that case, reset startFragRequested flag
          if (!this.loadedmetadata) {
            this.startFragRequested = false;
            this.nextLoadPosition = this.startPosition;
          }
          this.fragLoadError++;
          this.state = State.FRAG_LOADING_WAITING_RETRY;
        } else {
          logger.error(
            `[stream-controller]: ${data.details} reaches max retry, redispatch as fatal ...`
          );
          // switch error to fatal
          data.fatal = true;
          this.state = State.ERROR;
        }
      }
      break;
    case ErrorDetails.LEVEL_LOAD_ERROR:
    case ErrorDetails.LEVEL_LOAD_TIMEOUT:
      if (this.state !== State.ERROR) {
        if (data.fatal) {
          // if fatal error, stop processing
          this.warn(`${data.details}`);
          this.state = State.ERROR;
        } else {
          // in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
          if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
            this.state = State.IDLE;
          }
        }
      }
      break;
    case ErrorDetails.BUFFER_FULL_ERROR:
      // if in appending state
      if (
        data.parent === 'main' &&
        (this.state === State.PARSING || this.state === State.PARSED)
      ) {
        // reduce max buf len if current position is buffered
        if (mediaBuffered) {
          this._reduceMaxBufferLength(this.config.maxBufferLength);
          this.state = State.IDLE;
        } else {
          // current position is not buffered, but browser is still complaining about buffer full error
          // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
          // in that case flush the whole buffer to recover
          this.warn(
            'buffer full error also media.currentTime is not buffered, flush everything'
          );
          this.fragCurrent = null;
          // flush everything
          this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
        }
      }
      break;
    default:
      break;
  }
}
  972.  
  973. _reduceMaxBufferLength(minLength) {
  974. const config = this.config;
  975. if (config.maxMaxBufferLength >= minLength) {
  976. // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
  977. config.maxMaxBufferLength /= 2;
  978. this.warn(`Reduce max buffer length to ${config.maxMaxBufferLength}s`);
  979. return true;
  980. }
  981. return false;
  982. }
  983.  
  984. // Checks the health of the buffer and attempts to resolve playback stalls.
  985. private checkBuffer() {
  986. const { media, gapController } = this;
  987. if (!media || !gapController || !media.readyState) {
  988. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  989. return;
  990. }
  991.  
  992. // Check combined buffer
  993. const buffered = BufferHelper.getBuffered(media);
  994.  
  995. if (!this.loadedmetadata && buffered.length) {
  996. this.loadedmetadata = true;
  997. this._seekToStartPos();
  998. } else if (this.immediateSwitch) {
  999. this.immediateLevelSwitchEnd();
  1000. } else {
  1001. // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
  1002. gapController.poll(this.lastCurrentTime);
  1003. }
  1004.  
  1005. this.lastCurrentTime = media.currentTime;
  1006. }
  1007.  
  1008. onFragLoadEmergencyAborted() {
  1009. this.state = State.IDLE;
  1010. // if loadedmetadata is not set, it means that we are emergency switch down on first frag
  1011. // in that case, reset startFragRequested flag
  1012. if (!this.loadedmetadata) {
  1013. this.startFragRequested = false;
  1014. this.nextLoadPosition = this.startPosition;
  1015. }
  1016. this.tick();
  1017. }
  1018.  
  1019. onBufferFlushed(event: Events.BUFFER_FLUSHED, { type }: BufferFlushedData) {
  1020. /* after successful buffer flushing, filter flushed fragments from bufferedFrags
  1021. use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track)
  1022. */
  1023. const media =
  1024. (type === ElementaryStreamTypes.VIDEO
  1025. ? this.videoBuffer
  1026. : this.mediaBuffer) || this.media;
  1027. if (media && type !== ElementaryStreamTypes.AUDIO) {
  1028. this.fragmentTracker.detectEvictedFragments(
  1029. type,
  1030. BufferHelper.getBuffered(media)
  1031. );
  1032. }
  1033. // reset reference to frag
  1034. this.fragPrevious = null;
  1035. // move to IDLE once flush complete. this should trigger new fragment loading
  1036. this.state = State.IDLE;
  1037. }
  1038.  
  1039. onLevelsUpdated(event: Events.LEVELS_UPDATED, data: LevelsUpdatedData) {
  1040. this.levels = data.levels;
  1041. }
  1042.  
// Toggles the AAC <-> HE-AAC swap flag consulted by _getAudioCodec() and
// _bufferInitSegment() when signalling the audio codec.
swapAudioCodec() {
  this.audioCodecSwap = !this.audioCodecSwap;
}
  1046.  
  1047. /**
  1048. * Seeks to the set startPosition if not equal to the mediaElement's current time.
  1049. * @private
  1050. */
  1051. _seekToStartPos() {
  1052. const { media } = this;
  1053. const currentTime = media.currentTime;
  1054. let startPosition = this.startPosition;
  1055. // only adjust currentTime if different from startPosition or if startPosition not buffered
  1056. // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
  1057. if (currentTime !== startPosition && startPosition >= 0) {
  1058. if (media.seeking) {
  1059. logger.log(
  1060. `could not seek to ${startPosition}, already seeking at ${currentTime}`
  1061. );
  1062. return;
  1063. }
  1064. const buffered = BufferHelper.getBuffered(media);
  1065. const bufferStart = buffered.length ? buffered.start(0) : 0;
  1066. const delta = bufferStart - startPosition;
  1067. if (delta > 0 && delta < this.config.maxBufferHole) {
  1068. logger.log(
  1069. `adjusting start position by ${delta} to match buffer start`
  1070. );
  1071. startPosition += delta;
  1072. this.startPosition = startPosition;
  1073. }
  1074. this.log(
  1075. `seek to target start position ${startPosition} from current time ${currentTime}`
  1076. );
  1077. media.currentTime = startPosition;
  1078. }
  1079. }
  1080.  
  1081. _getAudioCodec(currentLevel) {
  1082. let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  1083. if (this.audioCodecSwap) {
  1084. this.log('Swapping playlist audio codec');
  1085. if (audioCodec) {
  1086. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  1087. audioCodec = 'mp4a.40.2';
  1088. } else {
  1089. audioCodec = 'mp4a.40.5';
  1090. }
  1091. }
  1092. }
  1093.  
  1094. return audioCodec;
  1095. }
  1096.  
// Loads a fragment purely to measure bandwidth: the payload is neither parsed
// nor buffered, but FRAG_BUFFERED is still emitted so stats consumers see it.
private _loadBitrateTestFrag(frag: Fragment) {
  this._doFragLoad(frag).then((data) => {
    const { hls } = this;
    // Discard the result if the load yielded nothing, a level switch is
    // pending, or the loading context changed while the test was in flight.
    if (!data || hls.nextLoadLevel || this.fragContextChanged(frag)) {
      return;
    }
    this.fragLoadError = 0;
    this.state = State.IDLE;
    // Allow the real first fragment request to proceed after the test.
    this.startFragRequested = false;
    this.bitrateTest = false;
    frag.bitrateTest = false;
    const stats = frag.stats;
    // Bitrate tests fragments are neither parsed nor buffered
    stats.parsing.start = stats.parsing.end = stats.buffering.start = stats.buffering.end = self.performance.now();
    hls.trigger(Events.FRAG_BUFFERED, {
      stats,
      frag,
      part: null,
      id: 'main',
    });
    this.tick();
  });
}
  1120.  
// Handles a chunk of transmuxed output: buffers init segments and A/V payloads,
// records elementary-stream timing on the fragment/part, and forwards ID3 and
// user-data samples as events. Backtracks when video is not independent.
private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
  const id = 'main';
  const { hls } = this;
  const { remuxResult, chunkMeta } = transmuxResult;

  const context = this.getCurrentContext(chunkMeta);
  if (!context) {
    this.warn(
      `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
    );
    return;
  }
  const { frag, part, level } = context;
  const { video, text, id3, initSegment } = remuxResult;
  // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
  const audio = this.altAudio ? undefined : remuxResult.audio;

  // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
  // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
  if (this.fragContextChanged(frag)) {
    return;
  }

  this.state = State.PARSING;

  if (initSegment) {
    if (initSegment.tracks) {
      this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag,
        id,
        tracks: initSegment.tracks,
      });
    }

    // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
    const initPTS = initSegment.initPTS as number;
    const timescale = initSegment.timescale as number;
    if (Number.isFinite(initPTS)) {
      // Record initPTS per discontinuity counter and notify listeners.
      this.initPTS[frag.cc] = initPTS;
      hls.trigger(Events.INIT_PTS_FOUND, { frag, id, initPTS, timescale });
    }
  }

  // Avoid buffering if backtracking this fragment
  if (video && remuxResult.independent !== false) {
    if (level.details) {
      const { startPTS, endPTS, startDTS, endDTS } = video;
      if (part) {
        part.elementaryStreams[video.type] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      } else if (video.dropped && video.independent) {
        // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
        frag.setElementaryStreamInfo(
          video.type as ElementaryStreamTypes,
          frag.start,
          endPTS,
          frag.start,
          endDTS,
          true
        );
      }
      frag.setElementaryStreamInfo(
        video.type as ElementaryStreamTypes,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(video, frag, part, chunkMeta);
    }
  } else if (remuxResult.independent === false) {
    // No keyframe at the start of this fragment: back up to find one.
    this.backtrack();
    return;
  }

  if (audio) {
    const { startPTS, endPTS, startDTS, endDTS } = audio;
    if (part) {
      part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
        startPTS,
        endPTS,
        startDTS,
        endDTS,
      };
    }
    frag.setElementaryStreamInfo(
      ElementaryStreamTypes.AUDIO,
      startPTS,
      endPTS,
      startDTS,
      endDTS
    );
    this.bufferFragmentData(audio, frag, part, chunkMeta);
  }

  if (id3?.samples?.length) {
    const emittedID3: FragParsingMetadataData = {
      frag,
      id,
      samples: id3.samples,
    };
    hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
  }
  if (text) {
    const emittedText: FragParsingUserdataData = {
      frag,
      id,
      samples: text.samples,
    };
    hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
  }
}
  1238.  
// Announces the parsed track set to the buffer controller (BUFFER_CODECS),
// applying codec overrides/workarounds first, then appends each track's init
// segment (BUFFER_APPENDING). No-op unless currently in the PARSING state.
private _bufferInitSegment(
  currentLevel: Level,
  tracks: TrackSet,
  frag: Fragment,
  chunkMeta: ChunkMetadata
) {
  if (this.state !== State.PARSING) {
    return;
  }

  this.audioOnly = !!tracks.audio && !tracks.video;

  // if audio track is expected to come from audio stream controller, discard any coming from main
  if (this.altAudio && !this.audioOnly) {
    delete tracks.audio;
  }
  // include levelCodec in audio and video tracks
  const { audio, video } = tracks;
  if (audio) {
    let audioCodec = currentLevel.audioCodec;
    const ua = navigator.userAgent.toLowerCase();
    // Apply the AAC <-> HE-AAC swap requested via swapAudioCodec().
    if (audioCodec && this.audioCodecSwap) {
      this.log('Swapping playlist audio codec');
      if (audioCodec.indexOf('mp4a.40.5') !== -1) {
        audioCodec = 'mp4a.40.2';
      } else {
        audioCodec = 'mp4a.40.5';
      }
    }
    // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
    // force HE-AAC, as it seems that most browsers prefers it.
    if (this.audioCodecSwitch) {
      // don't force HE-AAC if mono stream, or in Firefox
      if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
        audioCodec = 'mp4a.40.5';
      }
    }
    // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
    if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
      // Exclude mpeg audio
      audioCodec = 'mp4a.40.2';
      this.log(`Android: force audio codec to ${audioCodec}`);
    }
    audio.levelCodec = audioCodec;
    audio.id = 'main';
  }
  if (video) {
    video.levelCodec = currentLevel.videoCodec;
    video.id = 'main';
  }
  this.hls.trigger(Events.BUFFER_CODECS, tracks);
  // loop through tracks that are going to be provided to bufferController
  Object.keys(tracks).forEach((trackName) => {
    const track = tracks[trackName];
    const initSegment = track.initSegment;
    this.log(
      `Main track:${trackName},container:${track.container},codecs[level/parsed]=[${track.levelCodec}/${track.codec}]`
    );
    if (initSegment) {
      this.hls.trigger(Events.BUFFER_APPENDING, {
        type: trackName as SourceBufferName,
        data: initSegment,
        frag,
        part: null,
        chunkMeta,
      });
    }
  });
  // trigger handler right now
  this.tick();
}
  1310.  
// Abandons the current parse and re-enters fragment selection so a fragment
// starting on a keyframe can be located.
private backtrack() {
  // Causes findFragments to backtrack through fragments to find the keyframe
  this.resetTransmuxer();
  this.state = State.BACKTRACKING;
}
  1316.  
// Determines which appended fragment the playhead is currently inside and
// emits FRAG_CHANGED (and LEVEL_SWITCHED when the level differs) when it
// changes from the last known playing fragment.
private checkFragmentChanged() {
  const video = this.media;
  let fragPlayingCurrent: Fragment | null = null;
  // Only inspect when media has current data (readyState > 1) and no seek is
  // in progress.
  if (video && video.readyState > 1 && video.seeking === false) {
    const currentTime = video.currentTime;
    /* if video element is in seeked state, currentTime can only increase.
      (assuming that playback rate is positive ...)
      As sometimes currentTime jumps back to zero after a
      media decode error, check this, to avoid seeking back to
      wrong position after a media decode error
    */

    if (BufferHelper.isBuffered(video, currentTime)) {
      fragPlayingCurrent = this.getAppendedFrag(currentTime);
    } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
      /* ensure that FRAG_CHANGED event is triggered at startup,
        when first video frame is displayed and playback is paused.
        add a tolerance of 100ms, in case current position is not buffered,
        check if current pos+100ms is buffered and use that buffer range
        for FRAG_CHANGED event reporting */
      fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
    }
    if (fragPlayingCurrent) {
      const fragPlaying = this.fragPlaying;
      const fragCurrentLevel = fragPlayingCurrent.level;
      // A change of sequence number, level, or urlId counts as a new fragment.
      if (
        !fragPlaying ||
        fragPlayingCurrent.sn !== fragPlaying.sn ||
        fragPlaying.level !== fragCurrentLevel ||
        fragPlayingCurrent.urlId !== fragPlaying.urlId
      ) {
        this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent });
        if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
          this.hls.trigger(Events.LEVEL_SWITCHED, {
            level: fragCurrentLevel,
          });
        }
        this.fragPlaying = fragPlayingCurrent;
      }
    }
  }
}
  1359.  
  1360. get nextLevel() {
  1361. const frag = this.nextBufferedFrag;
  1362. if (frag) {
  1363. return frag.level;
  1364. } else {
  1365. return -1;
  1366. }
  1367. }
  1368.  
  1369. get currentLevel() {
  1370. const media = this.media;
  1371. if (media) {
  1372. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  1373. if (fragPlayingCurrent) {
  1374. return fragPlayingCurrent.level;
  1375. }
  1376. }
  1377. return -1;
  1378. }
  1379.  
  1380. get nextBufferedFrag() {
  1381. const media = this.media;
  1382. if (media) {
  1383. // first get end range of current fragment
  1384. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  1385. return this.followingBufferedFrag(fragPlayingCurrent);
  1386. } else {
  1387. return null;
  1388. }
  1389. }
  1390.  
// Read-only accessor for the internal _forceStartLoad flag.
// NOTE(review): the flag is set outside this view — presumably when
// startLoad() is called before media is attached; confirm in the base class.
get forceStartLoad() {
  return this._forceStartLoad;
}
  1394. }