Home Reference Source

src/controller/audio-stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import type { NetworkComponentAPI } from '../types/component-api';
  3. import { Events } from '../events';
  4. import { BufferHelper } from '../utils/buffer-helper';
  5. import type { FragmentTracker } from './fragment-tracker';
  6. import { FragmentState } from './fragment-tracker';
  7. import { Level } from '../types/level';
  8. import { PlaylistLevelType } from '../types/loader';
  9. import Fragment, { ElementaryStreamTypes, Part } from '../loader/fragment';
  10. import FragmentLoader from '../loader/fragment-loader';
  11. import ChunkCache from '../demux/chunk-cache';
  12. import TransmuxerInterface from '../demux/transmuxer-interface';
  13. import type { TransmuxerResult } from '../types/transmuxer';
  14. import { ChunkMetadata } from '../types/transmuxer';
  15. import { fragmentWithinToleranceTest } from './fragment-finders';
  16. import { alignPDT } from '../utils/discontinuities';
  17. import { MAX_START_GAP_JUMP } from './gap-controller';
  18. import { ErrorDetails } from '../errors';
  19. import { logger } from '../utils/logger';
  20. import type Hls from '../hls';
  21. import type LevelDetails from '../loader/level-details';
  22. import type { TrackSet } from '../types/track';
  23. import type {
  24. BufferCreatedData,
  25. AudioTracksUpdatedData,
  26. AudioTrackSwitchingData,
  27. LevelLoadedData,
  28. TrackLoadedData,
  29. BufferAppendingData,
  30. BufferFlushedData,
  31. InitPTSFoundData,
  32. FragLoadedData,
  33. FragParsingMetadataData,
  34. FragParsingUserdataData,
  35. FragBufferedData,
  36. } from '../types/events';
  37.  
  38. const TICK_INTERVAL = 100; // how often to tick in ms
  39.  
// Payload cached while audio transmuxing is blocked waiting for the video
// (main) track's initPTS for the fragment's continuity counter (cc).
type WaitingForPTSData = {
  frag: Fragment; // the audio fragment whose data is being held back
  part: Part | null; // low-latency part, when part loading is in use
  cache: ChunkCache; // accumulates loaded chunks until initPTS arrives
  complete: boolean; // true once the fragment load has fully completed
};
  46.  
// Drives loading/buffering of the alternate audio rendition. Audio fragments
// can only be transmuxed once the video track's initPTS for the same cc is
// known (see onInitPtsFound / WaitingForPTSData).
class AudioStreamController
  extends BaseStreamController
  implements NetworkComponentAPI {
  // performance.now() timestamp before which a failed fragment load is not retried
  private retryDate: number = 0;
  // SourceBuffer of the main (video) track; consulted for main buffer length
  private videoBuffer: any | null = null;
  // continuity counter (cc) of the last video fragment parsed by the stream-controller
  private videoTrackCC: number = -1;
  // videoTrackCC captured when a waiting fragment was cached (detects cc changes)
  private waitingVideoCC: number = -1;
  // true while switching to an alternate audio track (until first fragment buffered)
  private audioSwitch: boolean = false;
  // index of the audio track currently being loaded
  private trackId: number = -1;
  // fragment payload cached while waiting for the video initPTS
  private waitingData: WaitingForPTSData | null = null;
  // details of the main (video) level playlist, used for PDT alignment
  private mainDetails: LevelDetails | null = null;

  constructor(hls: Hls, fragmentTracker: FragmentTracker) {
    super(hls, fragmentTracker, '[audio-stream-controller]');
    this.fragmentLoader = new FragmentLoader(hls.config);

    this._registerListeners();
  }
  65.  
  // Tear down event subscriptions before the handler is destroyed.
  protected onHandlerDestroying() {
    this._unregisterListeners();
  }
  69.  
  // Subscribe to all hls.js events this controller reacts to
  // (mirrored exactly by _unregisterListeners).
  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
    hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.on(Events.ERROR, this.onError, this);
    hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  86.  
  // Unsubscribe every event registered in _registerListeners.
  private _unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
    hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.off(Events.ERROR, this.onError, this);
    hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
    hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  103.  
  // INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
  onInitPtsFound(
    event: Events.INIT_PTS_FOUND,
    { frag, id, initPTS }: InitPTSFoundData
  ) {
    // Only the main (video) track's PTS is relevant here. Always record the
    // latest value for the fragment's continuity counter — it can change on
    // a level switch.
    if (id !== 'main') {
      return;
    }
    const cc = frag.cc;
    this.initPTS[cc] = initPTS;
    this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);
    this.videoTrackCC = cc;
    // Unblock any audio fragment waiting on this PTS by ticking right away
    if (this.state === State.WAITING_INIT_PTS) {
      this.tick();
    }
  }
  122.  
  /**
   * Begin (or resume) fragment loading from the given position.
   * If the track list is not known yet, only record the position and stop.
   * A startPosition of -1 means "resume from lastCurrentTime" when possible.
   */
  startLoad(startPosition) {
    if (!this.levels) {
      this.startPosition = startPosition;
      this.state = State.STOPPED;
      return;
    }
    const lastCurrentTime = this.lastCurrentTime;
    this.stopLoad();
    this.setInterval(TICK_INTERVAL);
    this.fragLoadError = 0;
    if (lastCurrentTime > 0 && startPosition === -1) {
      this.log(
        `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
          3
        )}`
      );
      this.state = State.IDLE;
    } else {
      this.lastCurrentTime = this.startPosition
        ? this.startPosition
        : startPosition;
      this.loadedmetadata = false;
      // wait for the audio track playlist before picking fragments
      this.state = State.WAITING_TRACK;
    }
    // NOTE(review): this unconditionally overwrites lastCurrentTime (set just
    // above) with startPosition — confirm this ordering is intentional.
    this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
    this.tick();
  }
  150.  
  /**
   * State-machine tick: advance loading according to the current state.
   * Fix: in the WAITING_INIT_PTS branch, `this.media.currentTime` was read
   * without a null guard — media can be detached while a fragment waits for
   * the video initPTS (e.g. with startFragPrefetch), so the buffered-position
   * check now only runs when media is attached.
   */
  doTick() {
    switch (this.state) {
      case State.IDLE:
        this.doTickIdle();
        break;
      case State.WAITING_TRACK: {
        // Wait until the current audio track playlist has been loaded
        const { levels, trackId } = this;
        const details = levels?.[trackId]?.details;
        if (details) {
          if (this.waitForCdnTuneIn(details)) {
            break;
          }
          this.state = State.WAITING_INIT_PTS;
        }
        break;
      }
      case State.FRAG_LOADING_WAITING_RETRY: {
        const now = performance.now();
        const retryDate = this.retryDate;
        // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
        if (!retryDate || now >= retryDate || this.media?.seeking) {
          this.log('RetryDate reached, switch back to IDLE state');
          this.state = State.IDLE;
        }
        break;
      }
      case State.WAITING_INIT_PTS: {
        // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
        const waitingData = this.waitingData;
        if (waitingData) {
          const { frag, part, cache, complete } = waitingData;
          if (this.initPTS[frag.cc] !== undefined) {
            // initPTS arrived: replay the cached payload through the transmuxer
            this.waitingData = null;
            this.state = State.FRAG_LOADING;
            const payload = cache.flush();
            const data: FragLoadedData = {
              frag,
              part,
              payload,
              networkDetails: null,
            };
            this._handleFragmentLoadProgress(data);
            if (complete) {
              super._handleFragmentLoadComplete(data);
            }
          } else if (this.videoTrackCC !== this.waitingVideoCC) {
            // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
            logger.log(
              `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`
            );
            this.clearWaitingFragment();
          } else if (this.media) {
            // Drop waiting fragment if an earlier fragment is needed.
            // Guarded on this.media: it can be null while prefetching before attach.
            const bufferInfo = BufferHelper.bufferInfo(
              this.mediaBuffer,
              this.media.currentTime,
              this.config.maxBufferHole
            );
            const waitingFragmentAtPosition = fragmentWithinToleranceTest(
              bufferInfo.end,
              this.config.maxFragLookUpTolerance,
              frag
            );
            if (waitingFragmentAtPosition < 0) {
              logger.log(
                `Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`
              );
              this.clearWaitingFragment();
            }
          }
        } else {
          this.state = State.IDLE;
        }
      }
    }

    this.onTickEnd();
  }
  229.  
  // Discard any cached fragment that was waiting for a video initPTS and
  // return to IDLE so the next tick can pick a new fragment.
  clearWaitingFragment() {
    const { waitingData } = this;
    if (!waitingData) {
      return;
    }
    this.fragmentTracker.removeFragment(waitingData.frag);
    this.waitingData = null;
    this.waitingVideoCC = -1;
    this.state = State.IDLE;
  }
  239.  
  240. protected onTickEnd() {
  241. const { media } = this;
  242. if (!media || !media.readyState) {
  243. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  244. return;
  245. }
  246. const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
  247. const buffered = mediaBuffer.buffered;
  248.  
  249. if (!this.loadedmetadata && buffered.length) {
  250. this.loadedmetadata = true;
  251. }
  252.  
  253. this.lastCurrentTime = media.currentTime;
  254. }
  255.  
  256. private doTickIdle() {
  257. const { hls, levels, media, trackId } = this;
  258.  
  259. const config = hls.config;
  260. if (!levels) {
  261. return;
  262. }
  263.  
  264. // if video not attached AND
  265. // start fragment already requested OR start frag prefetch not enabled
  266. // exit loop
  267. // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
  268. if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
  269. return;
  270. }
  271.  
  272. const pos = this.getLoadPosition();
  273. if (!Number.isFinite(pos)) {
  274. return;
  275. }
  276.  
  277. if (!levels || !levels[trackId]) {
  278. return;
  279. }
  280. const levelInfo = levels[trackId];
  281.  
  282. const trackDetails = levelInfo.details;
  283. if (
  284. !trackDetails ||
  285. (trackDetails.live && this.levelLastLoaded !== trackId) ||
  286. this.waitForCdnTuneIn(trackDetails)
  287. ) {
  288. this.state = State.WAITING_TRACK;
  289. return;
  290. }
  291.  
  292. let frag = trackDetails.initSegment;
  293. let targetBufferTime = 0;
  294. if (!frag || frag.data) {
  295. const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : this.media;
  296. const videoBuffer = this.videoBuffer ? this.videoBuffer : this.media;
  297. const maxBufferHole =
  298. pos < config.maxBufferHole
  299. ? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
  300. : config.maxBufferHole;
  301. const bufferInfo = BufferHelper.bufferInfo(
  302. mediaBuffer,
  303. pos,
  304. maxBufferHole
  305. );
  306. const mainBufferInfo = BufferHelper.bufferInfo(
  307. videoBuffer,
  308. pos,
  309. maxBufferHole
  310. );
  311. const bufferLen = bufferInfo.len;
  312. const maxConfigBuffer = Math.min(
  313. config.maxBufferLength,
  314. config.maxMaxBufferLength
  315. );
  316. const maxBufLen = Math.max(maxConfigBuffer, mainBufferInfo.len);
  317. const audioSwitch = this.audioSwitch;
  318.  
  319. // if buffer length is less than maxBufLen try to load a new fragment
  320. if (bufferLen >= maxBufLen && !audioSwitch) {
  321. return;
  322. }
  323.  
  324. if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
  325. hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
  326. this.state = State.ENDED;
  327. return;
  328. }
  329.  
  330. const fragments = trackDetails.fragments;
  331. const start = fragments[0].start;
  332. targetBufferTime = bufferInfo.end;
  333.  
  334. if (audioSwitch) {
  335. targetBufferTime = pos;
  336. // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
  337. if (trackDetails.PTSKnown && pos < start) {
  338. // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
  339. if (bufferInfo.end > start || bufferInfo.nextStart) {
  340. this.log(
  341. 'Alt audio track ahead of main track, seek to start of alt audio track'
  342. );
  343. media.currentTime = start + 0.05;
  344. }
  345. }
  346. }
  347.  
  348. frag = this.getNextFragment(targetBufferTime, trackDetails);
  349. if (!frag) {
  350. return;
  351. }
  352. }
  353.  
  354. if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
  355. this.log(
  356. `Loading key for ${frag.sn} of [${trackDetails.startSN} ,${trackDetails.endSN}],track ${trackId}`
  357. );
  358. this.state = State.KEY_LOADING;
  359. hls.trigger(Events.KEY_LOADING, { frag });
  360. } else {
  361. this.loadFragment(frag, trackDetails, targetBufferTime);
  362. }
  363. }
  364.  
  // Drop the video SourceBuffer reference before base-class teardown.
  onMediaDetaching() {
    this.videoBuffer = null;
    super.onMediaDetaching();
  }
  369.  
  // Rebuild the internal level list from the updated audio track playlists.
  onAudioTracksUpdated(
    event: Events.AUDIO_TRACKS_UPDATED,
    { audioTracks }: AudioTracksUpdatedData
  ) {
    this.log('Audio tracks updated');
    const levels: Level[] = [];
    for (const mediaPlaylist of audioTracks) {
      levels.push(new Level(mediaPlaylist));
    }
    this.levels = levels;
  }
  377.  
  /**
   * React to a user/controller-initiated audio track switch: abort the
   * in-flight fragment, drop any waiting data, and either start loading the
   * new alternate track or stand down when switching back to main audio.
   */
  onAudioTrackSwitching(
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData
  ) {
    // if any URL found on new audio track, it is an alternate audio track
    const altAudio = !!data.url;
    this.trackId = data.id;
    const { fragCurrent, transmuxer } = this;

    // abort any in-flight fragment load for the previous track
    if (fragCurrent?.loader) {
      fragCurrent.loader.abort();
    }
    this.fragCurrent = null;
    this.clearWaitingFragment();
    // destroy useless transmuxer when switching audio to main
    if (!altAudio) {
      if (transmuxer) {
        transmuxer.destroy();
        this.transmuxer = null;
      }
    } else {
      // switching to audio track, start timer if not already started
      this.setInterval(TICK_INTERVAL);
    }

    // should we switch tracks ?
    if (altAudio) {
      this.audioSwitch = true;
      // main audio track are handled by stream-controller, just do something if switching to alt audio track
      this.state = State.IDLE;
    } else {
      this.state = State.STOPPED;
    }
    this.tick();
  }
  413.  
  // A new manifest is loading: reset cached main playlist details, tracked
  // fragments, and start/playhead positions.
  onManifestLoading() {
    this.mainDetails = null;
    this.fragmentTracker.removeAllFragments();
    this.startPosition = this.lastCurrentTime = 0;
  }
  419.  
  // Capture the main (video) playlist details the first time a level loads;
  // they are used later for PDT alignment of audio track playlists.
  onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    if (!this.mainDetails) {
      this.mainDetails = data.details;
    }
  }
  425.  
  /**
   * An audio track playlist finished loading: align it against the previous
   * details (or the main playlist via PDT), store it, and compute the start
   * position if no fragment has been requested yet.
   */
  onAudioTrackLoaded(event: Events.AUDIO_TRACK_LOADED, data: TrackLoadedData) {
    const { levels } = this;
    const { details: newDetails, id: trackId } = data;
    if (!levels) {
      this.warn(`Audio tracks were reset while loading level ${trackId}`);
      return;
    }
    this.log(
      `Track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${newDetails.totalduration}`
    );

    const track = levels[trackId];
    // playlist sliding (seconds) applied by alignment, used for start position
    let sliding = 0;
    if (newDetails.live || track.details?.live) {
      // an empty delta playlist update is treated as a failed update
      if (!newDetails.fragments[0]) {
        newDetails.deltaUpdateFailed = true;
      }
      if (newDetails.deltaUpdateFailed) {
        return;
      }
      if (
        !track.details &&
        this.mainDetails?.hasProgramDateTime &&
        newDetails.hasProgramDateTime
      ) {
        // first load of this track: align with main playlist via Program-Date-Time
        alignPDT(newDetails, this.mainDetails);
        sliding = newDetails.fragments[0].start;
      } else {
        // subsequent refreshes: align against the previous playlist snapshot
        sliding = this.alignPlaylists(newDetails, track.details);
      }
    }
    track.details = newDetails;
    this.levelLastLoaded = trackId;

    // compute start position
    if (!this.startFragRequested) {
      this.setStartPosition(track.details, sliding);
    }
    // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
    if (
      this.state === State.WAITING_TRACK &&
      !this.waitForCdnTuneIn(newDetails)
    ) {
      this.state = State.IDLE;
    }

    // trigger handler right now
    this.tick();
  }
  475.  
  /**
   * Feed loaded fragment data into the audio transmuxer. If the video
   * initPTS for the fragment's cc is not known yet, cache the payload in
   * waitingData and enter WAITING_INIT_PTS — doTick replays it once the PTS
   * arrives from the main stream-controller.
   */
  _handleFragmentLoadProgress(data: FragLoadedData) {
    const { frag, part, payload } = data;
    const { config, trackId, levels } = this;
    if (!levels) {
      this.warn(
        `Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
      );
      return;
    }

    const track = levels[trackId] as Level;
    console.assert(track, 'Audio track is defined on fragment load progress');
    const details = track.details as LevelDetails;
    console.assert(
      details,
      'Audio track details are defined on fragment load progress'
    );
    // fall back to a default AAC-LC codec when none is declared
    const audioCodec =
      config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';

    // lazily create the audio transmuxer on first use
    let transmuxer = this.transmuxer;
    if (!transmuxer) {
      transmuxer = this.transmuxer = new TransmuxerInterface(
        this.hls,
        PlaylistLevelType.AUDIO,
        this._handleTransmuxComplete.bind(this),
        this._handleTransmuxerFlush.bind(this)
      );
    }

    // Check if we have video initPTS
    // If not we need to wait for it
    const initPTS = this.initPTS[frag.cc];
    const initSegmentData = details.initSegment?.data || new Uint8Array(0);
    if (initPTS !== undefined) {
      // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
      // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
      const accurateTimeOffset = false; // details.PTSKnown || !details.live;
      const partIndex = part ? part.index : -1;
      const partial = partIndex !== -1;
      const chunkMeta = new ChunkMetadata(
        frag.level,
        frag.sn as number,
        frag.stats.chunkCount,
        payload.byteLength,
        partIndex,
        partial
      );
      transmuxer.push(
        payload,
        initSegmentData,
        audioCodec,
        '',
        frag,
        part,
        details.totalduration,
        accurateTimeOffset,
        chunkMeta,
        initPTS
      );
    } else {
      logger.log(
        `Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`
      );
      // Reuse the existing waiting entry if one exists (multiple progress
      // callbacks for the same fragment append to the same cache)
      const { cache } = (this.waitingData = this.waitingData || {
        frag,
        part,
        cache: new ChunkCache(),
        complete: false,
      });
      cache.push(new Uint8Array(payload));
      this.waitingVideoCC = this.videoTrackCC;
      this.state = State.WAITING_INIT_PTS;
    }
  }
  551.  
  552. protected _handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
  553. if (this.waitingData) {
  554. return;
  555. }
  556. super._handleFragmentLoadComplete(fragLoadedData);
  557. }
  558.  
  // Buffers were torn down: drop SourceBuffer references and force
  // loadedmetadata to be re-detected on the next tick.
  onBufferReset() {
    this.mediaBuffer = null;
    this.videoBuffer = null;
    this.loadedmetadata = false;
  }
  564.  
  // Keep references to the newly created audio and video SourceBuffers so
  // buffer-level checks can run against them.
  onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
    const { audio, video } = data.tracks;
    if (audio) {
      this.mediaBuffer = audio.buffer;
    }
    if (video) {
      this.videoBuffer = video.buffer;
    }
  }
  574.  
  /**
   * A fragment finished buffering: record it as fragPrevious and, if this
   * was the first media fragment after an audio switch, conclude the switch.
   */
  onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    const { frag, part } = data;
    // ignore fragments belonging to other playlist types
    if (frag && frag.type !== 'audio') {
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${
          this.state
        }, audioSwitch: ${this.audioSwitch}`
      );
      return;
    }
    this.fragPrevious = frag;
    // the switch completes on the first buffered media fragment (not an init segment)
    if (this.audioSwitch && frag.sn !== 'initSegment') {
      this.audioSwitch = false;
      this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: this.trackId });
    }
    this.fragBufferedComplete(frag, part);
  }
  599.  
  /**
   * Handle error events relevant to the audio stream: retry fragment loads
   * with exponential backoff, recover from track/key load errors, and shrink
   * or flush the buffer on BUFFER_FULL_ERROR.
   * Fixes: removed a dead frag-type re-check inside the FRAG_LOAD case (the
   * guard at the top already returns for non-audio fragments), and read
   * currentTime defensively in case media is detached.
   */
  onError(data) {
    const frag = data.frag;
    // don't handle frag error not related to audio fragment
    if (frag && frag.type !== 'audio') {
      return;
    }

    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT: {
        if (!data.fatal) {
          // count consecutive load errors for exponential backoff
          let loadError = this.fragLoadError;
          if (loadError) {
            loadError++;
          } else {
            loadError = 1;
          }

          const config = this.config;
          if (loadError <= config.fragLoadingMaxRetry) {
            this.fragLoadError = loadError;
            // exponential backoff capped to config.fragLoadingMaxRetryTimeout
            const delay = Math.min(
              Math.pow(2, loadError - 1) * config.fragLoadingRetryDelay,
              config.fragLoadingMaxRetryTimeout
            );
            this.warn(`Frag loading failed, retry in ${delay} ms`);
            this.retryDate = performance.now() + delay;
            // retry loading state
            this.state = State.FRAG_LOADING_WAITING_RETRY;
          } else {
            logger.error(
              `${data.details} reaches max retry, redispatch as fatal ...`
            );
            // switch error to fatal
            data.fatal = true;
            this.state = State.ERROR;
          }
        }
        break;
      }
      case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
      case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        // when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
        if (this.state !== State.ERROR && this.state !== State.STOPPED) {
          // if fatal error, stop processing, otherwise move to IDLE to retry loading
          this.state = data.fatal ? State.ERROR : State.IDLE;
          this.warn(
            `${data.details} while loading frag, switching to ${this.state} state`
          );
        }
        break;
      case ErrorDetails.BUFFER_FULL_ERROR:
        // if in appending state
        if (
          data.parent === 'audio' &&
          (this.state === State.PARSING || this.state === State.PARSED)
        ) {
          const media = this.mediaBuffer;
          // media element may be detached; treat that as "position not buffered"
          const currentTime = this.media?.currentTime;
          const mediaBuffered =
            media &&
            currentTime !== undefined &&
            BufferHelper.isBuffered(media, currentTime) &&
            BufferHelper.isBuffered(media, currentTime + 0.5);
          // reduce max buf len if current position is buffered
          if (mediaBuffered) {
            const config = this.config;
            if (config.maxMaxBufferLength >= config.maxBufferLength) {
              // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
              config.maxMaxBufferLength /= 2;
              this.warn(
                `Reduce max buffer length to ${config.maxMaxBufferLength}s`
              );
            }
            this.state = State.IDLE;
          } else {
            // current position is not buffered, but browser is still complaining about buffer full error
            // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
            // in that case flush the whole audio buffer to recover
            this.warn(
              'Buffer full error also media.currentTime is not buffered, flush audio buffer'
            );
            this.fragCurrent = null;
            // flush everything
            this.hls.trigger(Events.BUFFER_FLUSHING, {
              startOffset: 0,
              endOffset: Number.POSITIVE_INFINITY,
              type: 'audio',
            });
          }
        }
        break;
      default:
        break;
    }
  }
  704.  
  /* after successful buffer flushing, filter flushed fragments from bufferedFrags
     use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track)
  */
  onBufferFlushed(event: Events.BUFFER_FLUSHED, { type }: BufferFlushedData) {
    if (type === ElementaryStreamTypes.AUDIO) {
      const media = this.mediaBuffer ? this.mediaBuffer : this.media;
      if (media) {
        // filter fragments potentially evicted from buffer. this is to avoid memleak on live streams
        this.fragmentTracker.detectEvictedFragments(
          ElementaryStreamTypes.AUDIO,
          BufferHelper.getBuffered(media)
        );
      }
    }
    // reset reference to frag
    this.fragPrevious = null;
    // move to IDLE once flush complete. this should trigger new fragment loading
    this.state = State.IDLE;
  }
  722.  
  /**
   * Receive transmuxed output for one chunk: buffer the init segment and
   * audio samples, conclude a pending audio switch, and re-emit id3/text
   * payloads as events. Bails out if the loading context changed mid-flight.
   */
  private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
    const id = 'audio';
    const { hls } = this;
    const { remuxResult, chunkMeta } = transmuxResult;

    const context = this.getCurrentContext(chunkMeta);
    if (!context) {
      this.warn(
        `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
      );
      return;
    }
    const { frag, part } = context;
    const { audio, text, id3, initSegment } = remuxResult;

    // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
    // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
    if (this.fragContextChanged(frag)) {
      return;
    }

    this.state = State.PARSING;
    if (this.audioSwitch && audio) {
      // first parsed audio of the new track: flush old audio and announce switch
      this.completeAudioSwitch();
    }

    if (initSegment?.tracks) {
      this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag,
        id,
        tracks: initSegment.tracks,
      });
      // Only flush audio from old audio tracks when PTS is known on new audio track
    }
    if (audio) {
      const { startPTS, endPTS, startDTS, endDTS } = audio;
      if (part) {
        // record elementary stream timing on the part (low-latency mode)
        part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      }
      frag.setElementaryStreamInfo(
        ElementaryStreamTypes.AUDIO,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(audio, frag, part, chunkMeta);
    }

    if (id3?.samples?.length) {
      const emittedID3: FragParsingMetadataData = Object.assign(
        {
          frag,
          id,
        },
        id3
      );
      hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
    }
    if (text) {
      const emittedText: FragParsingUserdataData = Object.assign(
        {
          frag,
          id,
        },
        text
      );
      hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
    }
  }
  799.  
  /**
   * Announce codecs and append the audio init segment to the SourceBuffer.
   * Only runs while in PARSING state; any video track produced by the audio
   * transmuxer is discarded.
   */
  private _bufferInitSegment(
    tracks: TrackSet,
    frag: Fragment,
    chunkMeta: ChunkMetadata
  ) {
    if (this.state !== State.PARSING) {
      return;
    }
    // delete any video track found on audio transmuxer
    if (tracks.video) {
      delete tracks.video;
    }

    // include levelCodec in audio and video tracks
    const track = tracks.audio;
    if (!track) {
      return;
    }

    track.levelCodec = track.codec;
    track.id = 'audio';
    this.hls.trigger(Events.BUFFER_CODECS, tracks);
    this.log(
      `Audio, container:${track.container}, codecs[level/parsed]=[${track.levelCodec}/${track.codec}]`
    );
    const initSegment = track.initSegment;
    if (initSegment) {
      const segment: BufferAppendingData = {
        type: 'audio',
        data: initSegment,
        frag,
        part: null,
        chunkMeta,
      };
      this.hls.trigger(Events.BUFFER_APPENDING, segment);
    }
    // trigger handler right now
    this.tick();
  }
  839.  
  /**
   * Load the given fragment unless it is already buffered. When the video
   * initPTS for its cc is unknown, defer by entering WAITING_INIT_PTS.
   */
  protected loadFragment(
    frag: Fragment,
    trackDetails: LevelDetails,
    targetBufferTime: number
  ) {
    // only load if fragment is not loaded or if in audio switch
    const fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;

    // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
    if (
      this.audioSwitch ||
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag);
      } else if (Number.isFinite(this.initPTS[frag.cc])) {
        // video initPTS known: safe to load and transmux immediately
        this.startFragRequested = true;
        this.nextLoadPosition = frag.start + frag.duration;
        super.loadFragment(frag, trackDetails, targetBufferTime);
      } else {
        this.log(
          `Unknown video PTS for continuity counter ${frag.cc}, waiting for video PTS before loading audio fragment ${frag.sn} of level ${this.trackId}`
        );
        this.state = State.WAITING_INIT_PTS;
      }
    }
  }
  869.  
  /**
   * Conclude an audio track switch: flush previously buffered audio (from
   * the old track) and announce that the switch has completed.
   */
  private completeAudioSwitch() {
    const { hls, media, trackId } = this;
    if (media) {
      this.log('Switching audio track : flushing all audio');
      hls.trigger(Events.BUFFER_FLUSHING, {
        startOffset: 0,
        endOffset: Number.POSITIVE_INFINITY,
        type: 'audio',
      });
    }
    this.audioSwitch = false;
    hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: trackId });
  }
}
export default AudioStreamController;