Home Reference Source

src/controller/audio-stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import type { NetworkComponentAPI } from '../types/component-api';
  3. import { Events } from '../events';
  4. import { BufferHelper } from '../utils/buffer-helper';
  5. import type { FragmentTracker } from './fragment-tracker';
  6. import { FragmentState } from './fragment-tracker';
  7. import { Level } from '../types/level';
  8. import { PlaylistLevelType } from '../types/loader';
  9. import Fragment, { ElementaryStreamTypes, Part } from '../loader/fragment';
  10. import FragmentLoader from '../loader/fragment-loader';
  11. import ChunkCache from '../demux/chunk-cache';
  12. import TransmuxerInterface from '../demux/transmuxer-interface';
  13. import type { TransmuxerResult } from '../types/transmuxer';
  14. import { ChunkMetadata } from '../types/transmuxer';
  15. import { fragmentWithinToleranceTest } from './fragment-finders';
  16. import { alignPDT } from '../utils/discontinuities';
  17. import { MAX_START_GAP_JUMP } from './gap-controller';
  18. import { ErrorDetails } from '../errors';
  19. import { logger } from '../utils/logger';
  20. import type Hls from '../hls';
  21. import type LevelDetails from '../loader/level-details';
  22. import type { TrackSet } from '../types/track';
  23. import type {
  24. BufferCreatedData,
  25. AudioTracksUpdatedData,
  26. AudioTrackSwitchingData,
  27. LevelLoadedData,
  28. TrackLoadedData,
  29. BufferAppendingData,
  30. BufferFlushedData,
  31. InitPTSFoundData,
  32. FragLoadedData,
  33. FragParsingMetadataData,
  34. FragParsingUserdataData,
  35. FragBufferedData,
  36. } from '../types/events';
  37. import type { ErrorData } from '../types/events';
  38.  
// Polling period for the controller's tick loop (see setInterval in startLoad).
const TICK_INTERVAL = 100; // how often to tick in ms

// Audio fragment payload held back while the controller is in
// State.WAITING_INIT_PTS, i.e. until the video track's initPTS for the
// fragment's continuity counter (cc) is known.
type WaitingForPTSData = {
  frag: Fragment; // the audio fragment whose payload is being buffered
  part: Part | null; // low-latency part being loaded, if any
  cache: ChunkCache; // accumulates progressive load chunks until transmuxing can start
  complete: boolean; // true once the fragment load has fully completed
};
  47.  
/**
 * Stream controller for alternate (non-muxed) audio tracks.
 *
 * Loads audio fragments for the currently selected audio track, feeds them to
 * an audio transmuxer, and appends the results to the audio SourceBuffer.
 * Audio timing depends on the video track: fragments whose continuity counter
 * (cc) has no known video initPTS yet are cached in `waitingData` until the
 * main stream-controller publishes INIT_PTS_FOUND for that cc.
 */
class AudioStreamController
  extends BaseStreamController
  implements NetworkComponentAPI {
  // performance.now() timestamp after which a failed fragment load may be retried
  private retryDate: number = 0;
  // buffer of the main video track (from BUFFER_CREATED); used to size audio buffering
  private videoBuffer: any | null = null;
  // cc of the last video fragment for which INIT_PTS_FOUND was received
  private videoTrackCC: number = -1;
  // videoTrackCC captured when the current waitingData was set
  private waitingVideoCC: number = -1;
  // true while switching to an alternate audio track (cleared on AUDIO_TRACK_SWITCHED)
  private audioSwitch: boolean = false;
  // index of the current audio track within this.levels
  private trackId: number = -1;
  // audio fragment payload cached while waiting for video initPTS (see type above)
  private waitingData: WaitingForPTSData | null = null;
  // details of the main playlist, used for PDT alignment of the audio playlist
  private mainDetails: LevelDetails | null = null;

  constructor(hls: Hls, fragmentTracker: FragmentTracker) {
    super(hls, fragmentTracker, '[audio-stream-controller]');
    this.fragmentLoader = new FragmentLoader(hls.config);

    this._registerListeners();
  }

  // Called by the base class before destruction; detach all hls event handlers.
  protected onHandlerDestroying() {
    this._unregisterListeners();
  }

  private _registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
    hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.on(Events.ERROR, this.onError, this);
    hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }

  // Mirror of _registerListeners; keep both lists in sync when adding events.
  private _unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
    hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
    hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
    hls.off(Events.ERROR, this.onError, this);
    hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
    hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }

  // INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
  onInitPtsFound(
    event: Events.INIT_PTS_FOUND,
    { frag, id, initPTS }: InitPTSFoundData
  ) {
    // Always update the new INIT PTS
    // Can change due level switch
    if (id === 'main') {
      const cc = frag.cc;
      this.initPTS[frag.cc] = initPTS;
      this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);
      this.videoTrackCC = cc;
      // If we are waiting, tick immediately to unblock audio fragment transmuxing
      if (this.state === State.WAITING_INIT_PTS) {
        this.tick();
      }
    }
  }

  /**
   * Start (or restart) fragment loading from the given position.
   * If tracks are not known yet, only records the position and stays STOPPED.
   */
  startLoad(startPosition) {
    if (!this.levels) {
      this.startPosition = startPosition;
      this.state = State.STOPPED;
      return;
    }
    const lastCurrentTime = this.lastCurrentTime;
    this.stopLoad();
    this.setInterval(TICK_INTERVAL);
    this.fragLoadError = 0;
    // startPosition of -1 means "resume from where playback last was"
    if (lastCurrentTime > 0 && startPosition === -1) {
      this.log(
        `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
          3
        )}`
      );
      this.state = State.IDLE;
    } else {
      this.lastCurrentTime = this.startPosition
        ? this.startPosition
        : startPosition;
      this.loadedmetadata = false;
      this.state = State.WAITING_TRACK;
    }
    this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
    this.tick();
  }

  /**
   * State-machine step, driven by the tick interval and explicit tick() calls.
   * Handles IDLE loading, waiting for track details, retry back-off, and
   * releasing fragments that were parked waiting for a video initPTS.
   */
  doTick() {
    switch (this.state) {
      case State.IDLE:
        this.doTickIdle();
        break;
      case State.WAITING_TRACK: {
        const { levels, trackId } = this;
        const details = levels?.[trackId]?.details;
        if (details) {
          if (this.waitForCdnTuneIn(details)) {
            break;
          }
          this.state = State.WAITING_INIT_PTS;
        }
        break;
      }
      case State.FRAG_LOADING_WAITING_RETRY: {
        const now = performance.now();
        const retryDate = this.retryDate;
        // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
        if (!retryDate || now >= retryDate || this.media?.seeking) {
          this.log('RetryDate reached, switch back to IDLE state');
          this.state = State.IDLE;
        }
        break;
      }
      case State.WAITING_INIT_PTS: {
        // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
        const waitingData = this.waitingData;
        if (waitingData) {
          const { frag, part, cache, complete } = waitingData;
          if (this.initPTS[frag.cc] !== undefined) {
            // initPTS arrived: replay the cached payload through the normal
            // fragment-load path as if it had just been downloaded
            this.waitingData = null;
            this.state = State.FRAG_LOADING;
            const payload = cache.flush();
            const data: FragLoadedData = {
              frag,
              part,
              payload,
              networkDetails: null,
            };
            this._handleFragmentLoadProgress(data);
            if (complete) {
              super._handleFragmentLoadComplete(data);
            }
          } else if (this.videoTrackCC !== this.waitingVideoCC) {
            // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
            logger.log(
              `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`
            );
            this.clearWaitingFragment();
          } else {
            // Drop waiting fragment if an earlier fragment is needed
            // NOTE(review): this.media is dereferenced without a null check here;
            // presumably media is always attached in this state — confirm
            const bufferInfo = BufferHelper.bufferInfo(
              this.mediaBuffer,
              this.media.currentTime,
              this.config.maxBufferHole
            );
            const waitingFragmentAtPosition = fragmentWithinToleranceTest(
              bufferInfo.end,
              this.config.maxFragLookUpTolerance,
              frag
            );
            if (waitingFragmentAtPosition < 0) {
              logger.log(
                `Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`
              );
              this.clearWaitingFragment();
            }
          }
        } else {
          this.state = State.IDLE;
        }
      }
    }

    this.onTickEnd();
  }

  /**
   * Discard any fragment parked in waitingData, remove it from the fragment
   * tracker so it can be re-loaded, and go back to IDLE.
   */
  clearWaitingFragment() {
    const waitingData = this.waitingData;
    if (waitingData) {
      this.fragmentTracker.removeFragment(waitingData.frag);
      this.waitingData = null;
      this.waitingVideoCC = -1;
      this.state = State.IDLE;
    }
  }

  // Runs after every doTick: tracks playback position and loadedmetadata.
  protected onTickEnd() {
    const { media } = this;
    if (!media || !media.readyState) {
      // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
      return;
    }
    const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
    const buffered = mediaBuffer.buffered;

    if (!this.loadedmetadata && buffered.length) {
      this.loadedmetadata = true;
    }

    this.lastCurrentTime = media.currentTime;
  }

  /**
   * IDLE-state work: decide whether a new audio fragment (or init segment,
   * or decryption key) should be loaded, based on buffer levels and the
   * current audio-switch status.
   */
  private doTickIdle() {
    const { hls, levels, media, trackId } = this;

    const config = hls.config;
    if (!levels) {
      return;
    }

    // if video not attached AND
    // start fragment already requested OR start frag prefetch not enabled
    // exit loop
    // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
    if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
      return;
    }

    const pos = this.getLoadPosition();
    if (!Number.isFinite(pos)) {
      return;
    }

    if (!levels || !levels[trackId]) {
      return;
    }
    const levelInfo = levels[trackId];

    const trackDetails = levelInfo.details;
    // Wait for playlist details (or a fresh live playlist for this track)
    if (
      !trackDetails ||
      (trackDetails.live && this.levelLastLoaded !== trackId) ||
      this.waitForCdnTuneIn(trackDetails)
    ) {
      this.state = State.WAITING_TRACK;
      return;
    }

    // Load the init segment first if it exists and has no data yet;
    // otherwise pick the next media fragment based on buffer state.
    let frag = trackDetails.initSegment;
    let targetBufferTime = 0;
    if (!frag || frag.data) {
      const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : this.media;
      const videoBuffer = this.videoBuffer ? this.videoBuffer : this.media;
      // Near the stream start, tolerate a larger gap so playback can begin
      const maxBufferHole =
        pos < config.maxBufferHole
          ? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
          : config.maxBufferHole;
      const bufferInfo = BufferHelper.bufferInfo(
        mediaBuffer,
        pos,
        maxBufferHole
      );
      const mainBufferInfo = BufferHelper.bufferInfo(
        videoBuffer,
        pos,
        maxBufferHole
      );
      const bufferLen = bufferInfo.len;
      // Allow audio to buffer up to the video buffer length, capped by config
      const maxConfigBuffer = Math.min(
        config.maxBufferLength,
        config.maxMaxBufferLength
      );
      const maxBufLen = Math.max(maxConfigBuffer, mainBufferInfo.len);
      const audioSwitch = this.audioSwitch;

      // if buffer length is less than maxBufLen try to load a new fragment
      if (bufferLen >= maxBufLen && !audioSwitch) {
        return;
      }

      if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
        hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
        this.state = State.ENDED;
        return;
      }

      const fragments = trackDetails.fragments;
      const start = fragments[0].start;
      targetBufferTime = bufferInfo.end;

      if (audioSwitch) {
        // During a track switch load at the playhead rather than buffer end
        targetBufferTime = pos;
        // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
        if (trackDetails.PTSKnown && pos < start) {
          // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
          if (bufferInfo.end > start || bufferInfo.nextStart) {
            this.log(
              'Alt audio track ahead of main track, seek to start of alt audio track'
            );
            // NOTE(review): media may be null when startFragPrefetch is enabled
            // and media is not yet attached — confirm this path is unreachable then
            media.currentTime = start + 0.05;
          }
        }
      }

      frag = this.getNextFragment(targetBufferTime, trackDetails);
      if (!frag) {
        return;
      }
    }

    // Identity-key encrypted fragments need their key loaded before the frag
    if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
      this.log(
        `Loading key for ${frag.sn} of [${trackDetails.startSN} ,${trackDetails.endSN}],track ${trackId}`
      );
      this.state = State.KEY_LOADING;
      hls.trigger(Events.KEY_LOADING, { frag });
    } else {
      this.loadFragment(frag, trackDetails, targetBufferTime);
    }
  }

  onMediaDetaching() {
    this.videoBuffer = null;
    super.onMediaDetaching();
  }

  // Rebuild the internal Level list when the set of audio tracks changes.
  onAudioTracksUpdated(
    event: Events.AUDIO_TRACKS_UPDATED,
    { audioTracks }: AudioTracksUpdatedData
  ) {
    this.levels = audioTracks.map((mediaPlaylist) => new Level(mediaPlaylist));
  }

  /**
   * Handle a user/ABR-initiated audio track switch: abort in-flight loads,
   * drop waiting data, and either stop (muxed main audio) or go IDLE to
   * start loading the alternate track.
   */
  onAudioTrackSwitching(
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData
  ) {
    // if any URL found on new audio track, it is an alternate audio track
    const altAudio = !!data.url;
    this.trackId = data.id;
    const { fragCurrent, transmuxer } = this;

    if (fragCurrent?.loader) {
      fragCurrent.loader.abort();
    }
    this.fragCurrent = null;
    this.clearWaitingFragment();
    // destroy useless transmuxer when switching audio to main
    if (!altAudio) {
      if (transmuxer) {
        transmuxer.destroy();
        this.transmuxer = null;
      }
    } else {
      // switching to audio track, start timer if not already started
      this.setInterval(TICK_INTERVAL);
    }

    // should we switch tracks ?
    if (altAudio) {
      this.audioSwitch = true;
      // main audio track are handled by stream-controller, just do something if switching to alt audio track
      this.state = State.IDLE;
    } else {
      this.state = State.STOPPED;
    }
    this.tick();
  }

  // Reset state for a new manifest load.
  onManifestLoading() {
    this.mainDetails = null;
    this.fragmentTracker.removeAllFragments();
    this.startPosition = this.lastCurrentTime = 0;
  }

  // Capture the first main-playlist details (used for PDT alignment below).
  onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    if (this.mainDetails === null) {
      this.mainDetails = data.details;
    }
  }

  /**
   * New playlist details for an audio track: align live playlists against
   * the previous details (or main PDT), store them, and resume loading if
   * we were waiting on this track.
   */
  onAudioTrackLoaded(event: Events.AUDIO_TRACK_LOADED, data: TrackLoadedData) {
    const { levels } = this;
    const { details: newDetails, id: trackId } = data;
    if (!levels) {
      this.warn(`Audio tracks were reset while loading level ${trackId}`);
      return;
    }
    this.log(
      `Track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${newDetails.totalduration}`
    );

    const track = levels[trackId];
    let sliding = 0;
    if (newDetails.live || track.details?.live) {
      // An empty delta playlist update is treated as a failed update
      if (!newDetails.fragments[0]) {
        newDetails.deltaUpdateFailed = true;
      }
      if (newDetails.deltaUpdateFailed) {
        return;
      }
      if (
        !track.details &&
        this.mainDetails?.hasProgramDateTime &&
        newDetails.hasProgramDateTime
      ) {
        // First load of this track: align via program-date-time with main
        alignPDT(newDetails, this.mainDetails);
        sliding = newDetails.fragments[0].start;
      } else {
        sliding = this.alignPlaylists(newDetails, track.details);
      }
    }
    track.details = newDetails;
    this.levelLastLoaded = trackId;

    // compute start position
    if (!this.startFragRequested) {
      this.setStartPosition(track.details, sliding);
    }
    // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
    if (
      this.state === State.WAITING_TRACK &&
      !this.waitForCdnTuneIn(newDetails)
    ) {
      this.state = State.IDLE;
    }

    // trigger handler right now
    this.tick();
  }

  /**
   * Progressive fragment-load callback: push the payload to the transmuxer
   * if the video initPTS for this cc is known, otherwise cache it in
   * waitingData and enter WAITING_INIT_PTS.
   */
  _handleFragmentLoadProgress(data: FragLoadedData) {
    const { frag, part, payload } = data;
    const { config, trackId, levels } = this;
    if (!levels) {
      this.warn(
        `Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
      );
      return;
    }

    const track = levels[trackId] as Level;
    console.assert(track, 'Audio track is defined on fragment load progress');
    const details = track.details as LevelDetails;
    console.assert(
      details,
      'Audio track details are defined on fragment load progress'
    );
    const audioCodec =
      config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';

    // Lazily create the audio transmuxer on first use
    let transmuxer = this.transmuxer;
    if (!transmuxer) {
      transmuxer = this.transmuxer = new TransmuxerInterface(
        this.hls,
        PlaylistLevelType.AUDIO,
        this._handleTransmuxComplete.bind(this),
        this._handleTransmuxerFlush.bind(this)
      );
    }

    // Check if we have video initPTS
    // If not we need to wait for it
    const initPTS = this.initPTS[frag.cc];
    const initSegmentData = details.initSegment?.data;
    if (initPTS !== undefined) {
      // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
      // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
      const accurateTimeOffset = false; // details.PTSKnown || !details.live;
      const partIndex = part ? part.index : -1;
      const partial = partIndex !== -1;
      const chunkMeta = new ChunkMetadata(
        frag.level,
        frag.sn as number,
        frag.stats.chunkCount,
        payload.byteLength,
        partIndex,
        partial
      );
      transmuxer.push(
        payload,
        initSegmentData,
        audioCodec,
        '',
        frag,
        part,
        details.totalduration,
        accurateTimeOffset,
        chunkMeta,
        initPTS
      );
    } else {
      logger.log(
        `Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`
      );
      // Reuse existing waitingData across progressive chunks of the same frag
      const { cache } = (this.waitingData = this.waitingData || {
        frag,
        part,
        cache: new ChunkCache(),
        complete: false,
      });
      cache.push(new Uint8Array(payload));
      this.waitingVideoCC = this.videoTrackCC;
      this.state = State.WAITING_INIT_PTS;
    }
  }

  // Defer load-complete handling while waiting for initPTS; it is replayed
  // from doTick's WAITING_INIT_PTS branch once the PTS arrives.
  protected _handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
    if (this.waitingData) {
      this.waitingData.complete = true;
      return;
    }
    super._handleFragmentLoadComplete(fragLoadedData);
  }

  onBufferReset(/* event: Events.BUFFER_RESET */) {
    // reset reference to sourcebuffers
    this.mediaBuffer = this.videoBuffer = null;
    this.loadedmetadata = false;
  }

  // Capture references to the audio and video SourceBuffers once created.
  onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
    const audioTrack = data.tracks.audio;
    if (audioTrack) {
      this.mediaBuffer = audioTrack.buffer;
    }
    if (data.tracks.video) {
      this.videoBuffer = data.tracks.video.buffer;
    }
  }

  /**
   * A fragment finished buffering: record it, and if this was the first
   * media fragment of an audio switch, signal AUDIO_TRACK_SWITCHED.
   */
  onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    const { frag, part } = data;
    if (frag.type !== PlaylistLevelType.AUDIO) {
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${
          this.state
        }, audioSwitch: ${this.audioSwitch}`
      );
      return;
    }
    this.fragPrevious = frag;
    if (this.audioSwitch && frag.sn !== 'initSegment') {
      this.audioSwitch = false;
      this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: this.trackId });
    }
    this.fragBufferedComplete(frag, part);
  }

  /**
   * Error handling: retry fragment/key loads with exponential back-off,
   * recover from audio-track load errors, and handle BUFFER_FULL by either
   * shrinking the buffer target or flushing the audio buffer.
   */
  private onError(event: Events.ERROR, data: ErrorData) {
    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        if (!data.fatal) {
          const frag = data.frag;
          // don't handle frag error not related to audio fragment
          if (!frag || frag.type !== PlaylistLevelType.AUDIO) {
            return;
          }
          const fragCurrent = this.fragCurrent;
          console.assert(
            fragCurrent &&
              frag.sn === fragCurrent.sn &&
              frag.level === fragCurrent.level &&
              frag.urlId === fragCurrent.urlId, // FIXME: audio-group id
            'Frag load error must match current frag to retry'
          );
          const config = this.config;
          if (this.fragLoadError + 1 <= this.config.fragLoadingMaxRetry) {
            // exponential backoff capped to config.fragLoadingMaxRetryTimeout
            const delay = Math.min(
              Math.pow(2, this.fragLoadError) * config.fragLoadingRetryDelay,
              config.fragLoadingMaxRetryTimeout
            );
            this.warn(`Frag loading failed, retry in ${delay} ms`);
            this.retryDate = performance.now() + delay;
            this.fragLoadError++;
            this.state = State.FRAG_LOADING_WAITING_RETRY;
          } else if (data.levelRetry) {
            // Reset current fragment since audio track audio is essential and may not have a fail-over track
            this.fragCurrent = null;
            // Fragment errors that result in a level switch or redundant fail-over
            // should reset the audio stream controller state to idle
            this.fragLoadError = 0;
            this.state = State.IDLE;
          } else {
            logger.error(
              `${data.details} reaches max retry, redispatch as fatal ...`
            );
            // switch error to fatal
            data.fatal = true;
            this.hls.stopLoad();
            this.state = State.ERROR;
          }
        }
        break;
      case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
      case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
        // when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
        if (this.state !== State.ERROR && this.state !== State.STOPPED) {
          // if fatal error, stop processing, otherwise move to IDLE to retry loading
          this.state = data.fatal ? State.ERROR : State.IDLE;
          this.warn(
            `${data.details} while loading frag, switching to ${this.state} state`
          );
        }
        break;
      case ErrorDetails.BUFFER_FULL_ERROR:
        // if in appending state
        if (
          data.parent === 'audio' &&
          (this.state === State.PARSING || this.state === State.PARSED)
        ) {
          const media = this.mediaBuffer;
          const currentTime = this.media.currentTime;
          const mediaBuffered =
            media &&
            BufferHelper.isBuffered(media, currentTime) &&
            BufferHelper.isBuffered(media, currentTime + 0.5);
          // reduce max buf len if current position is buffered
          if (mediaBuffered) {
            this.reduceMaxBufferLength();
            this.state = State.IDLE;
          } else {
            // current position is not buffered, but browser is still complaining about buffer full error
            // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
            // in that case flush the whole audio buffer to recover
            this.warn(
              'Buffer full error also media.currentTime is not buffered, flush audio buffer'
            );
            this.fragCurrent = null;
            // flush everything
            this.hls.trigger(Events.BUFFER_FLUSHING, {
              startOffset: 0,
              endOffset: Number.POSITIVE_INFINITY,
              type: 'audio',
            });
          }
        }
        break;
      default:
        break;
    }
  }

  onBufferFlushed(event: Events.BUFFER_FLUSHED, { type }: BufferFlushedData) {
    /* after successful buffer flushing, filter flushed fragments from bufferedFrags
      use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track)
    */
    const media = this.mediaBuffer ? this.mediaBuffer : this.media;
    if (media && type === ElementaryStreamTypes.AUDIO) {
      // filter fragments potentially evicted from buffer. this is to avoid memleak on live streams
      this.fragmentTracker.detectEvictedFragments(
        ElementaryStreamTypes.AUDIO,
        BufferHelper.getBuffered(media)
      );
    }
    // reset reference to frag
    this.fragPrevious = null;
    // move to IDLE once flush complete. this should trigger new fragment loading
    this.state = State.IDLE;
  }

  /**
   * Transmuxer output callback: buffer parsed audio data, emit init-segment,
   * ID3 metadata and user-data events, and complete a pending audio switch.
   */
  private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
    const id = 'audio';
    const { hls } = this;
    const { remuxResult, chunkMeta } = transmuxResult;

    const context = this.getCurrentContext(chunkMeta);
    if (!context) {
      this.warn(
        `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
      );
      return;
    }
    const { frag, part } = context;
    const { audio, text, id3, initSegment } = remuxResult;

    // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
    // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
    if (this.fragContextChanged(frag)) {
      return;
    }

    this.state = State.PARSING;
    if (this.audioSwitch && audio) {
      this.completeAudioSwitch();
    }

    if (initSegment?.tracks) {
      this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag,
        id,
        tracks: initSegment.tracks,
      });
      // Only flush audio from old audio tracks when PTS is known on new audio track
    }
    if (audio) {
      const { startPTS, endPTS, startDTS, endDTS } = audio;
      if (part) {
        part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      }
      frag.setElementaryStreamInfo(
        ElementaryStreamTypes.AUDIO,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(audio, frag, part, chunkMeta);
    }

    if (id3?.samples?.length) {
      const emittedID3: FragParsingMetadataData = Object.assign(
        {
          frag,
          id,
        },
        id3
      );
      hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
    }
    if (text) {
      const emittedText: FragParsingUserdataData = Object.assign(
        {
          frag,
          id,
        },
        text
      );
      hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
    }
  }

  /**
   * Announce codecs and append the audio init segment produced by the
   * transmuxer to the buffer.
   */
  private _bufferInitSegment(
    tracks: TrackSet,
    frag: Fragment,
    chunkMeta: ChunkMetadata
  ) {
    if (this.state !== State.PARSING) {
      return;
    }
    // delete any video track found on audio transmuxer
    if (tracks.video) {
      delete tracks.video;
    }

    // include levelCodec in audio and video tracks
    const track = tracks.audio;
    if (!track) {
      return;
    }

    track.levelCodec = track.codec;
    track.id = 'audio';
    this.log(
      `Init audio buffer, container:${track.container}, codecs[parsed]=[${track.codec}]`
    );
    this.hls.trigger(Events.BUFFER_CODECS, tracks);
    const initSegment = track.initSegment;
    if (initSegment?.byteLength) {
      const segment: BufferAppendingData = {
        type: 'audio',
        data: initSegment,
        frag,
        part: null,
        chunkMeta,
      };
      this.hls.trigger(Events.BUFFER_APPENDING, segment);
    }
    // trigger handler right now
    this.tick();
  }

  /**
   * Load a fragment unless the tracker reports it as already loaded; live
   * streams additionally wait for a video initPTS before requesting media
   * fragments of an unknown cc.
   */
  protected loadFragment(
    frag: Fragment,
    trackDetails: LevelDetails,
    targetBufferTime: number
  ) {
    // only load if fragment is not loaded or if in audio switch
    const fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;

    // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
    if (
      this.audioSwitch ||
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag);
      } else if (trackDetails.live && !Number.isFinite(this.initPTS[frag.cc])) {
        this.log(
          `Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`
        );
        this.state = State.WAITING_INIT_PTS;
      } else {
        this.startFragRequested = true;
        this.nextLoadPosition = frag.start + frag.duration;
        super.loadFragment(frag, trackDetails, targetBufferTime);
      }
    }
  }

  /**
   * Finalize an audio-track switch: flush all previously buffered audio and
   * notify listeners via AUDIO_TRACK_SWITCHED.
   */
  private completeAudioSwitch() {
    const { hls, media, trackId } = this;
    if (media) {
      this.log('Switching audio track : flushing all audio');
      hls.trigger(Events.BUFFER_FLUSHING, {
        startOffset: 0,
        endOffset: Number.POSITIVE_INFINITY,
        type: 'audio',
      });
    }
    this.audioSwitch = false;
    hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: trackId });
  }
}
export default AudioStreamController;